diff --git a/.github/ISSUE_TEMPLATE/11_crash_report.yml b/.github/ISSUE_TEMPLATE/11_crash_report.yml index 1300809a39c6ecd9a10eb6a28e80ef4478dba6b5..97979308ae5ab4037c32db2660544c1299f2c750 100644 --- a/.github/ISSUE_TEMPLATE/11_crash_report.yml +++ b/.github/ISSUE_TEMPLATE/11_crash_report.yml @@ -35,10 +35,8 @@ body: attributes: label: If applicable, attach your `Zed.log` file to this issue. description: | - macOS: `~/Library/Logs/Zed/Zed.log` - Windows: `C:\Users\YOU\AppData\Local\Zed\logs\Zed.log` - Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME - If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000. + From the command palette, run `zed: open log` to see the last 1000 lines. + Or run `zed: reveal log in file manager` to reveal the log file itself. value: |
Zed.log diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2ebbcaba49823787aafe40e5f3dd80eb67478b42..8fe069c5e060ce5846cfd493f07148b8e4e8d2a5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -516,9 +516,7 @@ jobs: name: Create a macOS bundle runs-on: - self-mini-macos - if: | - ( startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) + if: startsWith(github.ref, 'refs/tags/v') needs: [macos_tests] env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} @@ -549,16 +547,14 @@ jobs: ref: ${{ github.ref }} - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 + run: script/clear-target-dir-if-larger-than 300 - name: Determine version and release channel - if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | # This exports RELEASE_CHANNEL into env (GITHUB_ENV) script/determine-release-channel - name: Draft release notes - if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | mkdir -p target/ # Ignore any errors that occur while drafting release notes to not fail the build. 
@@ -567,29 +563,17 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Create macOS app bundle - run: script/bundle-mac + - name: Create macOS app bundle (aarch64) + run: script/bundle-mac aarch64-apple-darwin + + - name: Create macOS app bundle (x64) + run: script/bundle-mac x86_64-apple-darwin - name: Rename binaries - if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} run: | mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg - - name: Upload app bundle (aarch64) to workflow run if main branch or specific label - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} - with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg - path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg - - - name: Upload app bundle (x86_64) to workflow run if main branch or specific label - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} - with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg - path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg - - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 name: Upload app bundle to release if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }} @@ -610,8 +594,7 @@ jobs: runs-on: - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc if: | - ( startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) + ( startsWith(github.ref, 'refs/tags/v') ) needs: 
[linux_tests] steps: - name: Checkout repo @@ -628,7 +611,6 @@ jobs: token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - name: Determine version and release channel - if: startsWith(github.ref, 'refs/tags/v') run: | # This exports RELEASE_CHANNEL into env (GITHUB_ENV) script/determine-release-channel @@ -636,23 +618,8 @@ jobs: - name: Create Linux .tar.gz bundle run: script/bundle-linux - - name: Upload Artifact to Workflow - zed (run-bundling) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: contains(github.event.pull_request.labels.*.name, 'run-bundling') - with: - name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - path: target/release/zed-*.tar.gz - - - name: Upload Artifact to Workflow - zed-remote-server (run-bundling) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: contains(github.event.pull_request.labels.*.name, 'run-bundling') - with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz - path: target/zed-remote-server-linux-x86_64.gz - - name: Upload Artifacts to release uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }} with: draft: true prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} @@ -669,7 +636,6 @@ jobs: - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc if: | startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') needs: [linux_tests] steps: - name: Checkout repo @@ -686,7 +652,6 @@ jobs: token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - name: Determine version and release channel - if: startsWith(github.ref, 'refs/tags/v') run: | # This exports RELEASE_CHANNEL into env (GITHUB_ENV) script/determine-release-channel @@ -694,23 +659,8 @@ jobs: - name: Create and upload Linux .tar.gz bundles run: 
script/bundle-linux - - name: Upload Artifact to Workflow - zed (run-bundling) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: contains(github.event.pull_request.labels.*.name, 'run-bundling') - with: - name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - path: target/release/zed-*.tar.gz - - - name: Upload Artifact to Workflow - zed-remote-server (run-bundling) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: contains(github.event.pull_request.labels.*.name, 'run-bundling') - with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz - path: target/zed-remote-server-linux-aarch64.gz - - name: Upload Artifacts to release uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }} with: draft: true prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} @@ -724,8 +674,7 @@ jobs: timeout-minutes: 60 runs-on: github-8vcpu-ubuntu-2404 if: | - false && ( startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) + false && ( startsWith(github.ref, 'refs/tags/v') ) needs: [linux_tests] name: Build Zed on FreeBSD steps: @@ -776,7 +725,7 @@ jobs: nix-build: name: Build with Nix - uses: ./.github/workflows/nix.yml + uses: ./.github/workflows/nix_build.yml needs: [job_spec] if: github.repository_owner == 'zed-industries' && (contains(github.event.pull_request.labels.*.name, 'run-nix') || @@ -789,11 +738,10 @@ jobs: bundle-windows-x64: timeout-minutes: 120 - name: Create a Windows installer + name: Create a Windows installer for x86_64 runs-on: [self-32vcpu-windows-2022] if: | - ( startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) + ( startsWith(github.ref, 'refs/tags/v') ) needs: [windows_tests] 
env: AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} @@ -818,7 +766,6 @@ jobs: - name: Determine version and release channel working-directory: ${{ env.ZED_WORKSPACE }} - if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | # This exports RELEASE_CHANNEL into env (GITHUB_ENV) script/determine-release-channel.ps1 @@ -827,16 +774,55 @@ jobs: working-directory: ${{ env.ZED_WORKSPACE }} run: script/bundle-windows.ps1 - - name: Upload installer (x86_64) to Workflow - zed (run-bundling) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: contains(github.event.pull_request.labels.*.name, 'run-bundling') + - name: Upload Artifacts to release + uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 + with: + draft: true + prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} + files: ${{ env.SETUP_PATH }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + bundle-windows-aarch64: + timeout-minutes: 120 + name: Create a Windows installer for aarch64 + runs-on: [self-32vcpu-windows-2022] + if: | + ( startsWith(github.ref, 'refs/tags/v') ) + needs: [windows_tests] + env: + AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} + ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} + CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} + ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} + FILE_DIGEST: SHA256 + TIMESTAMP_DIGEST: SHA256 + TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com" + steps: + - name: Checkout repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe - path: ${{ env.SETUP_PATH }} + clean: false + + - name: Setup Sentry CLI + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 + with: + token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} + + - 
name: Determine version and release channel + working-directory: ${{ env.ZED_WORKSPACE }} + run: | + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel.ps1 + + - name: Build Zed installer + working-directory: ${{ env.ZED_WORKSPACE }} + run: script/bundle-windows.ps1 -Architecture aarch64 - name: Upload Artifacts to release uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }} with: draft: true prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} @@ -850,7 +836,7 @@ jobs: false && startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') - needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64] + needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64, bundle-windows-aarch64] runs-on: - self-mini-macos steps: diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 3f84179278d1baaa7a299e2292b3041830d9ca60..1134167e05e29ffebfcf176b4f8c6cfc1b9e862d 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -1,42 +1,40 @@ -name: Danger - +# Generated from xtask::workflows::danger +# Rebuild with `cargo xtask workflows`. 
+name: danger on: pull_request: - branches: [main] types: - - opened - - synchronize - - reopened - - edited - + - opened + - synchronize + - reopened + - edited + branches: + - main jobs: danger: if: github.repository_owner == 'zed-industries' runs-on: namespace-profile-2x4-ubuntu-2404 - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0 - with: - version: 9 - - - name: Setup Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: "20" - cache: "pnpm" - cache-dependency-path: "script/danger/pnpm-lock.yaml" - - - run: pnpm install --dir script/danger - - - name: Run Danger - run: pnpm run --dir script/danger danger ci - env: - # This GitHub token is not used, but the value needs to be here to prevent - # Danger from throwing an error. - GITHUB_TOKEN: "not_a_real_token" - # All requests are instead proxied through an instance of - # https://github.com/maxdeviant/danger-proxy that allows Danger to securely - # authenticate with GitHub while still being able to run on PRs from forks. 
- DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github" + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_pnpm + uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 + with: + version: '9' + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + cache: pnpm + cache-dependency-path: script/danger/pnpm-lock.yaml + - name: danger::install_deps + run: pnpm install --dir script/danger + shell: bash -euxo pipefail {0} + - name: danger::run + run: pnpm run --dir script/danger danger ci + shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: not_a_real_token + DANGER_GITHUB_API_BASE_URL: https://danger-proxy.fly.dev/github diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index ff2a3589e4c5482089536919618f1bbff982c63c..c61879faa8cd0a5dbdbed03a140f8e558f13322b 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -49,7 +49,7 @@ jobs: - name: Limit target directory size shell: bash -euxo pipefail {0} - run: script/clear-target-dir-if-larger-than 100 + run: script/clear-target-dir-if-larger-than 300 - name: Run tests shell: bash -euxo pipefail {0} diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml deleted file mode 100644 index e682ce5890b86e8a3cf181be2d302d66025572c2..0000000000000000000000000000000000000000 --- a/.github/workflows/nix.yml +++ /dev/null @@ -1,69 +0,0 @@ -name: "Nix build" - -on: - workflow_call: - inputs: - flake-output: - type: string - default: "default" - cachix-filter: - type: string - default: "" - -jobs: - nix-build: - timeout-minutes: 60 - name: (${{ matrix.system.os }}) Nix Build - continue-on-error: true # TODO: remove when we want this to start blocking CI - strategy: - fail-fast: false - matrix: - system: - - os: x86 Linux - runner: namespace-profile-16x32-ubuntu-2204 - 
install_nix: true - - os: arm Mac - runner: [macOS, ARM64, test] - install_nix: false - if: github.repository_owner == 'zed-industries' - runs-on: ${{ matrix.system.runner }} - env: - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} - GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - # on our macs we manually install nix. for some reason the cachix action is running - # under a non-login /bin/bash shell which doesn't source the proper script to add the - # nix profile to PATH, so we manually add them here - - name: Set path - if: ${{ ! matrix.system.install_nix }} - run: | - echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" - echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" - - - uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31 - if: ${{ matrix.system.install_nix }} - with: - github_access_token: ${{ secrets.GITHUB_TOKEN }} - - - uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16 - with: - name: zed - authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}" - pushFilter: "${{ inputs.cachix-filter }}" - cachixArgs: "-v" - - - run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config - - - name: Limit /nix/store to 50GB on macs - if: ${{ ! 
matrix.system.install_nix }} - run: | - if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then - nix-collect-garbage -d || true - fi diff --git a/.github/workflows/nix_build.yml b/.github/workflows/nix_build.yml new file mode 100644 index 0000000000000000000000000000000000000000..cc6bac824f8374cd60bb5ce5dcfb19439ecdc22d --- /dev/null +++ b/.github/workflows/nix_build.yml @@ -0,0 +1,77 @@ +# Generated from xtask::workflows::nix_build +# Rebuild with `cargo xtask workflows`. +name: nix_build +on: + workflow_call: + inputs: + flake-output: + type: string + default: default + cachix-filter: + type: string +jobs: + build_nix_linux_x86_64: + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-32x64-ubuntu-2004 + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} + GIT_LFS_SKIP_SMUDGE: '1' + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: nix_build::install_nix + uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + - name: nix_build::cachix_action + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad + with: + name: zed + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + cachixArgs: -v + pushFilter: ${{ inputs.cachix-filter }} + - name: nix_build::build + run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + continue-on-error: true + build_nix_mac_aarch64: + if: github.repository_owner == 'zed-industries' + runs-on: self-mini-macos + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + 
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} + GIT_LFS_SKIP_SMUDGE: '1' + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: nix_build::set_path + run: | + echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" + echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" + shell: bash -euxo pipefail {0} + - name: nix_build::cachix_action + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad + with: + name: zed + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + cachixArgs: -v + pushFilter: ${{ inputs.cachix-filter }} + - name: nix_build::build + run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config + shell: bash -euxo pipefail {0} + - name: nix_build::limit_store + run: |- + if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then + nix-collect-garbage -d || true + fi + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + continue-on-error: true diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 2026ee7b730698cd7e40eebcd141f5b8a6ee9d04..c593fea4cdf8538704aa6bab3478c2a3d894af41 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -1,93 +1,149 @@ -name: Release Nightly - -on: - schedule: - # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) - - cron: "0 7 * * *" - push: - tags: - - "nightly" - +# Generated from xtask::workflows::release_nightly +# Rebuild with `cargo xtask workflows`. 
+name: release_nightly env: CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUST_BACKTRACE: 1 + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} - +on: + push: + tags: + - nightly + schedule: + - cron: 0 7 * * * jobs: - style: + check_style: + if: github.repository_owner == 'zed-industries' + runs-on: self-mini-macos + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: 0 + - name: steps::cargo_fmt + run: cargo fmt --all -- --check + shell: bash -euxo pipefail {0} + - name: ./script/clippy + run: ./script/clippy + shell: bash -euxo pipefail {0} timeout-minutes: 60 - name: Check formatting and Clippy lints + run_tests_mac: if: github.repository_owner == 'zed-industries' - runs-on: - - self-hosted - - macOS + runs-on: self-mini-macos steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - fetch-depth: 0 - - - name: Run style checks - uses: ./.github/actions/check_style - - - name: Run clippy - run: ./script/clippy - - tests: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + 
shell: bash -euxo pipefail {0} + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} timeout-minutes: 60 - name: Run tests + run_tests_windows: if: github.repository_owner == 'zed-industries' - runs-on: - - self-hosted - - macOS - needs: style + runs-on: self-32vcpu-windows-2022 steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Run tests - uses: ./.github/actions/run_tests - - windows-tests: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + New-Item -ItemType Directory -Path "./../.cargo" -Force + Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml" + shell: pwsh + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: pwsh + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than.ps1 250 + shell: pwsh + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: pwsh + - name: steps::cleanup_cargo_config + if: always() + run: | + Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue + shell: pwsh timeout-minutes: 60 - name: Run tests on Windows + bundle_mac_nightly_x86_64: + needs: + - check_style + - run_tests_mac if: github.repository_owner == 'zed-industries' - runs-on: [self-32vcpu-windows-2022] + runs-on: self-mini-macos + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD 
}} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Configure CI - run: | - New-Item -ItemType Directory -Path "./../.cargo" -Force - Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml" - - - name: Run tests - uses: ./.github/actions/run_tests_windows - - - name: Limit target directory size - run: ./script/clear-target-dir-if-larger-than.ps1 1024 - - - name: Clean CI config file - if: always() - run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue - - bundle-mac: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: release_nightly::set_release_channel_to_nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + shell: bash -euxo pipefail {0} + - name: run_bundling::bundle_mac + run: ./script/bundle-mac x86_64-apple-darwin + shell: bash -euxo pipefail {0} + - name: release_nightly::upload_zed_nightly + run: script/upload-nightly macos x86_64 + shell: bash -euxo pipefail {0} timeout-minutes: 60 - name: Create a macOS bundle + bundle_mac_nightly_aarch64: + needs: + - check_style + - run_tests_mac if: 
github.repository_owner == 'zed-industries' - runs-on: - - self-mini-macos - needs: tests + runs-on: self-mini-macos env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} @@ -95,161 +151,162 @@ jobs: APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - - name: Install Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: "18" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Set release channel to nightly - run: | - set -eu - version=$(git rev-parse --short HEAD) - echo "Publishing version: ${version} on release channel nightly" - echo "nightly" > crates/zed/RELEASE_CHANNEL - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Create macOS app bundle - run: script/bundle-mac - - - name: Upload Zed Nightly - run: script/upload-nightly macos - - bundle-linux-x86: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: release_nightly::set_release_channel_to_nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + shell: bash -euxo pipefail {0} + - name: 
run_bundling::bundle_mac + run: ./script/bundle-mac aarch64-apple-darwin + shell: bash -euxo pipefail {0} + - name: release_nightly::upload_zed_nightly + run: script/upload-nightly macos aarch64 + shell: bash -euxo pipefail {0} timeout-minutes: 60 - name: Create a Linux *.tar.gz bundle for x86 + bundle_linux_nightly_x86_64: + needs: + - check_style + - run_tests_mac if: github.repository_owner == 'zed-industries' - runs-on: - - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc - needs: tests + runs-on: namespace-profile-32x64-ubuntu-2004 steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Install Linux dependencies - run: ./script/linux && ./script/install-mold 2.34.0 - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 - - - name: Set release channel to nightly - run: | - set -euo pipefail - version=$(git rev-parse --short HEAD) - echo "Publishing version: ${version} on release channel nightly" - echo "nightly" > crates/zed/RELEASE_CHANNEL - - - name: Create Linux .tar.gz bundle - run: script/bundle-linux - - - name: Upload Zed Nightly - run: script/upload-nightly linux-targz - - bundle-linux-arm: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: release_nightly::add_rust_to_path + run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" + shell: bash -euxo pipefail {0} + - name: ./script/linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: 
./script/install-mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 100 + shell: bash -euxo pipefail {0} + - name: release_nightly::set_release_channel_to_nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: release_nightly::upload_zed_nightly + run: script/upload-nightly linux-targz x86_64 + shell: bash -euxo pipefail {0} timeout-minutes: 60 - name: Create a Linux *.tar.gz bundle for ARM + bundle_linux_nightly_aarch64: + needs: + - check_style + - run_tests_mac if: github.repository_owner == 'zed-industries' - runs-on: - - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc - needs: tests + runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Install Linux dependencies - run: ./script/linux - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 - - - name: Set release channel to nightly - run: | - set -euo pipefail - version=$(git rev-parse --short HEAD) - echo "Publishing version: ${version} on release channel nightly" - echo "nightly" > crates/zed/RELEASE_CHANNEL - - - name: Create Linux .tar.gz bundle - run: script/bundle-linux - - - name: Upload Zed Nightly - run: script/upload-nightly linux-targz - - freebsd: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: 
steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: release_nightly::add_rust_to_path + run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" + shell: bash -euxo pipefail {0} + - name: ./script/linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 100 + shell: bash -euxo pipefail {0} + - name: release_nightly::set_release_channel_to_nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: release_nightly::upload_zed_nightly + run: script/upload-nightly linux-targz aarch64 + shell: bash -euxo pipefail {0} timeout-minutes: 60 - if: false && github.repository_owner == 'zed-industries' - runs-on: github-8vcpu-ubuntu-2404 - needs: tests - name: Build Zed on FreeBSD + bundle_windows_nightly_x86_64: + needs: + - check_style + - run_tests_windows + if: github.repository_owner == 'zed-industries' + runs-on: self-32vcpu-windows-2022 + env: + AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} + ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} + CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} + ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} + FILE_DIGEST: SHA256 + TIMESTAMP_DIGEST: SHA256 + TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - - uses: actions/checkout@v4 - - name: Build FreeBSD remote-server - id: freebsd-build - uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0 - with: - # envs: "MYTOKEN MYTOKEN2" - usesh: true - release: 13.5 - 
copyback: true - prepare: | - pkg install -y \ - bash curl jq git \ - rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli - run: | - freebsd-version - sysctl hw.model - sysctl hw.ncpu - sysctl hw.physmem - sysctl hw.usermem - git config --global --add safe.directory /home/runner/work/zed/zed - rustup-init --profile minimal --default-toolchain none -y - . "$HOME/.cargo/env" - ./script/bundle-freebsd - mkdir -p out/ - mv "target/zed-remote-server-freebsd-x86_64.gz" out/ - rm -rf target/ - cargo clean - - - name: Upload Zed Nightly - run: script/upload-nightly freebsd - - bundle-nix: - name: Build and cache Nix package - needs: tests - secrets: inherit - uses: ./.github/workflows/nix.yml - - bundle-windows-x64: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: release_nightly::set_release_channel_to_nightly + run: | + $ErrorActionPreference = "Stop" + $version = git rev-parse --short HEAD + Write-Host "Publishing version: $version on release channel nightly" + "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: release_nightly::build_zed_installer + run: script/bundle-windows.ps1 -Architecture x86_64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: release_nightly::upload_zed_nightly_windows + run: script/upload-nightly.ps1 -Architecture x86_64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} timeout-minutes: 60 - name: Create a Windows installer + bundle_windows_nightly_aarch64: + needs: + - check_style + - run_tests_windows if: github.repository_owner == 'zed-industries' - runs-on: [self-32vcpu-windows-2022] - needs: windows-tests + runs-on: self-32vcpu-windows-2022 env: AZURE_TENANT_ID: ${{ 
secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} @@ -259,65 +316,135 @@ jobs: ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} FILE_DIGEST: SHA256 TIMESTAMP_DIGEST: SHA256 - TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com" + TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Set release channel to nightly - working-directory: ${{ env.ZED_WORKSPACE }} - run: | - $ErrorActionPreference = "Stop" - $version = git rev-parse --short HEAD - Write-Host "Publishing version: $version on release channel nightly" - "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Build Zed installer - working-directory: ${{ env.ZED_WORKSPACE }} - run: script/bundle-windows.ps1 - - - name: Upload Zed Nightly - working-directory: ${{ env.ZED_WORKSPACE }} - run: script/upload-nightly.ps1 windows - - update-nightly-tag: - name: Update nightly tag + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: release_nightly::set_release_channel_to_nightly + run: | + $ErrorActionPreference = "Stop" + $version = git rev-parse --short HEAD + Write-Host "Publishing version: $version on release channel nightly" + "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: release_nightly::build_zed_installer + run: script/bundle-windows.ps1 -Architecture aarch64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: 
release_nightly::upload_zed_nightly_windows + run: script/upload-nightly.ps1 -Architecture aarch64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + timeout-minutes: 60 + build_nix_linux_x86_64: + needs: + - check_style + - run_tests_mac if: github.repository_owner == 'zed-industries' - runs-on: namespace-profile-2x4-ubuntu-2404 + runs-on: namespace-profile-32x64-ubuntu-2004 + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} + GIT_LFS_SKIP_SMUDGE: '1' + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: nix_build::install_nix + uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + - name: nix_build::cachix_action + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad + with: + name: zed + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + cachixArgs: -v + - name: nix_build::build + run: nix build .#default -L --accept-flake-config + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + continue-on-error: true + build_nix_mac_aarch64: needs: - - bundle-mac - - bundle-linux-x86 - - bundle-linux-arm - - bundle-windows-x64 + - check_style + - run_tests_mac + if: github.repository_owner == 'zed-industries' + runs-on: self-mini-macos + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} + GIT_LFS_SKIP_SMUDGE: '1' steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - fetch-depth: 0 - - - name: Update nightly tag - run: | - if [ "$(git rev-parse 
nightly)" = "$(git rev-parse HEAD)" ]; then - echo "Nightly tag already points to current commit. Skipping tagging." - exit 0 - fi - git config user.name github-actions - git config user.email github-actions@github.com - git tag -f nightly - git push origin nightly --force - - - name: Create Sentry release - uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3 - env: - SENTRY_ORG: zed-dev - SENTRY_PROJECT: zed - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} - with: - environment: production + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: nix_build::set_path + run: | + echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" + echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" + shell: bash -euxo pipefail {0} + - name: nix_build::cachix_action + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad + with: + name: zed + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + cachixArgs: -v + - name: nix_build::build + run: nix build .#default -L --accept-flake-config + shell: bash -euxo pipefail {0} + - name: nix_build::limit_store + run: |- + if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then + nix-collect-garbage -d || true + fi + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + continue-on-error: true + update_nightly_tag: + needs: + - bundle_mac_nightly_x86_64 + - bundle_mac_nightly_aarch64 + - bundle_linux_nightly_x86_64 + - bundle_linux_nightly_aarch64 + - bundle_windows_nightly_x86_64 + - bundle_windows_nightly_aarch64 + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: 0 + - name: release_nightly::update_nightly_tag + run: | + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current 
commit. Skipping tagging." + exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force + shell: bash -euxo pipefail {0} + - name: release_nightly::create_sentry_release + uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c + with: + environment: production + env: + SENTRY_ORG: zed-dev + SENTRY_PROJECT: zed + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + timeout-minutes: 60 diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml new file mode 100644 index 0000000000000000000000000000000000000000..98354d0b7895848196ccd58d0d953d2856cfa8c2 --- /dev/null +++ b/.github/workflows/run_bundling.yml @@ -0,0 +1,233 @@ +# Generated from xtask::workflows::run_bundling +# Rebuild with `cargo xtask workflows`. +name: run_bundling +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} +on: + pull_request: + types: + - labeled + - synchronize +jobs: + bundle_mac_x86_64: + if: |- + (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + runs-on: self-mini-macos + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + 
node-version: '20' + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: run_bundling::bundle_mac + run: ./script/bundle-mac x86_64-apple-darwin + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg + path: target/x86_64-apple-darwin/release/Zed.dmg + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz + path: target/zed-remote-server-macos-x86_64.gz + timeout-minutes: 60 + bundle_mac_arm64: + if: |- + (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + runs-on: self-mini-macos + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::setup_sentry + uses: 
matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: run_bundling::bundle_mac + run: ./script/bundle-mac aarch64-apple-darwin + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg + path: target/aarch64-apple-darwin/release/Zed.dmg + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz + path: target/zed-remote-server-macos-aarch64.gz + timeout-minutes: 60 + bundle_linux_x86_64: + if: |- + (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + runs-on: namespace-profile-32x64-ubuntu-2004 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: ./script/linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: ./script/install-mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha 
}}-x86_64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + path: target/release/zed-*.tar.gz + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + path: target/release/zed-remote-server-*.tar.gz + timeout-minutes: 60 + bundle_linux_arm64: + if: |- + (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: ./script/linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + path: target/release/zed-*.tar.gz + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: 
zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + path: target/release/zed-remote-server-*.tar.gz + timeout-minutes: 60 + bundle_windows_x86_64: + if: |- + (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + runs-on: self-32vcpu-windows-2022 + env: + AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} + ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} + CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} + ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} + FILE_DIGEST: SHA256 + TIMESTAMP_DIGEST: SHA256 + TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: run_bundling::bundle_windows + run: script/bundle-windows.ps1 -Architecture x86_64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe + path: ${{ env.SETUP_PATH }} + timeout-minutes: 60 + bundle_windows_arm64: + if: |- + (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + runs-on: self-32vcpu-windows-2022 + env: + AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} + 
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} + ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} + CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} + ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} + FILE_DIGEST: SHA256 + TIMESTAMP_DIGEST: SHA256 + TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: run_bundling::bundle_windows + run: script/bundle-windows.ps1 -Architecture aarch64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe + path: ${{ env.SETUP_PATH }} + timeout-minutes: 60 +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} + cancel-in-progress: true diff --git a/.github/workflows/unit_evals.yml b/.github/workflows/unit_evals.yml index c03cf8b087188f3e10a298e52a8278e63765c4f0..53ed33a1af300d6b641b3b9430de0bb6846b27cc 100644 --- a/.github/workflows/unit_evals.yml +++ b/.github/workflows/unit_evals.yml @@ -63,7 +63,7 @@ jobs: - name: Run unit evals shell: bash -euxo pipefail {0} - run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)' + run: cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} diff --git a/Cargo.lock b/Cargo.lock index f3f558c32ea849e3d0433471fb528959ca58635d..1e1ccad1a7dcb3257c9cf471c166655c23056a07 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -4902,6 +4902,18 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "derive_setters" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "deunicode" version = "1.6.2" @@ -6942,6 +6954,33 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "gh-workflow" +version = "0.8.0" +source = "git+https://github.com/zed-industries/gh-workflow?rev=0090c6b6ef82fff02bc8616645953e778d1acc08#0090c6b6ef82fff02bc8616645953e778d1acc08" +dependencies = [ + "async-trait", + "derive_more 2.0.1", + "derive_setters", + "gh-workflow-macros", + "indexmap 2.11.4", + "merge", + "serde", + "serde_json", + "serde_yaml", + "strum_macros 0.27.2", +] + +[[package]] +name = "gh-workflow-macros" +version = "0.8.0" +source = "git+https://github.com/zed-industries/gh-workflow?rev=0090c6b6ef82fff02bc8616645953e778d1acc08#0090c6b6ef82fff02bc8616645953e778d1acc08" +dependencies = [ + "heck 0.5.0", + "quote", + "syn 2.0.106", +] + [[package]] name = "gif" version = "0.13.3" @@ -7224,6 +7263,7 @@ dependencies = [ "async-task", "backtrace", "bindgen 0.71.1", + "bitflags 2.9.4", "blade-graphics", "blade-macros", "blade-util", @@ -7303,6 +7343,7 @@ dependencies = [ "wayland-cursor", "wayland-protocols 0.31.2", "wayland-protocols-plasma", + "wayland-protocols-wlr", "windows 0.61.3", "windows-core 0.61.2", "windows-numerics", @@ -9811,6 +9852,28 @@ dependencies = [ "gpui", ] +[[package]] +name = "merge" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10bbef93abb1da61525bbc45eeaff6473a41907d19f8f9aa5168d214e10693e9" +dependencies = [ + "merge_derive", + "num-traits", +] + +[[package]] +name = "merge_derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"209d075476da2e63b4b29e72a2ef627b840589588e71400a25e3565c4f849d07" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "metal" version = "0.29.0" @@ -9837,7 +9900,7 @@ dependencies = [ "pretty_assertions", "serde_json", "serde_json_lenient", - "settings", + "settings_json", "streaming-iterator", "tree-sitter", "tree-sitter-json", @@ -12800,6 +12863,30 @@ dependencies = [ "toml_edit 0.23.7", ] +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro-error-attr2" version = "2.0.0" @@ -13038,7 +13125,6 @@ dependencies = [ "paths", "rope", "serde", - "serde_json", "text", "util", "uuid", @@ -14226,7 +14312,6 @@ dependencies = [ "log", "rand 0.9.2", "rayon", - "regex", "sum_tree", "unicode-segmentation", "util", @@ -15224,6 +15309,19 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.11.4", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + [[package]] name = "serial2" version = "0.2.33" @@ -15260,6 +15358,7 @@ dependencies = [ "indoc", "inventory", "log", + "migrator", "paths", "pretty_assertions", "release_channel", @@ -15268,17 +15367,31 @@ dependencies = [ "serde", "serde_json", "serde_json_lenient", - 
"serde_path_to_error", "serde_repr", "serde_with", + "settings_json", "settings_macros", "smallvec", "strum 0.27.2", + "unindent", + "util", + "zlog", +] + +[[package]] +name = "settings_json" +version = "0.1.0" +dependencies = [ + "anyhow", + "pretty_assertions", + "serde", + "serde_json", + "serde_json_lenient", + "serde_path_to_error", "tree-sitter", "tree-sitter-json", "unindent", "util", - "zlog", ] [[package]] @@ -16382,7 +16495,7 @@ dependencies = [ "editor", "file_icons", "gpui", - "multi_buffer", + "language", "ui", "workspace", ] @@ -17034,6 +17147,7 @@ dependencies = [ "parking_lot", "postage", "rand 0.9.2", + "regex", "rope", "smallvec", "sum_tree", @@ -17326,6 +17440,7 @@ dependencies = [ "anyhow", "auto_update", "call", + "channel", "chrono", "client", "cloud_llm_client", @@ -18396,6 +18511,12 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + [[package]] name = "untrusted" version = "0.9.0" @@ -18662,6 +18783,7 @@ dependencies = [ "serde", "serde_json", "settings", + "settings_ui", "task", "text", "theme", @@ -19369,6 +19491,19 @@ dependencies = [ "wayland-scanner", ] +[[package]] +name = "wayland-protocols-wlr" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd94963ed43cf9938a090ca4f7da58eb55325ec8200c3848963e98dc25b78ec" +dependencies = [ + "bitflags 2.9.4", + "wayland-backend", + "wayland-client", + "wayland-protocols 0.32.9", + "wayland-scanner", +] + [[package]] name = "wayland-scanner" version = "0.31.7" @@ -20802,9 +20937,12 @@ name = "xtask" version = "0.1.0" dependencies = [ "anyhow", + "backtrace", "cargo_metadata", "cargo_toml", "clap", + "gh-workflow", + "indexmap 2.11.4", 
"indoc", "toml 0.8.23", "toml_edit 0.22.27", @@ -20990,7 +21128,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.211.0" +version = "0.212.0" dependencies = [ "acp_tools", "activity_indicator", @@ -21517,6 +21655,7 @@ dependencies = [ "clock", "cloud_llm_client", "cloud_zeta2_prompt", + "collections", "edit_prediction", "edit_prediction_context", "feature_flags", @@ -21530,6 +21669,7 @@ dependencies = [ "pretty_assertions", "project", "release_channel", + "schemars 1.0.4", "serde", "serde_json", "settings", @@ -21544,6 +21684,7 @@ dependencies = [ name = "zeta2_tools" version = "0.1.0" dependencies = [ + "anyhow", "chrono", "clap", "client", diff --git a/Cargo.toml b/Cargo.toml index d69d768f33eb59e9f2c6e3194a976e612d5944ab..369082ff16736f9f682ad8c5bd09634c03434609 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -148,6 +148,7 @@ members = [ "crates/semantic_version", "crates/session", "crates/settings", + "crates/settings_json", "crates/settings_macros", "crates/settings_profile_selector", "crates/settings_ui", @@ -380,6 +381,7 @@ search = { path = "crates/search" } semantic_version = { path = "crates/semantic_version" } session = { path = "crates/session" } settings = { path = "crates/settings" } +settings_json = { path = "crates/settings_json" } settings_macros = { path = "crates/settings_macros" } settings_ui = { path = "crates/settings_ui" } snippet = { path = "crates/snippet" } @@ -506,6 +508,7 @@ fork = "0.2.0" futures = "0.3" futures-batch = "0.6.1" futures-lite = "1.13" +gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "0090c6b6ef82fff02bc8616645953e778d1acc08" } git2 = { version = "0.20.1", default-features = false } globset = "0.4" handlebars = "4.3" diff --git a/Procfile.postgrest b/Procfile.postgrest deleted file mode 100644 index acab58e086ca15426b58529e2055b4126f65467a..0000000000000000000000000000000000000000 --- a/Procfile.postgrest +++ /dev/null @@ -1,2 +0,0 @@ -app: postgrest crates/collab/postgrest_app.conf 
-llm: postgrest crates/collab/postgrest_llm.conf diff --git a/Procfile.web b/Procfile.web index 814055514498124d1f20b1fed51f23a5809819a9..63190fc2ee1f57b3576236fafa08554b9e67b575 100644 --- a/Procfile.web +++ b/Procfile.web @@ -1,2 +1 @@ -postgrest_llm: postgrest crates/collab/postgrest_llm.conf website: cd ../zed.dev; npm run dev -- --port=3000 diff --git a/REVIEWERS.conl b/REVIEWERS.conl index d5254c7aaf394f4fae33be391fde84c567c37a53..78563fe466f38c644cd6a19c76ffe231a086fd56 100644 --- a/REVIEWERS.conl +++ b/REVIEWERS.conl @@ -38,6 +38,7 @@ linux = @smitbarmase = @p1n3appl3 = @cole-miller + = @probably-neb windows = @reflectronic @@ -76,8 +77,9 @@ crashes = @Veykril ai + = @rtfeldman = @danilo-leal - = @benbrandt + = @benbrandt design = @danilo-leal @@ -97,6 +99,7 @@ languages = @Veykril = @smitbarmase = @SomeoneToIgnore + = @probably-neb project_panel = @smitbarmase @@ -104,3 +107,6 @@ project_panel tasks = @SomeoneToIgnore = @Veykril + +docs + = @probably-neb diff --git a/assets/icons/link.svg b/assets/icons/link.svg new file mode 100644 index 0000000000000000000000000000000000000000..739d41b231f0e01945fc1fd526632964f921a938 --- /dev/null +++ b/assets/icons/link.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 3d94edafcdfc1d9acec5328cade996459547996b..d745474e09e1730127522e8c3170356864fd83b2 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -609,7 +609,7 @@ "ctrl-alt-b": "workspace::ToggleRightDock", "ctrl-b": "workspace::ToggleLeftDock", "ctrl-j": "workspace::ToggleBottomDock", - "ctrl-alt-y": "workspace::CloseAllDocks", + "ctrl-alt-y": "workspace::ToggleAllDocks", "ctrl-alt-0": "workspace::ResetActiveDockSize", // For 0px parameter, uses UI font size value. 
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }], @@ -731,6 +731,14 @@ "tab": "editor::ComposeCompletion" } }, + { + "context": "Editor && in_snippet", + "use_key_equivalents": true, + "bindings": { + "alt-right": "editor::NextSnippetTabstop", + "alt-left": "editor::PreviousSnippetTabstop" + } + }, // Bindings for accepting edit predictions // // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is @@ -1298,5 +1306,12 @@ "ctrl-enter up": "dev::Zeta2RatePredictionPositive", "ctrl-enter down": "dev::Zeta2RatePredictionNegative" } + }, + { + "context": "Zeta2Context > Editor", + "bindings": { + "alt-left": "dev::Zeta2ContextGoBack", + "alt-right": "dev::Zeta2ContextGoForward" + } } ] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 6c3f47cb45909c1e014e76c9d414b68f23632a14..50fa44be02703e0a0935e14de501070c53c4df87 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -679,7 +679,7 @@ "cmd-alt-b": "workspace::ToggleRightDock", "cmd-r": "workspace::ToggleRightDock", "cmd-j": "workspace::ToggleBottomDock", - "alt-cmd-y": "workspace::CloseAllDocks", + "alt-cmd-y": "workspace::ToggleAllDocks", // For 0px parameter, uses UI font size value. 
"ctrl-alt-0": "workspace::ResetActiveDockSize", "ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }], @@ -801,6 +801,14 @@ "tab": "editor::ComposeCompletion" } }, + { + "context": "Editor && in_snippet", + "use_key_equivalents": true, + "bindings": { + "alt-right": "editor::NextSnippetTabstop", + "alt-left": "editor::PreviousSnippetTabstop" + } + }, { "context": "Editor && edit_prediction", "bindings": { @@ -1404,5 +1412,12 @@ "cmd-enter up": "dev::Zeta2RatePredictionPositive", "cmd-enter down": "dev::Zeta2RatePredictionNegative" } + }, + { + "context": "Zeta2Context > Editor", + "bindings": { + "alt-left": "dev::Zeta2ContextGoBack", + "alt-right": "dev::Zeta2ContextGoForward" + } } ] diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 5b96d20633b573d939e49a3ea60c4afc5d7ca721..ef454ff12d2a437bda4b3fba0f214651a0c74396 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -614,7 +614,7 @@ "ctrl-alt-b": "workspace::ToggleRightDock", "ctrl-b": "workspace::ToggleLeftDock", "ctrl-j": "workspace::ToggleBottomDock", - "ctrl-shift-y": "workspace::CloseAllDocks", + "ctrl-shift-y": "workspace::ToggleAllDocks", "alt-r": "workspace::ResetActiveDockSize", // For 0px parameter, uses UI font size value. "shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }], @@ -736,6 +736,14 @@ "tab": "editor::ComposeCompletion" } }, + { + "context": "Editor && in_snippet", + "use_key_equivalents": true, + "bindings": { + "alt-right": "editor::NextSnippetTabstop", + "alt-left": "editor::PreviousSnippetTabstop" + } + }, // Bindings for accepting edit predictions // // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. 
This is @@ -1327,5 +1335,12 @@ "ctrl-enter up": "dev::Zeta2RatePredictionPositive", "ctrl-enter down": "dev::Zeta2RatePredictionNegative" } + }, + { + "context": "Zeta2Context > Editor", + "bindings": { + "alt-left": "dev::Zeta2ContextGoBack", + "alt-right": "dev::Zeta2ContextGoForward" + } } ] diff --git a/assets/keymaps/linux/jetbrains.json b/assets/keymaps/linux/jetbrains.json index a5e387c014e1315bf51cfdf7c5226adaa8a20b27..cf28c43dbd7f8335f30ef7702e584bea5c0ba5e0 100644 --- a/assets/keymaps/linux/jetbrains.json +++ b/assets/keymaps/linux/jetbrains.json @@ -91,7 +91,7 @@ { "context": "Workspace", "bindings": { - "ctrl-shift-f12": "workspace::CloseAllDocks", + "ctrl-shift-f12": "workspace::ToggleAllDocks", "ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }], "alt-shift-f10": "task::Spawn", "ctrl-e": "file_finder::Toggle", diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index 2c757c3a30a08eb55e8344945ab66baf91ce0c6b..e5e5aeb0b8516285136438d40b57fb17fc9a9777 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -93,7 +93,7 @@ { "context": "Workspace", "bindings": { - "cmd-shift-f12": "workspace::CloseAllDocks", + "cmd-shift-f12": "workspace::ToggleAllDocks", "cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }], "ctrl-alt-r": "task::Spawn", "cmd-e": "file_finder::Toggle", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index da7491a0070cc74d8329d9bae65d445896b77386..d6bdff1cd02fcd0bfb31fb48d2c47a321c54de2c 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -220,6 +220,8 @@ "[ {": ["vim::UnmatchedBackward", { "char": "{" }], "] )": ["vim::UnmatchedForward", { "char": ")" }], "[ (": ["vim::UnmatchedBackward", { "char": "(" }], + "[ r": "vim::GoToPreviousReference", + "] r": "vim::GoToNextReference", // tree-sitter related commands "[ x": "vim::SelectLargerSyntaxNode", "] x": "vim::SelectSmallerSyntaxNode" @@ -432,7 +434,7 @@ 
"shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }], "shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }], "alt-.": "vim::RepeatFind", - + // Changes "shift-r": "editor::Paste", "`": "vim::ConvertToLowerCase", @@ -442,14 +444,14 @@ "ctrl-r": "vim::Redo", "y": "vim::HelixYank", "p": "vim::HelixPaste", - "shift-p": ["vim::HelixPaste", { "before": true }], + "shift-p": ["vim::HelixPaste", { "before": true }], ">": "vim::Indent", "<": "vim::Outdent", "=": "vim::AutoIndent", "d": "vim::HelixDelete", "c": "vim::HelixSubstitute", "alt-c": "vim::HelixSubstituteNoYank", - + // Selection manipulation "s": "vim::HelixSelectRegex", "alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }], @@ -466,7 +468,7 @@ "alt-i": "editor::SelectSmallerSyntaxNode", "alt-p": "editor::SelectPreviousSyntaxNode", "alt-n": "editor::SelectNextSyntaxNode", - + // Goto mode "g e": "vim::EndOfDocument", "g h": "vim::StartOfLine", @@ -477,11 +479,11 @@ "g b": "vim::WindowBottom", "g r": "editor::FindAllReferences", // zed specific "g n": "pane::ActivateNextItem", - "shift-l": "pane::ActivateNextItem", + "shift-l": "pane::ActivateNextItem", "g p": "pane::ActivatePreviousItem", "shift-h": "pane::ActivatePreviousItem", "g .": "vim::HelixGotoLastModification", // go to last modification - + // Window mode "space w h": "workspace::ActivatePaneLeft", "space w l": "workspace::ActivatePaneRight", @@ -512,7 +514,7 @@ "]": ["vim::PushHelixNext", { "around": true }], "[": ["vim::PushHelixPrevious", { "around": true }], "g q": "vim::PushRewrap", - "g w": "vim::PushRewrap", + "g w": "vim::PushRewrap" // "tab": "pane::ActivateNextItem", // "shift-tab": "pane::ActivatePrevItem", } @@ -1017,5 +1019,16 @@ // and Windows. 
"alt-l": "editor::AcceptEditPrediction" } + }, + { + "context": "SettingsWindow > NavigationMenu && !search", + "bindings": { + "l": "settings_editor::ExpandNavEntry", + "h": "settings_editor::CollapseNavEntry", + "k": "settings_editor::FocusPreviousNavEntry", + "j": "settings_editor::FocusNextNavEntry", + "g g": "settings_editor::FocusFirstNavEntry", + "shift-g": "settings_editor::FocusLastNavEntry" + } } ] diff --git a/assets/prompts/assistant_system_prompt.hbs b/assets/prompts/assistant_system_prompt.hbs deleted file mode 100644 index f47c1ffa908b861eb81d37642a7634616c92a0d9..0000000000000000000000000000000000000000 --- a/assets/prompts/assistant_system_prompt.hbs +++ /dev/null @@ -1,179 +0,0 @@ -You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices. - -## Communication - -1. Be conversational but professional. -2. Refer to the user in the second person and yourself in the first person. -3. Format your responses in markdown. Use backticks to format file, directory, function, and class names. -4. NEVER lie or make things up. -5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing. - -{{#if has_tools}} -## Tool Use - -1. Make sure to adhere to the tools schema. -2. Provide every required argument. -3. DO NOT use tools to access items that are already available in the context section. -4. Use only the tools that are currently available. -5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off. -6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers. -7. Avoid HTML entity escaping - use plain characters instead. 
- -## Searching and Reading - -If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions. - -{{! TODO: If there are files, we should mention it but otherwise omit that fact }} -If appropriate, use tool calls to explore the current project, which contains the following root directories: - -{{#each worktrees}} -- `{{abs_path}}` -{{/each}} - -- Bias towards not asking the user for help if you can find the answer yourself. -- When providing paths to tools, the path should always start with the name of a project root directory listed above. -- Before you read or edit a file, you must first find the full path. DO NOT ever guess a file path! -{{# if (has_tool 'grep') }} -- When looking for symbols in the project, prefer the `grep` tool. -- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project. -- The user might specify a partial file path. If you don't know the full path, use `find_path` (not `grep`) before you read the file. -{{/if}} -{{else}} -You are being tasked with providing a response, but you have no ability to use tools or to read or write any aspect of the user's system (other than any context the user might have provided to you). - -As such, if you need the user to perform any actions for you, you must request them explicitly. Bias towards giving a response to the best of your ability, and then making requests for the user to take action (e.g. to give you more context) only optionally. - -The one exception to this is if the user references something you don't know about - for example, the name of a source code file, function, type, or other piece of code that you have no awareness of. In this case, you MUST NOT MAKE SOMETHING UP, or assume you know what that thing is or how it works. Instead, you must ask the user for clarification rather than giving a response. 
-{{/if}} - -## Code Block Formatting - -Whenever you mention a code block, you MUST use ONLY use the following format: -```path/to/Something.blah#L123-456 -(code goes here) -``` -The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah -is a path in the project. (If there is no valid path in the project, then you can use -/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser -does not understand the more common ```language syntax, or bare ``` blocks. It only -understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again. -Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP! -You have made a mistake. You can only ever put paths after triple backticks! - -Based on all the information I've gathered, here's a summary of how this system works: -1. The README file is loaded into the system. -2. The system finds the first two headers, including everything in between. In this case, that would be: -```path/to/README.md#L8-12 -# First Header -This is the info under the first header. -## Sub-header -``` -3. Then the system finds the last header in the README: -```path/to/README.md#L27-29 -## Last Header -This is the last header in the README. -``` -4. Finally, it passes this information on to the next process. - - -In Markdown, hash marks signify headings. For example: -```/dev/null/example.md#L1-3 -# Level 1 heading -## Level 2 heading -### Level 3 heading -``` - -Here are examples of ways you must never render code blocks: - -In Markdown, hash marks signify headings. For example: -``` -# Level 1 heading -## Level 2 heading -### Level 3 heading -``` - -This example is unacceptable because it does not include the path. - -In Markdown, hash marks signify headings. 
For example: -```markdown -# Level 1 heading -## Level 2 heading -### Level 3 heading -``` - -This example is unacceptable because it has the language instead of the path. - -In Markdown, hash marks signify headings. For example: - # Level 1 heading - ## Level 2 heading - ### Level 3 heading - -This example is unacceptable because it uses indentation to mark the code block -instead of backticks with a path. - -In Markdown, hash marks signify headings. For example: -```markdown -/dev/null/example.md#L1-3 -# Level 1 heading -## Level 2 heading -### Level 3 heading -``` - -This example is unacceptable because the path is in the wrong place. The path must be directly after the opening backticks. - -{{#if has_tools}} -## Fixing Diagnostics - -1. Make 1-2 attempts at fixing diagnostics, then defer to the user. -2. Never simplify code you've written just to solve diagnostics. Complete, mostly correct code is more valuable than perfect code that doesn't solve the problem. - -## Debugging - -When debugging, only make code changes if you are certain that you can solve the problem. -Otherwise, follow debugging best practices: -1. Address the root cause instead of the symptoms. -2. Add descriptive logging statements and error messages to track variable and code state. -3. Add test functions and statements to isolate the problem. - -{{/if}} -## Calling External APIs - -1. Unless explicitly requested by the user, use the best suited external APIs and packages to solve the task. There is no need to ask the user for permission. -2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data. -3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. 
DO NOT hardcode an API key in a place where it can be exposed) - -## System Information - -Operating System: {{os}} -Default Shell: {{shell}} - -{{#if (or has_rules has_user_rules)}} -## User's Custom Instructions - -The following additional instructions are provided by the user, and should be followed to the best of your ability{{#if has_tools}} without interfering with the tool use guidelines{{/if}}. - -{{#if has_rules}} -There are project rules that apply to these root directories: -{{#each worktrees}} -{{#if rules_file}} -`{{root_name}}/{{rules_file.path_in_worktree}}`: -`````` -{{{rules_file.text}}} -`````` -{{/if}} -{{/each}} -{{/if}} - -{{#if has_user_rules}} -The user has specified the following rules that should be applied: -{{#each user_rules}} - -{{#if title}} -Rules title: {{title}} -{{/if}} -`````` -{{contents}} -`````` -{{/each}} -{{/if}} -{{/if}} diff --git a/assets/settings/default.json b/assets/settings/default.json index 10aa98498b09d4cbcf4f231393df3e9203a0512a..f62cc1844732db2a49dc835a155e861f4268632f 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1329,7 +1329,7 @@ "model": null, "max_tokens": null }, - // Whether edit predictions are enabled when editing text threads. + // Whether edit predictions are enabled when editing text threads in the agent panel. // This setting has no effect if globally disabled. 
"enabled_in_text_threads": true }, @@ -1700,6 +1700,7 @@ "preferred_line_length": 72 }, "Go": { + "hard_tabs": true, "code_actions_on_format": { "source.organizeImports": true }, @@ -1769,7 +1770,8 @@ } }, "Plain Text": { - "allow_rewrap": "anywhere" + "allow_rewrap": "anywhere", + "soft_wrap": "editor_width" }, "Python": { "code_actions_on_format": { diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 402d190b34bb3c730e01b9817d815da53cff288d..a0f0a3ad637a4d212c8bf38f95f2e8424919d6bf 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -6,8 +6,8 @@ { "name": "Gruvbox Dark", "appearance": "dark", - "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "style": { + "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "border": "#5b534dff", "border.variant": "#494340ff", "border.focused": "#303a36ff", @@ -412,8 +412,8 @@ { "name": "Gruvbox Dark Hard", "appearance": "dark", - "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "style": { + "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "border": "#5b534dff", "border.variant": "#494340ff", "border.focused": "#303a36ff", @@ -818,8 +818,8 @@ { "name": "Gruvbox Dark Soft", "appearance": "dark", - "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "style": { + "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "border": "#5b534dff", "border.variant": "#494340ff", "border.focused": "#303a36ff", @@ -1224,8 +1224,8 @@ { "name": "Gruvbox Light", "appearance": "light", - "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "style": { + "accents": ["#cc241dff", "#98971aff", "#d79921ff", 
"#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "border": "#c8b899ff", "border.variant": "#ddcca7ff", "border.focused": "#adc5ccff", @@ -1630,8 +1630,8 @@ { "name": "Gruvbox Light Hard", "appearance": "light", - "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "style": { + "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "border": "#c8b899ff", "border.variant": "#ddcca7ff", "border.focused": "#adc5ccff", @@ -2036,8 +2036,8 @@ { "name": "Gruvbox Light Soft", "appearance": "light", - "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "style": { + "accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"], "border": "#c8b899ff", "border.variant": "#ddcca7ff", "border.focused": "#adc5ccff", diff --git a/ci/Dockerfile.namespace b/ci/Dockerfile.namespace new file mode 100644 index 0000000000000000000000000000000000000000..f370dae194a0a3e614354ba70f65237e27c3382e --- /dev/null +++ b/ci/Dockerfile.namespace @@ -0,0 +1,21 @@ +ARG NAMESPACE_BASE_IMAGE_REF="" + +# Your image must build FROM NAMESPACE_BASE_IMAGE_REF +FROM ${NAMESPACE_BASE_IMAGE_REF} AS base + +# Remove problematic git-lfs packagecloud source +RUN sudo rm -f /etc/apt/sources.list.d/*git-lfs*.list +# Install git and SSH for cloning private repositories +RUN sudo apt-get update && \ + sudo apt-get install -y git openssh-client + +# Clone the Zed repository +RUN git clone https://github.com/zed-industries/zed.git ~/zed + +# Run the Linux installation script +WORKDIR /home/runner/zed +RUN ./script/linux + +# Clean up unnecessary files to reduce image size +RUN sudo apt-get clean && sudo rm -rf \ + /home/runner/zed diff --git a/clippy.toml b/clippy.toml index 4e9f2de8585e74afe76840c59306ad8ed87fd947..0ce7a6cd68d4e8210788eb7a67aa06c742cc8274 100644 --- a/clippy.toml +++ b/clippy.toml @@ -9,6 +9,9 @@ 
disallowed-methods = [ { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" }, { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" }, { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" }, + { path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" }, + { path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" }, + { path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" }, { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." }, { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." 
}, ] diff --git a/compose.yml b/compose.yml index 00a5780b597738260f90020f139627e7d0b0107c..cee63e968b2153235bd47dec1429ccbc5a55db8e 100644 --- a/compose.yml +++ b/compose.yml @@ -33,32 +33,6 @@ services: volumes: - ./livekit.yaml:/livekit.yaml - postgrest_app: - image: docker.io/postgrest/postgrest - container_name: postgrest_app - ports: - - 8081:8081 - environment: - PGRST_DB_URI: postgres://postgres@postgres:5432/zed - volumes: - - ./crates/collab/postgrest_app.conf:/etc/postgrest.conf - command: postgrest /etc/postgrest.conf - depends_on: - - postgres - - postgrest_llm: - image: docker.io/postgrest/postgrest - container_name: postgrest_llm - ports: - - 8082:8082 - environment: - PGRST_DB_URI: postgres://postgres@postgres:5432/zed_llm - volumes: - - ./crates/collab/postgrest_llm.conf:/etc/postgrest.conf - command: postgrest /etc/postgrest.conf - depends_on: - - postgres - stripe-mock: image: docker.io/stripe/stripe-mock:v0.178.0 ports: diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 99c62201fa0c2576e588c5cc7325d525c2d03503..5ecf2be445ecf8afc6a93e2961302758ea0037ae 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -35,7 +35,7 @@ use std::rc::Rc; use std::time::{Duration, Instant}; use std::{fmt::Display, mem, path::PathBuf, sync::Arc}; use ui::App; -use util::{ResultExt, get_default_system_shell_preferring_bash}; +use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use uuid::Uuid; #[derive(Debug)] @@ -95,9 +95,14 @@ pub enum AssistantMessageChunk { } impl AssistantMessageChunk { - pub fn from_str(chunk: &str, language_registry: &Arc, cx: &mut App) -> Self { + pub fn from_str( + chunk: &str, + language_registry: &Arc, + path_style: PathStyle, + cx: &mut App, + ) -> Self { Self::Message { - block: ContentBlock::new(chunk.into(), language_registry, cx), + block: ContentBlock::new(chunk.into(), language_registry, path_style, cx), } } @@ -186,6 
+191,7 @@ impl ToolCall { tool_call: acp::ToolCall, status: ToolCallStatus, language_registry: Arc, + path_style: PathStyle, terminals: &HashMap>, cx: &mut App, ) -> Result { @@ -199,6 +205,7 @@ impl ToolCall { content.push(ToolCallContent::from_acp( item, language_registry.clone(), + path_style, terminals, cx, )?); @@ -223,6 +230,7 @@ impl ToolCall { &mut self, fields: acp::ToolCallUpdateFields, language_registry: Arc, + path_style: PathStyle, terminals: &HashMap>, cx: &mut App, ) -> Result<()> { @@ -260,12 +268,13 @@ impl ToolCall { // Reuse existing content if we can for (old, new) in self.content.iter_mut().zip(content.by_ref()) { - old.update_from_acp(new, language_registry.clone(), terminals, cx)?; + old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?; } for new in content { self.content.push(ToolCallContent::from_acp( new, language_registry.clone(), + path_style, terminals, cx, )?) @@ -450,21 +459,23 @@ impl ContentBlock { pub fn new( block: acp::ContentBlock, language_registry: &Arc, + path_style: PathStyle, cx: &mut App, ) -> Self { let mut this = Self::Empty; - this.append(block, language_registry, cx); + this.append(block, language_registry, path_style, cx); this } pub fn new_combined( blocks: impl IntoIterator, language_registry: Arc, + path_style: PathStyle, cx: &mut App, ) -> Self { let mut this = Self::Empty; for block in blocks { - this.append(block, &language_registry, cx); + this.append(block, &language_registry, path_style, cx); } this } @@ -473,6 +484,7 @@ impl ContentBlock { &mut self, block: acp::ContentBlock, language_registry: &Arc, + path_style: PathStyle, cx: &mut App, ) { if matches!(self, ContentBlock::Empty) @@ -482,7 +494,7 @@ impl ContentBlock { return; } - let new_content = self.block_string_contents(block); + let new_content = self.block_string_contents(block, path_style); match self { ContentBlock::Empty => { @@ -492,7 +504,7 @@ impl ContentBlock { markdown.update(cx, |markdown, cx| 
markdown.append(&new_content, cx)); } ContentBlock::ResourceLink { resource_link } => { - let existing_content = Self::resource_link_md(&resource_link.uri); + let existing_content = Self::resource_link_md(&resource_link.uri, path_style); let combined = format!("{}\n{}", existing_content, new_content); *self = Self::create_markdown_block(combined, language_registry, cx); @@ -511,11 +523,11 @@ impl ContentBlock { } } - fn block_string_contents(&self, block: acp::ContentBlock) -> String { + fn block_string_contents(&self, block: acp::ContentBlock, path_style: PathStyle) -> String { match block { acp::ContentBlock::Text(text_content) => text_content.text, acp::ContentBlock::ResourceLink(resource_link) => { - Self::resource_link_md(&resource_link.uri) + Self::resource_link_md(&resource_link.uri, path_style) } acp::ContentBlock::Resource(acp::EmbeddedResource { resource: @@ -524,14 +536,14 @@ impl ContentBlock { .. }), .. - }) => Self::resource_link_md(&uri), + }) => Self::resource_link_md(&uri, path_style), acp::ContentBlock::Image(image) => Self::image_md(&image), acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(), } } - fn resource_link_md(uri: &str) -> String { - if let Some(uri) = MentionUri::parse(uri).log_err() { + fn resource_link_md(uri: &str, path_style: PathStyle) -> String { + if let Some(uri) = MentionUri::parse(uri, path_style).log_err() { uri.as_link().to_string() } else { uri.to_string() @@ -577,6 +589,7 @@ impl ToolCallContent { pub fn from_acp( content: acp::ToolCallContent, language_registry: Arc, + path_style: PathStyle, terminals: &HashMap>, cx: &mut App, ) -> Result { @@ -584,6 +597,7 @@ impl ToolCallContent { acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new( content, &language_registry, + path_style, cx, ))), acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| { @@ -607,6 +621,7 @@ impl ToolCallContent { &mut self, new: acp::ToolCallContent, language_registry: Arc, + 
path_style: PathStyle, terminals: &HashMap>, cx: &mut App, ) -> Result<()> { @@ -622,7 +637,7 @@ impl ToolCallContent { }; if needs_update { - *self = Self::from_acp(new, language_registry, terminals, cx)?; + *self = Self::from_acp(new, language_registry, path_style, terminals, cx)?; } Ok(()) } @@ -1142,6 +1157,7 @@ impl AcpThread { cx: &mut Context, ) { let language_registry = self.project.read(cx).languages().clone(); + let path_style = self.project.read(cx).path_style(cx); let entries_len = self.entries.len(); if let Some(last_entry) = self.entries.last_mut() @@ -1153,12 +1169,12 @@ impl AcpThread { }) = last_entry { *id = message_id.or(id.take()); - content.append(chunk.clone(), &language_registry, cx); + content.append(chunk.clone(), &language_registry, path_style, cx); chunks.push(chunk); let idx = entries_len - 1; cx.emit(AcpThreadEvent::EntryUpdated(idx)); } else { - let content = ContentBlock::new(chunk.clone(), &language_registry, cx); + let content = ContentBlock::new(chunk.clone(), &language_registry, path_style, cx); self.push_entry( AgentThreadEntry::UserMessage(UserMessage { id: message_id, @@ -1178,6 +1194,7 @@ impl AcpThread { cx: &mut Context, ) { let language_registry = self.project.read(cx).languages().clone(); + let path_style = self.project.read(cx).path_style(cx); let entries_len = self.entries.len(); if let Some(last_entry) = self.entries.last_mut() && let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry @@ -1187,10 +1204,10 @@ impl AcpThread { match (chunks.last_mut(), is_thought) { (Some(AssistantMessageChunk::Message { block }), false) | (Some(AssistantMessageChunk::Thought { block }), true) => { - block.append(chunk, &language_registry, cx) + block.append(chunk, &language_registry, path_style, cx) } _ => { - let block = ContentBlock::new(chunk, &language_registry, cx); + let block = ContentBlock::new(chunk, &language_registry, path_style, cx); if is_thought { chunks.push(AssistantMessageChunk::Thought { block 
}) } else { @@ -1199,7 +1216,7 @@ impl AcpThread { } } } else { - let block = ContentBlock::new(chunk, &language_registry, cx); + let block = ContentBlock::new(chunk, &language_registry, path_style, cx); let chunk = if is_thought { AssistantMessageChunk::Thought { block } } else { @@ -1251,6 +1268,7 @@ impl AcpThread { ) -> Result<()> { let update = update.into(); let languages = self.project.read(cx).languages().clone(); + let path_style = self.project.read(cx).path_style(cx); let ix = match self.index_for_tool_call(update.id()) { Some(ix) => ix, @@ -1267,6 +1285,7 @@ impl AcpThread { meta: None, }), &languages, + path_style, cx, ))], status: ToolCallStatus::Failed, @@ -1286,7 +1305,7 @@ impl AcpThread { match update { ToolCallUpdate::UpdateFields(update) => { let location_updated = update.fields.locations.is_some(); - call.update_fields(update.fields, languages, &self.terminals, cx)?; + call.update_fields(update.fields, languages, path_style, &self.terminals, cx)?; if location_updated { self.resolve_locations(update.id, cx); } @@ -1325,6 +1344,7 @@ impl AcpThread { cx: &mut Context, ) -> Result<(), acp::Error> { let language_registry = self.project.read(cx).languages().clone(); + let path_style = self.project.read(cx).path_style(cx); let id = update.id.clone(); if let Some(ix) = self.index_for_tool_call(&id) { @@ -1332,7 +1352,13 @@ impl AcpThread { unreachable!() }; - call.update_fields(update.fields, language_registry, &self.terminals, cx)?; + call.update_fields( + update.fields, + language_registry, + path_style, + &self.terminals, + cx, + )?; call.status = status; cx.emit(AcpThreadEvent::EntryUpdated(ix)); @@ -1341,6 +1367,7 @@ impl AcpThread { update.try_into()?, status, language_registry, + self.project.read(cx).path_style(cx), &self.terminals, cx, )?; @@ -1620,6 +1647,7 @@ impl AcpThread { let block = ContentBlock::new_combined( message.clone(), self.project.read(cx).languages().clone(), + self.project.read(cx).path_style(cx), cx, ); let request = 
acp::PromptRequest { diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index bbd13da5fa4124546d5457755f2bd2f5d737ccac..b78eac4903a259a1044892fb2c8233f7e973f025 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -7,10 +7,10 @@ use std::{ fmt, ops::RangeInclusive, path::{Path, PathBuf}, - str::FromStr, }; use ui::{App, IconName, SharedString}; use url::Url; +use util::paths::PathStyle; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum MentionUri { @@ -49,7 +49,7 @@ pub enum MentionUri { } impl MentionUri { - pub fn parse(input: &str) -> Result { + pub fn parse(input: &str, path_style: PathStyle) -> Result { fn parse_line_range(fragment: &str) -> Result> { let range = fragment .strip_prefix("L") @@ -74,25 +74,34 @@ impl MentionUri { let path = url.path(); match url.scheme() { "file" => { - let path = url.to_file_path().ok().context("Extracting file path")?; + let path = if path_style.is_windows() { + path.trim_start_matches("/") + } else { + path + }; + if let Some(fragment) = url.fragment() { let line_range = parse_line_range(fragment)?; if let Some(name) = single_query_param(&url, "symbol")? 
{ Ok(Self::Symbol { name, - abs_path: path, + abs_path: path.into(), line_range, }) } else { Ok(Self::Selection { - abs_path: Some(path), + abs_path: Some(path.into()), line_range, }) } } else if input.ends_with("/") { - Ok(Self::Directory { abs_path: path }) + Ok(Self::Directory { + abs_path: path.into(), + }) } else { - Ok(Self::File { abs_path: path }) + Ok(Self::File { + abs_path: path.into(), + }) } } "zed" => { @@ -213,18 +222,14 @@ impl MentionUri { pub fn to_uri(&self) -> Url { match self { MentionUri::File { abs_path } => { - let mut url = Url::parse("zed:///").unwrap(); - url.set_path("/agent/file"); - url.query_pairs_mut() - .append_pair("path", &abs_path.to_string_lossy()); + let mut url = Url::parse("file:///").unwrap(); + url.set_path(&abs_path.to_string_lossy()); url } MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(), MentionUri::Directory { abs_path } => { - let mut url = Url::parse("zed:///").unwrap(); - url.set_path("/agent/directory"); - url.query_pairs_mut() - .append_pair("path", &abs_path.to_string_lossy()); + let mut url = Url::parse("file:///").unwrap(); + url.set_path(&abs_path.to_string_lossy()); url } MentionUri::Symbol { @@ -232,10 +237,9 @@ impl MentionUri { name, line_range, } => { - let mut url = Url::parse("zed:///").unwrap(); - url.set_path(&format!("/agent/symbol/{name}")); - url.query_pairs_mut() - .append_pair("path", &abs_path.to_string_lossy()); + let mut url = Url::parse("file:///").unwrap(); + url.set_path(&abs_path.to_string_lossy()); + url.query_pairs_mut().append_pair("symbol", name); url.set_fragment(Some(&format!( "L{}:{}", line_range.start() + 1, @@ -247,13 +251,14 @@ impl MentionUri { abs_path, line_range, } => { - let mut url = Url::parse("zed:///").unwrap(); - if let Some(abs_path) = abs_path { - url.set_path("/agent/selection"); - url.query_pairs_mut() - .append_pair("path", &abs_path.to_string_lossy()); + let mut url = if let Some(path) = abs_path { + let mut url = 
Url::parse("file:///").unwrap(); + url.set_path(&path.to_string_lossy()); + url } else { + let mut url = Url::parse("zed:///").unwrap(); url.set_path("/agent/untitled-buffer"); + url }; url.set_fragment(Some(&format!( "L{}:{}", @@ -288,14 +293,6 @@ impl MentionUri { } } -impl FromStr for MentionUri { - type Err = anyhow::Error; - - fn from_str(s: &str) -> anyhow::Result { - Self::parse(s) - } -} - pub struct MentionLink<'a>(&'a MentionUri); impl fmt::Display for MentionLink<'_> { @@ -338,93 +335,81 @@ mod tests { #[test] fn test_parse_file_uri() { - let old_uri = uri!("file:///path/to/file.rs"); - let parsed = MentionUri::parse(old_uri).unwrap(); + let file_uri = uri!("file:///path/to/file.rs"); + let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::File { abs_path } => { - assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs")); + assert_eq!(abs_path, Path::new(path!("/path/to/file.rs"))); } _ => panic!("Expected File variant"), } - let new_uri = parsed.to_uri().to_string(); - assert!(new_uri.starts_with("zed:///agent/file")); - assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed); + assert_eq!(parsed.to_uri().to_string(), file_uri); } #[test] fn test_parse_directory_uri() { - let old_uri = uri!("file:///path/to/dir/"); - let parsed = MentionUri::parse(old_uri).unwrap(); + let file_uri = uri!("file:///path/to/dir/"); + let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Directory { abs_path } => { - assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/")); + assert_eq!(abs_path, Path::new(path!("/path/to/dir/"))); } _ => panic!("Expected Directory variant"), } - let new_uri = parsed.to_uri().to_string(); - assert!(new_uri.starts_with("zed:///agent/directory")); - assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed); + assert_eq!(parsed.to_uri().to_string(), file_uri); } #[test] fn test_to_directory_uri_without_slash() { let uri = 
MentionUri::Directory { - abs_path: PathBuf::from(path!("/path/to/dir")), + abs_path: PathBuf::from(path!("/path/to/dir/")), }; - let uri_string = uri.to_uri().to_string(); - assert!(uri_string.starts_with("zed:///agent/directory")); - assert_eq!(MentionUri::parse(&uri_string).unwrap(), uri); + let expected = uri!("file:///path/to/dir/"); + assert_eq!(uri.to_uri().to_string(), expected); } #[test] fn test_parse_symbol_uri() { - let old_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); - let parsed = MentionUri::parse(old_uri).unwrap(); + let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); + let parsed = MentionUri::parse(symbol_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Symbol { abs_path: path, name, line_range, } => { - assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs")); + assert_eq!(path, Path::new(path!("/path/to/file.rs"))); assert_eq!(name, "MySymbol"); assert_eq!(line_range.start(), &9); assert_eq!(line_range.end(), &19); } _ => panic!("Expected Symbol variant"), } - let new_uri = parsed.to_uri().to_string(); - assert!(new_uri.starts_with("zed:///agent/symbol/MySymbol")); - assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed); + assert_eq!(parsed.to_uri().to_string(), symbol_uri); } #[test] fn test_parse_selection_uri() { - let old_uri = uri!("file:///path/to/file.rs#L5:15"); - let parsed = MentionUri::parse(old_uri).unwrap(); + let selection_uri = uri!("file:///path/to/file.rs#L5:15"); + let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Selection { abs_path: path, line_range, } => { - assert_eq!( - path.as_ref().unwrap().to_str().unwrap(), - path!("/path/to/file.rs") - ); + assert_eq!(path.as_ref().unwrap(), Path::new(path!("/path/to/file.rs"))); assert_eq!(line_range.start(), &4); assert_eq!(line_range.end(), &14); } _ => panic!("Expected Selection variant"), } - let new_uri = parsed.to_uri().to_string(); - 
assert!(new_uri.starts_with("zed:///agent/selection")); - assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed); + assert_eq!(parsed.to_uri().to_string(), selection_uri); } #[test] fn test_parse_untitled_selection_uri() { let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10"); - let parsed = MentionUri::parse(selection_uri).unwrap(); + let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Selection { abs_path: None, @@ -441,7 +426,7 @@ mod tests { #[test] fn test_parse_thread_uri() { let thread_uri = "zed:///agent/thread/session123?name=Thread+name"; - let parsed = MentionUri::parse(thread_uri).unwrap(); + let parsed = MentionUri::parse(thread_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Thread { id: thread_id, @@ -458,7 +443,7 @@ mod tests { #[test] fn test_parse_rule_uri() { let rule_uri = "zed:///agent/rule/d8694ff2-90d5-4b6f-be33-33c1763acd52?name=Some+rule"; - let parsed = MentionUri::parse(rule_uri).unwrap(); + let parsed = MentionUri::parse(rule_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Rule { id, name } => { assert_eq!(id.to_string(), "d8694ff2-90d5-4b6f-be33-33c1763acd52"); @@ -472,7 +457,7 @@ mod tests { #[test] fn test_parse_fetch_http_uri() { let http_uri = "http://example.com/path?query=value#fragment"; - let parsed = MentionUri::parse(http_uri).unwrap(); + let parsed = MentionUri::parse(http_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Fetch { url } => { assert_eq!(url.to_string(), http_uri); @@ -485,7 +470,7 @@ mod tests { #[test] fn test_parse_fetch_https_uri() { let https_uri = "https://example.com/api/endpoint"; - let parsed = MentionUri::parse(https_uri).unwrap(); + let parsed = MentionUri::parse(https_uri, PathStyle::local()).unwrap(); match &parsed { MentionUri::Fetch { url } => { assert_eq!(url.to_string(), https_uri); @@ -497,40 +482,55 @@ mod tests { #[test] fn test_invalid_scheme() { - 
assert!(MentionUri::parse("ftp://example.com").is_err()); - assert!(MentionUri::parse("ssh://example.com").is_err()); - assert!(MentionUri::parse("unknown://example.com").is_err()); + assert!(MentionUri::parse("ftp://example.com", PathStyle::local()).is_err()); + assert!(MentionUri::parse("ssh://example.com", PathStyle::local()).is_err()); + assert!(MentionUri::parse("unknown://example.com", PathStyle::local()).is_err()); } #[test] fn test_invalid_zed_path() { - assert!(MentionUri::parse("zed:///invalid/path").is_err()); - assert!(MentionUri::parse("zed:///agent/unknown/test").is_err()); + assert!(MentionUri::parse("zed:///invalid/path", PathStyle::local()).is_err()); + assert!(MentionUri::parse("zed:///agent/unknown/test", PathStyle::local()).is_err()); } #[test] fn test_invalid_line_range_format() { // Missing L prefix - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#10:20")).is_err()); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#10:20"), PathStyle::local()).is_err() + ); // Missing colon separator - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1020")).is_err()); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#L1020"), PathStyle::local()).is_err() + ); // Invalid numbers - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc")).is_err()); - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20")).is_err()); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc"), PathStyle::local()).is_err() + ); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20"), PathStyle::local()).is_err() + ); } #[test] fn test_invalid_query_parameters() { // Invalid query parameter name - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:20?invalid=test")).is_err()); + assert!( + MentionUri::parse( + uri!("file:///path/to/file.rs#L10:20?invalid=test"), + PathStyle::local() + ) + .is_err() + ); // Too many query parameters assert!( - MentionUri::parse(uri!( - 
"file:///path/to/file.rs#L10:20?symbol=test&another=param" - )) + MentionUri::parse( + uri!("file:///path/to/file.rs#L10:20?symbol=test&another=param"), + PathStyle::local() + ) .is_err() ); } @@ -538,8 +538,14 @@ mod tests { #[test] fn test_zero_based_line_numbers() { // Test that 0-based line numbers are rejected (should be 1-based) - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:10")).is_err()); - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1:0")).is_err()); - assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:0")).is_err()); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#L0:10"), PathStyle::local()).is_err() + ); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#L1:0"), PathStyle::local()).is_err() + ); + assert!( + MentionUri::parse(uri!("file:///path/to/file.rs#L0:0"), PathStyle::local()).is_err() + ); } } diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index 69722815306e412745a62832115d2f010b2b8607..a40bcbd93c878a85c85d7edd312e713988234966 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -259,6 +259,15 @@ impl AcpTools { serde_json::to_string_pretty(&messages).ok() } + fn clear_messages(&mut self, cx: &mut Context) { + if let Some(connection) = self.watched_connection.as_mut() { + connection.messages.clear(); + connection.list_state.reset(0); + self.expanded.clear(); + cx.notify(); + } + } + fn render_message( &mut self, index: usize, @@ -547,10 +556,16 @@ impl Render for AcpToolsToolbarItemView { }; let acp_tools = acp_tools.clone(); + let has_messages = acp_tools + .read(cx) + .watched_connection + .as_ref() + .is_some_and(|connection| !connection.messages.is_empty()); h_flex() .gap_2() - .child( + .child({ + let acp_tools = acp_tools.clone(); IconButton::new( "copy_all_messages", if self.just_copied { @@ -565,13 +580,7 @@ impl Render for AcpToolsToolbarItemView { } else { "Copy All Messages" })) - .disabled( - acp_tools - 
.read(cx) - .watched_connection - .as_ref() - .is_none_or(|connection| connection.messages.is_empty()), - ) + .disabled(!has_messages) .on_click(cx.listener(move |this, _, _window, cx| { if let Some(content) = acp_tools.read(cx).serialize_observed_messages() { cx.write_to_clipboard(ClipboardItem::new_string(content)); @@ -586,7 +595,18 @@ impl Render for AcpToolsToolbarItemView { }) .detach(); } - })), + })) + }) + .child( + IconButton::new("clear_messages", IconName::Trash) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Clear Messages")) + .disabled(!has_messages) + .on_click(cx.listener(move |_this, _, _window, cx| { + acp_tools.update(cx, |acp_tools, cx| { + acp_tools.clear_messages(cx); + }); + })), ) .into_any() } diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 84d1291dad6d235e8d90d21bfcaf78a7e2ec042d..09cc2fb9568ca01748435c73fd8834efdbb50839 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -11,7 +11,7 @@ use language::{ LanguageServerStatusUpdate, ServerHealth, }; use project::{ - LanguageServerProgress, LspStoreEvent, Project, ProjectEnvironmentEvent, + LanguageServerProgress, LspStoreEvent, ProgressToken, Project, ProjectEnvironmentEvent, git_store::{GitStoreEvent, Repository}, }; use smallvec::SmallVec; @@ -61,7 +61,7 @@ struct ServerStatus { struct PendingWork<'a> { language_server_id: LanguageServerId, - progress_token: &'a str, + progress_token: &'a ProgressToken, progress: &'a LanguageServerProgress, } @@ -313,9 +313,9 @@ impl ActivityIndicator { let mut pending_work = status .pending_work .iter() - .map(|(token, progress)| PendingWork { + .map(|(progress_token, progress)| PendingWork { language_server_id: server_id, - progress_token: token.as_str(), + progress_token, progress, }) .collect::>(); @@ -358,11 +358,7 @@ impl ActivityIndicator { .. 
}) = pending_work.next() { - let mut message = progress - .title - .as_deref() - .unwrap_or(progress_token) - .to_string(); + let mut message = progress.title.clone().unwrap_or(progress_token.to_string()); if let Some(percentage) = progress.percentage { write!(&mut message, " ({}%)", percentage).unwrap(); @@ -773,7 +769,7 @@ impl Render for ActivityIndicator { let Some(content) = self.content_to_render(cx) else { return result; }; - let this = cx.entity().downgrade(); + let activity_indicator = cx.entity().downgrade(); let truncate_content = content.message.len() > MAX_MESSAGE_LEN; result.gap_2().child( PopoverMenu::new("activity-indicator-popover") @@ -815,22 +811,21 @@ impl Render for ActivityIndicator { ) .anchor(gpui::Corner::BottomLeft) .menu(move |window, cx| { - let strong_this = this.upgrade()?; + let strong_this = activity_indicator.upgrade()?; let mut has_work = false; let menu = ContextMenu::build(window, cx, |mut menu, _, cx| { for work in strong_this.read(cx).pending_language_server_work(cx) { has_work = true; - let this = this.clone(); + let activity_indicator = activity_indicator.clone(); let mut title = work .progress .title - .as_deref() - .unwrap_or(work.progress_token) - .to_owned(); + .clone() + .unwrap_or(work.progress_token.to_string()); if work.progress.is_cancellable { let language_server_id = work.language_server_id; - let token = work.progress_token.to_string(); + let token = work.progress_token.clone(); let title = SharedString::from(title); menu = menu.custom_entry( move |_, _| { @@ -842,18 +837,23 @@ impl Render for ActivityIndicator { .into_any_element() }, move |_, cx| { - this.update(cx, |this, cx| { - this.project.update(cx, |project, cx| { - project.cancel_language_server_work( - language_server_id, - Some(token.clone()), + let token = token.clone(); + activity_indicator + .update(cx, |activity_indicator, cx| { + activity_indicator.project.update( cx, + |project, cx| { + project.cancel_language_server_work( + language_server_id, + 
Some(token), + cx, + ); + }, ); - }); - this.context_menu_handle.hide(cx); - cx.notify(); - }) - .ok(); + activity_indicator.context_menu_handle.hide(cx); + cx.notify(); + }) + .ok(); }, ); } else { diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index e0f2d9dcb97e298dd3c906e3f902974821efcdc0..e962c876a38f788607706aad4e53ee5e0488b08d 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -11,7 +11,7 @@ path = "src/agent.rs" [features] test-support = ["db/test-support"] eval = [] -edit-agent-eval = [] +unit-eval = [] e2e = [] [lints] diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 63ee0adf191cbe309229c57b950d11ca7a3680e3..631c1122f85421e8f4f19a7a64efd82da0528162 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1035,12 +1035,13 @@ impl acp_thread::AgentConnection for NativeAgentConnection { let session_id = params.session_id.clone(); log::info!("Received prompt request for session: {}", session_id); log::debug!("Prompt blocks count: {}", params.prompt.len()); + let path_style = self.0.read(cx).project.read(cx).path_style(cx); - self.run_turn(session_id, cx, |thread, cx| { + self.run_turn(session_id, cx, move |thread, cx| { let content: Vec = params .prompt .into_iter() - .map(Into::into) + .map(|block| UserMessageContent::from_content_block(block, path_style)) .collect::>(); log::debug!("Converted prompt to message: {} chars", content.len()); log::debug!("Message id: {:?}", id); diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index 48977df1974cc104bc10fdf8975ed09172a1a938..84cdd101f57546a0bfbc86a290bf1f453e69a979 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -31,7 +31,7 @@ use std::{ use util::path; #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_extract_handle_command_output() { // Test how well agent generates multiple edit 
hunks. // @@ -108,7 +108,7 @@ fn eval_extract_handle_command_output() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_delete_run_git_blame() { // Model | Pass rate // ----------------------------|---------- @@ -171,7 +171,7 @@ fn eval_delete_run_git_blame() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_translate_doc_comments() { // Model | Pass rate // ============================================ @@ -234,7 +234,7 @@ fn eval_translate_doc_comments() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { // Model | Pass rate // ============================================ @@ -360,7 +360,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_disable_cursor_blinking() { // Model | Pass rate // ============================================ @@ -446,7 +446,7 @@ fn eval_disable_cursor_blinking() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_from_pixels_constructor() { // Results for 2025-06-13 // @@ -656,7 +656,7 @@ fn eval_from_pixels_constructor() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_zode() { // Model | Pass rate // ============================================ @@ -763,7 +763,7 @@ fn eval_zode() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] +#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_add_overwrite_test() { // Model | Pass rate // ============================================ @@ -995,7 +995,7 @@ fn eval_add_overwrite_test() { } #[test] -#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] 
+#[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_create_empty_file() { // Check that Edit Agent can create a file without writing its // thoughts into it. This issue is not specific to empty files, but diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index ddddbfc5279ca23fb95527892e929b23b8cefbf6..20fc40f242831552630f1e15f59917fd80b1ecdb 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -160,6 +160,42 @@ async fn test_system_prompt(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_system_prompt_without_tools(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["abc"], cx) + }) + .unwrap(); + cx.run_until_parked(); + let mut pending_completions = fake_model.pending_completions(); + assert_eq!( + pending_completions.len(), + 1, + "unexpected pending completions: {:?}", + pending_completions + ); + + let pending_completion = pending_completions.pop().unwrap(); + assert_eq!(pending_completion.messages[0].role, Role::System); + + let system_message = &pending_completion.messages[0]; + let system_prompt = system_message.content[0].to_str().unwrap(); + assert!( + !system_prompt.contains("## Tool Use"), + "unexpected system message: {:?}", + system_message + ); + assert!( + !system_prompt.contains("## Fixing Diagnostics"), + "unexpected system message: {:?}", + system_message + ); +} + #[gpui::test] async fn test_prompt_caching(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index d3414d84c8f5594a567e5b38b45ddf0739965365..64e512690beeaebd4a343bc5f2df473c795aed3f 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -50,7 +50,7 @@ use std::{ time::{Duration, Instant}, }; use std::{fmt::Write, path::PathBuf}; -use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock}; +use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock, paths::PathStyle}; use uuid::Uuid; const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user"; @@ -1816,9 +1816,15 @@ impl Thread { log::debug!("Completion intent: {:?}", completion_intent); log::debug!("Completion mode: {:?}", self.completion_mode); - let messages = self.build_request_messages(cx); + let available_tools: Vec<_> = self + .running_turn + .as_ref() + .map(|turn| turn.tools.keys().cloned().collect()) + .unwrap_or_default(); + + log::debug!("Request includes {} tools", available_tools.len()); + let messages = self.build_request_messages(available_tools, cx); log::debug!("Request will include {} messages", messages.len()); - log::debug!("Request includes {} tools", tools.len()); let request = LanguageModelRequest { thread_id: Some(self.id.to_string()), @@ -1909,7 +1915,11 @@ impl Thread { self.running_turn.as_ref()?.tools.get(name).cloned() } - fn build_request_messages(&self, cx: &App) -> Vec { + fn build_request_messages( + &self, + available_tools: Vec, + cx: &App, + ) -> Vec { log::trace!( "Building request messages from {} thread messages", self.messages.len() @@ -1917,7 +1927,7 @@ impl Thread { let system_prompt = SystemPromptTemplate { project: self.project_context.read(cx), - available_tools: self.tools.keys().cloned().collect(), + available_tools, } .render(&self.templates) .context("failed to build system prompt") @@ -2538,8 +2548,8 @@ impl From<&str> for UserMessageContent { } } -impl From for UserMessageContent { - fn from(value: acp::ContentBlock) -> 
Self { +impl UserMessageContent { + pub fn from_content_block(value: acp::ContentBlock, path_style: PathStyle) -> Self { match value { acp::ContentBlock::Text(text_content) => Self::Text(text_content.text), acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)), @@ -2548,7 +2558,7 @@ impl From for UserMessageContent { Self::Text("[audio]".to_string()) } acp::ContentBlock::ResourceLink(resource_link) => { - match MentionUri::parse(&resource_link.uri) { + match MentionUri::parse(&resource_link.uri, path_style) { Ok(uri) => Self::Mention { uri, content: String::new(), @@ -2561,7 +2571,7 @@ impl From for UserMessageContent { } acp::ContentBlock::Resource(resource) => match resource.resource { acp::EmbeddedResourceResource::TextResourceContents(resource) => { - match MentionUri::parse(&resource.uri) { + match MentionUri::parse(&resource.uri, path_style) { Ok(uri) => Self::Mention { uri, content: resource.text, diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs index c5ab47fe18970791c047ef157f6664188c95e346..583d8070d98697f4620bf45a3284d88760ebf9e7 100644 --- a/crates/agent_ui/src/acp/completion_provider.rs +++ b/crates/agent_ui/src/acp/completion_provider.rs @@ -253,17 +253,22 @@ impl ContextPickerCompletionProvider { ) -> Option { let project = workspace.read(cx).project().clone(); - let label = CodeLabel::plain(symbol.name.clone(), None); - - let abs_path = match &symbol.path { - SymbolLocation::InProject(project_path) => { - project.read(cx).absolute_path(&project_path, cx)? 
- } + let (abs_path, file_name) = match &symbol.path { + SymbolLocation::InProject(project_path) => ( + project.read(cx).absolute_path(&project_path, cx)?, + project_path.path.file_name()?.to_string().into(), + ), SymbolLocation::OutsideProject { abs_path, signature: _, - } => PathBuf::from(abs_path.as_ref()), + } => ( + PathBuf::from(abs_path.as_ref()), + abs_path.file_name().map(|f| f.to_string_lossy())?, + ), }; + + let label = build_symbol_label(&symbol.name, &file_name, symbol.range.start.0.row + 1, cx); + let uri = MentionUri::Symbol { abs_path, name: symbol.name.clone(), @@ -570,6 +575,7 @@ impl ContextPickerCompletionProvider { .unwrap_or_default(); let workspace = workspace.read(cx); let project = workspace.project().read(cx); + let include_root_name = workspace.visible_worktrees(cx).count() > 1; if let Some(agent_panel) = workspace.panel::(cx) && let Some(thread) = agent_panel.read(cx).active_agent_thread(cx) @@ -596,7 +602,11 @@ impl ContextPickerCompletionProvider { project .worktree_for_id(project_path.worktree_id, cx) .map(|worktree| { - let path_prefix = worktree.read(cx).root_name().into(); + let path_prefix = if include_root_name { + worktree.read(cx).root_name().into() + } else { + RelPath::empty().into() + }; Match::File(FileMatch { mat: fuzzy::PathMatch { score: 1., @@ -674,6 +684,17 @@ impl ContextPickerCompletionProvider { } } +fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -> CodeLabel { + let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); + let mut label = CodeLabelBuilder::default(); + + label.push_str(symbol_name, None); + label.push_str(" ", None); + label.push_str(&format!("{} L{}", file_name, line), comment_id); + + label.build() +} + fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); let mut label = CodeLabelBuilder::default(); @@ -812,9 +833,21 @@ 
impl CompletionProvider for ContextPickerCompletionProvider { path: mat.path.clone(), }; + // If path is empty, this means we're matching with the root directory itself + // so we use the path_prefix as the name + let path_prefix = if mat.path.is_empty() { + project + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + .map(|wt| wt.read(cx).root_name().into()) + .unwrap_or_else(|| mat.path_prefix.clone()) + } else { + mat.path_prefix.clone() + }; + Self::completion_for_path( project_path, - &mat.path_prefix, + &path_prefix, is_recent, mat.is_dir, source_range.clone(), diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index c24cefcf2d5fc04baffeb9f3d1a1ecaf9dd05268..5fe591caca5b88b97351884593a8b1550d8a1d11 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -702,7 +702,7 @@ impl MessageEditor { let mut all_tracked_buffers = Vec::new(); let result = editor.update(cx, |editor, cx| { - let mut ix = 0; + let mut ix = text.chars().position(|c| !c.is_whitespace()).unwrap_or(0); let mut chunks: Vec = Vec::new(); let text = editor.text(cx); editor.display_map.update(cx, |map, cx| { @@ -714,15 +714,6 @@ impl MessageEditor { let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot()); if crease_range.start > ix { - //todo(): Custom slash command ContentBlock? - // let chunk = if prevent_slash_commands - // && ix == 0 - // && parse_slash_command(&text[ix..]).is_some() - // { - // format!(" {}", &text[ix..crease_range.start]).into() - // } else { - // text[ix..crease_range.start].into() - // }; let chunk = text[ix..crease_range.start].into(); chunks.push(chunk); } @@ -783,15 +774,6 @@ impl MessageEditor { } if ix < text.len() { - //todo(): Custom slash command ContentBlock? 
- // let last_chunk = if prevent_slash_commands - // && ix == 0 - // && parse_slash_command(&text[ix..]).is_some() - // { - // format!(" {}", text[ix..].trim_end()) - // } else { - // text[ix..].trim_end().to_owned() - // }; let last_chunk = text[ix..].trim_end().to_owned(); if !last_chunk.is_empty() { chunks.push(last_chunk.into()); @@ -1062,6 +1044,7 @@ impl MessageEditor { ) { self.clear(window, cx); + let path_style = self.project.read(cx).path_style(cx); let mut text = String::new(); let mut mentions = Vec::new(); @@ -1074,7 +1057,8 @@ impl MessageEditor { resource: acp::EmbeddedResourceResource::TextResourceContents(resource), .. }) => { - let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() else { + let Some(mention_uri) = MentionUri::parse(&resource.uri, path_style).log_err() + else { continue; }; let start = text.len(); @@ -1090,7 +1074,9 @@ impl MessageEditor { )); } acp::ContentBlock::ResourceLink(resource) => { - if let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() { + if let Some(mention_uri) = + MentionUri::parse(&resource.uri, path_style).log_err() + { let start = text.len(); write!(&mut text, "{}", mention_uri.as_link()).ok(); let end = text.len(); @@ -1105,7 +1091,7 @@ impl MessageEditor { meta: _, }) => { let mention_uri = if let Some(uri) = uri { - MentionUri::parse(&uri) + MentionUri::parse(&uri, path_style) } else { Ok(MentionUri::PastedImage) }; @@ -2179,10 +2165,10 @@ mod tests { assert_eq!( current_completion_labels(editor), &[ - format!("eight.txt dir{slash}b{slash}"), - format!("seven.txt dir{slash}b{slash}"), - format!("six.txt dir{slash}b{slash}"), - format!("five.txt dir{slash}b{slash}"), + format!("eight.txt b{slash}"), + format!("seven.txt b{slash}"), + format!("six.txt b{slash}"), + format!("five.txt b{slash}"), ] ); editor.set_text("", window, cx); @@ -2210,10 +2196,10 @@ mod tests { assert_eq!( current_completion_labels(editor), &[ - format!("eight.txt dir{slash}b{slash}"), - format!("seven.txt 
dir{slash}b{slash}"), - format!("six.txt dir{slash}b{slash}"), - format!("five.txt dir{slash}b{slash}"), + format!("eight.txt b{slash}"), + format!("seven.txt b{slash}"), + format!("six.txt b{slash}"), + format!("five.txt b{slash}"), "Files & Directories".into(), "Symbols".into(), "Threads".into(), @@ -2246,7 +2232,7 @@ mod tests { assert!(editor.has_visible_completions_menu()); assert_eq!( current_completion_labels(editor), - vec![format!("one.txt dir{slash}a{slash}")] + vec![format!("one.txt a{slash}")] ); }); @@ -2293,7 +2279,10 @@ mod tests { panic!("Unexpected mentions"); }; pretty_assertions::assert_eq!(content, "1"); - pretty_assertions::assert_eq!(uri, &url_one.parse::().unwrap()); + pretty_assertions::assert_eq!( + uri, + &MentionUri::parse(&url_one, PathStyle::local()).unwrap() + ); } let contents = message_editor @@ -2314,7 +2303,10 @@ mod tests { let [(uri, Mention::UriOnly)] = contents.as_slice() else { panic!("Unexpected mentions"); }; - pretty_assertions::assert_eq!(uri, &url_one.parse::().unwrap()); + pretty_assertions::assert_eq!( + uri, + &MentionUri::parse(&url_one, PathStyle::local()).unwrap() + ); } cx.simulate_input(" "); @@ -2375,7 +2367,10 @@ mod tests { panic!("Unexpected mentions"); }; pretty_assertions::assert_eq!(content, "8"); - pretty_assertions::assert_eq!(uri, &url_eight.parse::().unwrap()); + pretty_assertions::assert_eq!( + uri, + &MentionUri::parse(&url_eight, PathStyle::local()).unwrap() + ); } editor.update(&mut cx, |editor, cx| { @@ -2460,7 +2455,7 @@ mod tests { format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) @symbol ") ); assert!(editor.has_visible_completions_menu()); - assert_eq!(current_completion_labels(editor), &["MySymbol"]); + assert_eq!(current_completion_labels(editor), &["MySymbol one.txt L1"]); }); editor.update_in(&mut cx, |editor, window, cx| { @@ -2516,7 +2511,7 @@ mod tests { format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri()) 
); assert!(editor.has_visible_completions_menu()); - assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]); + assert_eq!(current_completion_labels(editor), &["x.png "]); }); editor.update_in(&mut cx, |editor, window, cx| { @@ -2558,7 +2553,7 @@ mod tests { format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri()) ); assert!(editor.has_visible_completions_menu()); - assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]); + assert_eq!(current_completion_labels(editor), &["x.png "]); }); editor.update_in(&mut cx, |editor, window, cx| { @@ -2734,4 +2729,62 @@ mod tests { _ => panic!("Expected Text mention for small file"), } } + + #[gpui::test] + async fn test_whitespace_trimming(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file.rs": "fn main() {}"})) + .await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + MessageEditor::new( + workspace.downgrade(), + project.clone(), + history_store.clone(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ) + }) + }); + let editor = message_editor.update(cx, |message_editor, _| message_editor.editor.clone()); + + cx.run_until_parked(); + + editor.update_in(cx, |editor, window, cx| { + editor.set_text(" hello world ", window, cx); + }); + + let (content, _) = message_editor + .update(cx, |message_editor, cx| message_editor.contents(false, cx)) + .await + .unwrap(); + + 
assert_eq!( + content, + vec![acp::ContentBlock::Text(acp::TextContent { + text: "hello world".into(), + annotations: None, + meta: None + })] + ); + } } diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 7e5d5b48a13adb7c3133245cd520f7b48c46517a..3638faf9336f79d692f820df39266ab7b85360a8 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -4305,7 +4305,8 @@ impl AcpThreadView { return; }; - if let Some(mention) = MentionUri::parse(&url).log_err() { + if let Some(mention) = MentionUri::parse(&url, workspace.read(cx).path_style(cx)).log_err() + { workspace.update(cx, |workspace, cx| match mention { MentionUri::File { abs_path } => { let project = workspace.project(); diff --git a/crates/agent_ui/src/context_picker.rs b/crates/agent_ui/src/context_picker.rs index caffb31521e397ca7cd6b1fa0c8f4ae73d5ab9ff..0a6e811673aa47339087e538003e87b1940d0039 100644 --- a/crates/agent_ui/src/context_picker.rs +++ b/crates/agent_ui/src/context_picker.rs @@ -662,6 +662,7 @@ pub(crate) fn recent_context_picker_entries( let mut recent = Vec::with_capacity(6); let workspace = workspace.read(cx); let project = workspace.project().read(cx); + let include_root_name = workspace.visible_worktrees(cx).count() > 1; recent.extend( workspace @@ -675,9 +676,16 @@ pub(crate) fn recent_context_picker_entries( .filter_map(|(project_path, _)| { project .worktree_for_id(project_path.worktree_id, cx) - .map(|worktree| RecentEntry::File { - project_path, - path_prefix: worktree.read(cx).root_name().into(), + .map(|worktree| { + let path_prefix = if include_root_name { + worktree.read(cx).root_name().into() + } else { + RelPath::empty().into() + }; + RecentEntry::File { + project_path, + path_prefix, + } }) }), ); diff --git a/crates/agent_ui/src/context_picker/completion_provider.rs b/crates/agent_ui/src/context_picker/completion_provider.rs index 
56444141f12903db4868f9e154cccdb872b48514..3a3ea45c800e3031dc8939c1801ca989a220bf0c 100644 --- a/crates/agent_ui/src/context_picker/completion_provider.rs +++ b/crates/agent_ui/src/context_picker/completion_provider.rs @@ -655,13 +655,12 @@ impl ContextPickerCompletionProvider { let SymbolLocation::InProject(symbol_path) = &symbol.path else { return None; }; - let path_prefix = workspace + let _path_prefix = workspace .read(cx) .project() .read(cx) - .worktree_for_id(symbol_path.worktree_id, cx)? - .read(cx) - .root_name(); + .worktree_for_id(symbol_path.worktree_id, cx)?; + let path_prefix = RelPath::empty(); let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory( &symbol_path.path, @@ -818,9 +817,21 @@ impl CompletionProvider for ContextPickerCompletionProvider { return None; } + // If path is empty, this means we're matching with the root directory itself + // so we use the path_prefix as the name + let path_prefix = if mat.path.is_empty() { + project + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + .map(|wt| wt.read(cx).root_name().into()) + .unwrap_or_else(|| mat.path_prefix.clone()) + } else { + mat.path_prefix.clone() + }; + Some(Self::completion_for_path( project_path, - &mat.path_prefix, + &path_prefix, is_recent, mat.is_dir, excerpt_id, @@ -1309,10 +1320,10 @@ mod tests { assert_eq!( current_completion_labels(editor), &[ - format!("seven.txt dir{slash}b{slash}"), - format!("six.txt dir{slash}b{slash}"), - format!("five.txt dir{slash}b{slash}"), - format!("four.txt dir{slash}a{slash}"), + format!("seven.txt b{slash}"), + format!("six.txt b{slash}"), + format!("five.txt b{slash}"), + format!("four.txt a{slash}"), "Files & Directories".into(), "Symbols".into(), "Fetch".into() @@ -1344,7 +1355,7 @@ mod tests { assert!(editor.has_visible_completions_menu()); assert_eq!( current_completion_labels(editor), - vec![format!("one.txt dir{slash}a{slash}")] + vec![format!("one.txt a{slash}")] ); }); @@ -1356,12 +1367,12 
@@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ") + format!("Lorem [@one.txt](@file:a{slash}one.txt) ") ); assert!(!editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 37)] + vec![Point::new(0, 6)..Point::new(0, 33)] ); }); @@ -1370,12 +1381,12 @@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ") + format!("Lorem [@one.txt](@file:a{slash}one.txt) ") ); assert!(!editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 37)] + vec![Point::new(0, 6)..Point::new(0, 33)] ); }); @@ -1384,12 +1395,12 @@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum "), + format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum "), ); assert!(!editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 37)] + vec![Point::new(0, 6)..Point::new(0, 33)] ); }); @@ -1398,12 +1409,12 @@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum @file "), + format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum @file "), ); assert!(editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 37)] + vec![Point::new(0, 6)..Point::new(0, 33)] ); }); @@ -1416,14 +1427,14 @@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) ") + format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) ") ); 
assert!(!editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), vec![ - Point::new(0, 6)..Point::new(0, 37), - Point::new(0, 45)..Point::new(0, 80) + Point::new(0, 6)..Point::new(0, 33), + Point::new(0, 41)..Point::new(0, 72) ] ); }); @@ -1433,14 +1444,14 @@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n@") + format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) \n@") ); assert!(editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), vec![ - Point::new(0, 6)..Point::new(0, 37), - Point::new(0, 45)..Point::new(0, 80) + Point::new(0, 6)..Point::new(0, 33), + Point::new(0, 41)..Point::new(0, 72) ] ); }); @@ -1454,20 +1465,203 @@ mod tests { editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n[@six.txt](@file:dir{slash}b{slash}six.txt) ") + format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) \n[@six.txt](@file:b{slash}six.txt) ") ); assert!(!editor.has_visible_completions_menu()); assert_eq!( fold_ranges(editor, cx), vec![ - Point::new(0, 6)..Point::new(0, 37), - Point::new(0, 45)..Point::new(0, 80), - Point::new(1, 0)..Point::new(1, 31) + Point::new(0, 6)..Point::new(0, 33), + Point::new(0, 41)..Point::new(0, 72), + Point::new(1, 0)..Point::new(1, 27) ] ); }); } + #[gpui::test] + async fn test_context_completion_provider_multiple_worktrees(cx: &mut TestAppContext) { + init_test(cx); + + let app_state = cx.update(AppState::test); + + cx.update(|cx| { + language::init(cx); + editor::init(cx); + workspace::init(app_state.clone(), cx); + Project::init_settings(cx); + }); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project1"), + json!({ + "a": { + "one.txt": 
"", + "two.txt": "", + } + }), + ) + .await; + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project2"), + json!({ + "b": { + "three.txt": "", + "four.txt": "", + } + }), + ) + .await; + + let project = Project::test( + app_state.fs.clone(), + [path!("/project1").as_ref(), path!("/project2").as_ref()], + cx, + ) + .await; + let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let workspace = window.root(cx).unwrap(); + + let worktrees = project.update(cx, |project, cx| { + let worktrees = project.worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 2); + worktrees + }); + + let mut cx = VisualTestContext::from_window(*window.deref(), cx); + let slash = PathStyle::local().separator(); + + for (worktree_idx, paths) in [ + vec![rel_path("a/one.txt"), rel_path("a/two.txt")], + vec![rel_path("b/three.txt"), rel_path("b/four.txt")], + ] + .iter() + .enumerate() + { + let worktree_id = worktrees[worktree_idx].read_with(&cx, |wt, _| wt.id()); + for path in paths { + workspace + .update_in(&mut cx, |workspace, window, cx| { + workspace.open_path( + ProjectPath { + worktree_id, + path: (*path).into(), + }, + None, + false, + window, + cx, + ) + }) + .await + .unwrap(); + } + } + + let editor = workspace.update_in(&mut cx, |workspace, window, cx| { + let editor = cx.new(|cx| { + Editor::new( + editor::EditorMode::full(), + multi_buffer::MultiBuffer::build_simple("", cx), + None, + window, + cx, + ) + }); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item( + Box::new(cx.new(|_| AtMentionEditor(editor.clone()))), + true, + true, + None, + window, + cx, + ); + }); + editor + }); + + let context_store = cx.new(|_| ContextStore::new(project.downgrade())); + + let editor_entity = editor.downgrade(); + editor.update_in(&mut cx, |editor, window, cx| { + window.focus(&editor.focus_handle(cx)); + editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( + workspace.downgrade(), + 
context_store.downgrade(), + None, + None, + editor_entity, + None, + )))); + }); + + cx.simulate_input("@"); + + // With multiple worktrees, we should see the project name as prefix + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "@"); + assert!(editor.has_visible_completions_menu()); + let labels = current_completion_labels(editor); + + assert!( + labels.contains(&format!("four.txt project2{slash}b{slash}")), + "Expected 'four.txt project2{slash}b{slash}' in labels: {:?}", + labels + ); + assert!( + labels.contains(&format!("three.txt project2{slash}b{slash}")), + "Expected 'three.txt project2{slash}b{slash}' in labels: {:?}", + labels + ); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + cx.run_until_parked(); + + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "@file "); + assert!(editor.has_visible_completions_menu()); + }); + + cx.simulate_input("one"); + + editor.update(&mut cx, |editor, cx| { + assert_eq!(editor.text(cx), "@file one"); + assert!(editor.has_visible_completions_menu()); + assert_eq!( + current_completion_labels(editor), + vec![format!("one.txt project1{slash}a{slash}")] + ); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + editor.update(&mut cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("[@one.txt](@file:project1{slash}a{slash}one.txt) ") + ); + assert!(!editor.has_visible_completions_menu()); + }); + } + fn fold_ranges(editor: &Editor, cx: &mut App) -> Vec> { let snapshot = 
editor.buffer().read(cx).snapshot(cx); editor.display_map.update(cx, |display_map, cx| { diff --git a/crates/agent_ui/src/context_picker/file_context_picker.rs b/crates/agent_ui/src/context_picker/file_context_picker.rs index 8d1e5cb46dfba7bc89770356334fb08a7bf7a0c5..ded24caa922d27d8821e46e5c58b5ed22ab754ff 100644 --- a/crates/agent_ui/src/context_picker/file_context_picker.rs +++ b/crates/agent_ui/src/context_picker/file_context_picker.rs @@ -197,34 +197,50 @@ pub(crate) fn search_files( if query.is_empty() { let workspace = workspace.read(cx); let project = workspace.project().read(cx); + let visible_worktrees = workspace.visible_worktrees(cx).collect::>(); + let include_root_name = visible_worktrees.len() > 1; + let recent_matches = workspace .recent_navigation_history(Some(10), cx) .into_iter() - .filter_map(|(project_path, _)| { - let worktree = project.worktree_for_id(project_path.worktree_id, cx)?; - Some(FileMatch { + .map(|(project_path, _)| { + let path_prefix = if include_root_name { + project + .worktree_for_id(project_path.worktree_id, cx) + .map(|wt| wt.read(cx).root_name().into()) + .unwrap_or_else(|| RelPath::empty().into()) + } else { + RelPath::empty().into() + }; + + FileMatch { mat: PathMatch { score: 0., positions: Vec::new(), worktree_id: project_path.worktree_id.to_usize(), path: project_path.path, - path_prefix: worktree.read(cx).root_name().into(), + path_prefix, distance_to_relative_ancestor: 0, is_dir: false, }, is_recent: true, - }) + } }); - let file_matches = project.worktrees(cx).flat_map(|worktree| { + let file_matches = visible_worktrees.into_iter().flat_map(|worktree| { let worktree = worktree.read(cx); + let path_prefix: Arc = if include_root_name { + worktree.root_name().into() + } else { + RelPath::empty().into() + }; worktree.entries(false, 0).map(move |entry| FileMatch { mat: PathMatch { score: 0., positions: Vec::new(), worktree_id: worktree.id().to_usize(), path: entry.path.clone(), - path_prefix: 
worktree.root_name().into(), + path_prefix: path_prefix.clone(), distance_to_relative_ancestor: 0, is_dir: entry.is_dir(), }, @@ -235,6 +251,7 @@ pub(crate) fn search_files( Task::ready(recent_matches.chain(file_matches).collect()) } else { let worktrees = workspace.read(cx).visible_worktrees(cx).collect::>(); + let include_root_name = worktrees.len() > 1; let candidate_sets = worktrees .into_iter() .map(|worktree| { @@ -243,7 +260,7 @@ pub(crate) fn search_files( PathMatchCandidateSet { snapshot: worktree.snapshot(), include_ignored: worktree.root_entry().is_some_and(|entry| entry.is_ignored), - include_root_name: true, + include_root_name, candidates: project::Candidates::Entries, } }) @@ -276,6 +293,12 @@ pub fn extract_file_name_and_directory( path_prefix: &RelPath, path_style: PathStyle, ) -> (SharedString, Option) { + // If path is empty, this means we're matching with the root directory itself + // so we use the path_prefix as the name + if path.is_empty() && !path_prefix.is_empty() { + return (path_prefix.display(path_style).to_string().into(), None); + } + let full_path = path_prefix.join(path); let file_name = full_path.file_name().unwrap_or_default(); let display_path = full_path.display(path_style); diff --git a/crates/auto_update_helper/manifest.xml b/crates/auto_update_helper/manifest.xml index 5a69b434865166dc5f85a9558d28bea6cd646ffe..c3a99d23ff9e60e3604fe0aa8a203345e9c355be 100644 --- a/crates/auto_update_helper/manifest.xml +++ b/crates/auto_update_helper/manifest.xml @@ -1,16 +1,32 @@ - - - - true + + + + + + + + + + + + + + + + + true/pm PerMonitorV2 - - + + - + version='6.0.0.0' + processorArchitecture='*' + publicKeyToken='6595b64144ccf1df' + /> diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index 2acd9cee3a3df9634cb384311a204be4eec88538..bbf385ecc870cb7630bb109cda4db19cd33c07ec 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -36,13 +36,31 
@@ pub(crate) const JOBS: &[Job] = &[ std::fs::remove_file(&zed_wsl) .context(format!("Failed to remove old file {}", zed_wsl.display())) }, + // TODO: remove after a few weeks once everyone is on the new version and this file never exists |app_dir| { let open_console = app_dir.join("OpenConsole.exe"); - log::info!("Removing old file: {}", open_console.display()); - std::fs::remove_file(&open_console).context(format!( - "Failed to remove old file {}", - open_console.display() - )) + if open_console.exists() { + log::info!("Removing old file: {}", open_console.display()); + std::fs::remove_file(&open_console).context(format!( + "Failed to remove old file {}", + open_console.display() + ))? + } + Ok(()) + }, + |app_dir| { + let archs = ["x64", "arm64"]; + for arch in archs { + let open_console = app_dir.join(format!("{arch}\\OpenConsole.exe")); + if open_console.exists() { + log::info!("Removing old file: {}", open_console.display()); + std::fs::remove_file(&open_console).context(format!( + "Failed to remove old file {}", + open_console.display() + ))? 
+ } + } + Ok(()) }, |app_dir| { let conpty = app_dir.join("conpty.dll"); @@ -100,20 +118,32 @@ pub(crate) const JOBS: &[Job] = &[ )) }, |app_dir| { - let open_console_source = app_dir.join("install\\OpenConsole.exe"); - let open_console_dest = app_dir.join("OpenConsole.exe"); - log::info!( - "Copying new file {} to {}", - open_console_source.display(), - open_console_dest.display() - ); - std::fs::copy(&open_console_source, &open_console_dest) - .map(|_| ()) - .context(format!( - "Failed to copy new file {} to {}", - open_console_source.display(), - open_console_dest.display() - )) + let archs = ["x64", "arm64"]; + for arch in archs { + let open_console_source = app_dir.join(format!("install\\{arch}\\OpenConsole.exe")); + let open_console_dest = app_dir.join(format!("{arch}\\OpenConsole.exe")); + if open_console_source.exists() { + log::info!( + "Copying new file {} to {}", + open_console_source.display(), + open_console_dest.display() + ); + let parent = open_console_dest.parent().context(format!( + "Failed to get parent directory of {}", + open_console_dest.display() + ))?; + std::fs::create_dir_all(parent) + .context(format!("Failed to create directory {}", parent.display()))?; + std::fs::copy(&open_console_source, &open_console_dest) + .map(|_| ()) + .context(format!( + "Failed to copy new file {} to {}", + open_console_source.display(), + open_console_dest.display() + ))? 
+ } + } + Ok(()) }, |app_dir| { let conpty_source = app_dir.join("install\\conpty.dll"); diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs index ab0426bb7d0c28fd9b14028bd0b2c885a5310da0..1691ffe199975983fbb40b781aac00a2703871ea 100644 --- a/crates/bedrock/src/models.rs +++ b/crates/bedrock/src/models.rs @@ -66,6 +66,8 @@ pub enum Model { Claude3Sonnet, #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")] Claude3_5Haiku, + #[serde(rename = "claude-haiku-4-5", alias = "claude-haiku-4-5-latest")] + ClaudeHaiku4_5, Claude3_5Sonnet, Claude3Haiku, // Amazon Nova Models @@ -147,6 +149,8 @@ impl Model { Ok(Self::Claude3Sonnet) } else if id.starts_with("claude-3-5-haiku") { Ok(Self::Claude3_5Haiku) + } else if id.starts_with("claude-haiku-4-5") { + Ok(Self::ClaudeHaiku4_5) } else if id.starts_with("claude-3-7-sonnet") { Ok(Self::Claude3_7Sonnet) } else if id.starts_with("claude-3-7-sonnet-thinking") { @@ -180,6 +184,7 @@ impl Model { Model::Claude3Sonnet => "claude-3-sonnet", Model::Claude3Haiku => "claude-3-haiku", Model::Claude3_5Haiku => "claude-3-5-haiku", + Model::ClaudeHaiku4_5 => "claude-haiku-4-5", Model::Claude3_7Sonnet => "claude-3-7-sonnet", Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking", Model::AmazonNovaLite => "amazon-nova-lite", @@ -246,6 +251,7 @@ impl Model { Model::Claude3Sonnet => "anthropic.claude-3-sonnet-20240229-v1:0", Model::Claude3Haiku => "anthropic.claude-3-haiku-20240307-v1:0", Model::Claude3_5Haiku => "anthropic.claude-3-5-haiku-20241022-v1:0", + Model::ClaudeHaiku4_5 => "anthropic.claude-haiku-4-5-20251001-v1:0", Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => { "anthropic.claude-3-7-sonnet-20250219-v1:0" } @@ -309,6 +315,7 @@ impl Model { Self::Claude3Sonnet => "Claude 3 Sonnet", Self::Claude3Haiku => "Claude 3 Haiku", Self::Claude3_5Haiku => "Claude 3.5 Haiku", + Self::ClaudeHaiku4_5 => "Claude Haiku 4.5", Self::Claude3_7Sonnet => "Claude 3.7 Sonnet", 
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking", Self::AmazonNovaLite => "Amazon Nova Lite", @@ -363,6 +370,7 @@ impl Model { | Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku + | Self::ClaudeHaiku4_5 | Self::Claude3_7Sonnet | Self::ClaudeSonnet4 | Self::ClaudeOpus4 @@ -385,7 +393,7 @@ impl Model { Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096, Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000, Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000, - Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => 64_000, + Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::ClaudeHaiku4_5 => 64_000, Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 @@ -404,6 +412,7 @@ impl Model { | Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku + | Self::ClaudeHaiku4_5 | Self::Claude3_7Sonnet | Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking @@ -438,7 +447,8 @@ impl Model { | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking - | Self::Claude3_5Haiku => true, + | Self::Claude3_5Haiku + | Self::ClaudeHaiku4_5 => true, // Amazon Nova models (all support tool use) Self::AmazonNovaPremier @@ -464,6 +474,7 @@ impl Model { // Nova models support only text caching // https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-caching.html#prompt-caching-models Self::Claude3_5Haiku + | Self::ClaudeHaiku4_5 | Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking | Self::ClaudeSonnet4 @@ -500,7 +511,7 @@ impl Model { min_total_token: 1024, }), - Self::Claude3_5Haiku => Some(BedrockModelCacheConfiguration { + Self::Claude3_5Haiku | Self::ClaudeHaiku4_5 => Some(BedrockModelCacheConfiguration { max_cache_anchors: 4, min_total_token: 2048, }), @@ -569,6 +580,7 @@ impl Model { ( Model::AmazonNovaPremier | Model::Claude3_5Haiku + | Model::ClaudeHaiku4_5 | Model::Claude3_5Sonnet | Model::Claude3_5SonnetV2 | Model::Claude3_7Sonnet @@ -606,6 +618,7 @@ impl 
Model { // Models available in EU ( Model::Claude3_5Sonnet + | Model::ClaudeHaiku4_5 | Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking | Model::ClaudeSonnet4 @@ -624,6 +637,7 @@ impl Model { ( Model::Claude3_5Sonnet | Model::Claude3_5SonnetV2 + | Model::ClaudeHaiku4_5 | Model::Claude3Haiku | Model::Claude3Sonnet | Model::Claude3_7Sonnet diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index bb2f2effbbf8ff0d8995b8607c1ce2ddff66ac0c..a16f5a3bab9849ee93abac4e2eccb602698b65de 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -358,6 +358,7 @@ fn main() -> Result<()> { rayon::ThreadPoolBuilder::new() .num_threads(4) + .stack_size(10 * 1024 * 1024) .thread_name(|ix| format!("RayonWorker{}", ix)) .build_global() .unwrap(); diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index e03541e0f7d66bd54d6fbd918debbdc3d6c8d9e7..7166139d9077394e684a8b53ce3d8300cb5fa2db 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -23,7 +23,11 @@ pub struct PredictEditsRequest { pub cursor_point: Point, /// Within `signatures` pub excerpt_parent: Option, + #[serde(skip_serializing_if = "Vec::is_empty", default)] + pub included_files: Vec, + #[serde(skip_serializing_if = "Vec::is_empty", default)] pub signatures: Vec, + #[serde(skip_serializing_if = "Vec::is_empty", default)] pub referenced_declarations: Vec, pub events: Vec, #[serde(default)] @@ -44,6 +48,19 @@ pub struct PredictEditsRequest { pub prompt_format: PromptFormat, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct IncludedFile { + pub path: Arc, + pub max_row: Line, + pub excerpts: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Excerpt { + pub start_line: Line, + pub text: Arc, +} + #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, EnumIter)] pub enum PromptFormat { MarkedExcerpt, diff --git 
a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs index 284b245acf2305350e6a6a5e7c38dfaa9b16c5d4..1c8b1caf80db28ef936aa9a747b4a163e183134f 100644 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs @@ -1,11 +1,14 @@ //! Zeta2 prompt planning and generation code shared with cloud. use anyhow::{Context as _, Result, anyhow}; -use cloud_llm_client::predict_edits_v3::{self, Line, Point, PromptFormat, ReferencedDeclaration}; +use cloud_llm_client::predict_edits_v3::{ + self, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration, +}; use indoc::indoc; use ordered_float::OrderedFloat; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Serialize; +use std::cmp; use std::fmt::Write; use std::sync::Arc; use std::{cmp::Reverse, collections::BinaryHeap, ops::Range, path::Path}; @@ -96,7 +99,177 @@ const UNIFIED_DIFF_REMINDER: &str = indoc! {" If you're editing multiple files, be sure to reflect filename in the hunk's header. 
"}; -pub struct PlannedPrompt<'a> { +pub fn build_prompt( + request: &predict_edits_v3::PredictEditsRequest, +) -> Result<(String, SectionLabels)> { + let mut insertions = match request.prompt_format { + PromptFormat::MarkedExcerpt => vec![ + ( + Point { + line: request.excerpt_line_range.start, + column: 0, + }, + EDITABLE_REGION_START_MARKER_WITH_NEWLINE, + ), + (request.cursor_point, CURSOR_MARKER), + ( + Point { + line: request.excerpt_line_range.end, + column: 0, + }, + EDITABLE_REGION_END_MARKER_WITH_NEWLINE, + ), + ], + PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)], + PromptFormat::NumLinesUniDiff => { + vec![(request.cursor_point, CURSOR_MARKER)] + } + PromptFormat::OnlySnippets => vec![], + }; + + let mut prompt = match request.prompt_format { + PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(), + PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(), + PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(), + // only intended for use via zeta_cli + PromptFormat::OnlySnippets => String::new(), + }; + + if request.events.is_empty() { + prompt.push_str("(No edit history)\n\n"); + } else { + prompt.push_str( + "The following are the latest edits made by the user, from earlier to later.\n\n", + ); + push_events(&mut prompt, &request.events); + } + + if request.prompt_format == PromptFormat::NumLinesUniDiff { + if request.referenced_declarations.is_empty() { + prompt.push_str(indoc! {" + # File under the cursor: + + The cursor marker <|user_cursor|> indicates the current user cursor position. + The file is in current state, edits from edit history have been applied. + We prepend line numbers (e.g., `123|`); they are not part of the file. + + "}); + } else { + // Note: This hasn't been trained on yet + prompt.push_str(indoc! {" + # Code Excerpts: + + The cursor marker <|user_cursor|> indicates the current user cursor position. 
+ Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor. + Context excerpts are not guaranteed to be relevant, so use your own judgement. + Files are in their current state, edits from edit history have been applied. + We prepend line numbers (e.g., `123|`); they are not part of the file. + + "}); + } + } else { + prompt.push_str("\n## Code\n\n"); + } + + let mut section_labels = Default::default(); + + if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() { + let syntax_based_prompt = SyntaxBasedPrompt::populate(request)?; + section_labels = syntax_based_prompt.write(&mut insertions, &mut prompt)?; + } else { + if request.prompt_format == PromptFormat::LabeledSections { + anyhow::bail!("PromptFormat::LabeledSections cannot be used with ContextMode::Llm"); + } + + for related_file in &request.included_files { + writeln!(&mut prompt, "`````filename={}", related_file.path.display()).unwrap(); + write_excerpts( + &related_file.excerpts, + if related_file.path == request.excerpt_path { + &insertions + } else { + &[] + }, + related_file.max_row, + request.prompt_format == PromptFormat::NumLinesUniDiff, + &mut prompt, + ); + write!(&mut prompt, "`````\n\n").unwrap(); + } + } + + if request.prompt_format == PromptFormat::NumLinesUniDiff { + prompt.push_str(UNIFIED_DIFF_REMINDER); + } + + Ok((prompt, section_labels)) +} + +pub fn write_excerpts<'a>( + excerpts: impl IntoIterator, + sorted_insertions: &[(Point, &str)], + file_line_count: Line, + include_line_numbers: bool, + output: &mut String, +) { + let mut current_row = Line(0); + let mut sorted_insertions = sorted_insertions.iter().peekable(); + + for excerpt in excerpts { + if excerpt.start_line > current_row { + writeln!(output, "…").unwrap(); + } + if excerpt.text.is_empty() { + return; + } + + current_row = excerpt.start_line; + + for mut line in excerpt.text.lines() { + if include_line_numbers { + write!(output, 
"{}|", current_row.0 + 1).unwrap(); + } + + while let Some((insertion_location, insertion_marker)) = sorted_insertions.peek() { + match current_row.cmp(&insertion_location.line) { + cmp::Ordering::Equal => { + let (prefix, suffix) = line.split_at(insertion_location.column as usize); + output.push_str(prefix); + output.push_str(insertion_marker); + line = suffix; + sorted_insertions.next(); + } + cmp::Ordering::Less => break, + cmp::Ordering::Greater => { + sorted_insertions.next(); + break; + } + } + } + output.push_str(line); + output.push('\n'); + current_row.0 += 1; + } + } + + if current_row < file_line_count { + writeln!(output, "…").unwrap(); + } +} + +fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) { + if events.is_empty() { + return; + }; + + writeln!(output, "`````diff").unwrap(); + for event in events { + writeln!(output, "{}", event).unwrap(); + } + writeln!(output, "`````\n").unwrap(); +} + +pub struct SyntaxBasedPrompt<'a> { request: &'a predict_edits_v3::PredictEditsRequest, /// Snippets to include in the prompt. These may overlap - they are merged / deduplicated in /// `to_prompt_string`. @@ -120,13 +293,13 @@ pub enum DeclarationStyle { Declaration, } -#[derive(Clone, Debug, Serialize)] +#[derive(Default, Clone, Debug, Serialize)] pub struct SectionLabels { pub excerpt_index: usize, pub section_ranges: Vec<(Arc, Range)>, } -impl<'a> PlannedPrompt<'a> { +impl<'a> SyntaxBasedPrompt<'a> { /// Greedy one-pass knapsack algorithm to populate the prompt plan. Does the following: /// /// Initializes a priority queue by populating it with each snippet, finding the @@ -149,7 +322,7 @@ impl<'a> PlannedPrompt<'a> { /// /// * Does not include file paths / other text when considering max_bytes. 
pub fn populate(request: &'a predict_edits_v3::PredictEditsRequest) -> Result { - let mut this = PlannedPrompt { + let mut this = Self { request, snippets: Vec::new(), budget_used: request.excerpt.len(), @@ -354,7 +527,11 @@ impl<'a> PlannedPrompt<'a> { /// Renders the planned context. Each file starts with "```FILE_PATH\n` and ends with triple /// backticks, with a newline after each file. Outputs a line with "..." between nonconsecutive /// chunks. - pub fn to_prompt_string(&'a self) -> Result<(String, SectionLabels)> { + pub fn write( + &'a self, + excerpt_file_insertions: &mut Vec<(Point, &'static str)>, + prompt: &mut String, + ) -> Result { let mut file_to_snippets: FxHashMap<&'a std::path::Path, Vec<&PlannedSnippet<'a>>> = FxHashMap::default(); for snippet in &self.snippets { @@ -383,95 +560,10 @@ impl<'a> PlannedPrompt<'a> { excerpt_file_snippets.push(&excerpt_snippet); file_snippets.push((&self.request.excerpt_path, excerpt_file_snippets, true)); - let mut excerpt_file_insertions = match self.request.prompt_format { - PromptFormat::MarkedExcerpt => vec![ - ( - Point { - line: self.request.excerpt_line_range.start, - column: 0, - }, - EDITABLE_REGION_START_MARKER_WITH_NEWLINE, - ), - (self.request.cursor_point, CURSOR_MARKER), - ( - Point { - line: self.request.excerpt_line_range.end, - column: 0, - }, - EDITABLE_REGION_END_MARKER_WITH_NEWLINE, - ), - ], - PromptFormat::LabeledSections => vec![(self.request.cursor_point, CURSOR_MARKER)], - PromptFormat::NumLinesUniDiff => { - vec![(self.request.cursor_point, CURSOR_MARKER)] - } - PromptFormat::OnlySnippets => vec![], - }; - - let mut prompt = match self.request.prompt_format { - PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(), - PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(), - PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(), - // only intended for use via zeta_cli - PromptFormat::OnlySnippets => String::new(), - }; - - if 
self.request.events.is_empty() { - prompt.push_str("(No edit history)\n\n"); - } else { - prompt.push_str( - "The following are the latest edits made by the user, from earlier to later.\n\n", - ); - Self::push_events(&mut prompt, &self.request.events); - } - - if self.request.prompt_format == PromptFormat::NumLinesUniDiff { - if self.request.referenced_declarations.is_empty() { - prompt.push_str(indoc! {" - # File under the cursor: - - The cursor marker <|user_cursor|> indicates the current user cursor position. - The file is in current state, edits from edit history have been applied. - We prepend line numbers (e.g., `123|`); they are not part of the file. - - "}); - } else { - // Note: This hasn't been trained on yet - prompt.push_str(indoc! {" - # Code Excerpts: - - The cursor marker <|user_cursor|> indicates the current user cursor position. - Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor. - Context excerpts are not guaranteed to be relevant, so use your own judgement. - Files are in their current state, edits from edit history have been applied. - We prepend line numbers (e.g., `123|`); they are not part of the file. 
- - "}); - } - } else { - prompt.push_str("\n## Code\n\n"); - } - let section_labels = - self.push_file_snippets(&mut prompt, &mut excerpt_file_insertions, file_snippets)?; - - if self.request.prompt_format == PromptFormat::NumLinesUniDiff { - prompt.push_str(UNIFIED_DIFF_REMINDER); - } - - Ok((prompt, section_labels)) - } + self.push_file_snippets(prompt, excerpt_file_insertions, file_snippets)?; - fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) { - if events.is_empty() { - return; - }; - - writeln!(output, "`````diff").unwrap(); - for event in events { - writeln!(output, "{}", event).unwrap(); - } - writeln!(output, "`````\n").unwrap(); + Ok(section_labels) } fn push_file_snippets( diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs index a266212355795c2284fa30b054338608cb45fa9c..fe6b6678c99181facc4269df187c32c5a72ab565 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -79,6 +79,7 @@ impl CodestralCompletionProvider { suffix: String, model: String, max_tokens: Option, + api_url: String, ) -> Result { let start_time = Instant::now(); @@ -111,7 +112,7 @@ impl CodestralCompletionProvider { let http_request = http_client::Request::builder() .method(http_client::Method::POST) - .uri(format!("{}/v1/fim/completions", CODESTRAL_API_URL)) + .uri(format!("{}/v1/fim/completions", api_url)) .header("Content-Type", "application/json") .header("Authorization", format!("Bearer {}", api_key)) .body(http_client::AsyncBody::from(request_body))?; @@ -211,6 +212,12 @@ impl EditPredictionProvider for CodestralCompletionProvider { .clone() .unwrap_or_else(|| "codestral-latest".to_string()); let max_tokens = settings.edit_predictions.codestral.max_tokens; + let api_url = settings + .edit_predictions + .codestral + .api_url + .clone() + .unwrap_or_else(|| CODESTRAL_API_URL.to_string()); self.pending_request = Some(cx.spawn(async move |this, cx| { if debounce { @@ -242,6 +249,7 @@ impl 
EditPredictionProvider for CodestralCompletionProvider { suffix, model, max_tokens, + api_url, ) .await { diff --git a/crates/collab/k8s/postgrest.template.yml b/crates/collab/k8s/postgrest.template.yml deleted file mode 100644 index 4819408bffd629235d47fb6be930baf48d982c7b..0000000000000000000000000000000000000000 --- a/crates/collab/k8s/postgrest.template.yml +++ /dev/null @@ -1,175 +0,0 @@ ---- -kind: Service -apiVersion: v1 -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: postgrest - annotations: - service.beta.kubernetes.io/do-loadbalancer-name: "postgrest-${ZED_KUBE_NAMESPACE}" - service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443" - service.beta.kubernetes.io/do-loadbalancer-certificate-id: ${ZED_DO_CERTIFICATE_ID} - service.beta.kubernetes.io/do-loadbalancer-disable-lets-encrypt-dns-records: "true" -spec: - type: LoadBalancer - selector: - app: nginx - ports: - - name: web - protocol: TCP - port: 443 - targetPort: 8080 - ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: nginx -spec: - replicas: 1 - selector: - matchLabels: - app: nginx - template: - metadata: - labels: - app: nginx - spec: - containers: - - name: nginx - image: nginx:latest - ports: - - containerPort: 8080 - protocol: TCP - volumeMounts: - - name: nginx-config - mountPath: /etc/nginx/nginx.conf - subPath: nginx.conf - volumes: - - name: nginx-config - configMap: - name: nginx-config - ---- -apiVersion: v1 -kind: ConfigMap -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: nginx-config -data: - nginx.conf: | - events {} - - http { - server { - listen 8080; - - location /app/ { - proxy_pass http://postgrest-app:8080/; - } - - location /llm/ { - proxy_pass http://postgrest-llm:8080/; - } - } - } - ---- -apiVersion: v1 -kind: Service -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: postgrest-app -spec: - selector: - app: postgrest-app - ports: - - protocol: TCP - port: 8080 - targetPort: 8080 - ---- -apiVersion: v1 -kind: 
Service -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: postgrest-llm -spec: - selector: - app: postgrest-llm - ports: - - protocol: TCP - port: 8080 - targetPort: 8080 - ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: postgrest-app -spec: - replicas: 1 - selector: - matchLabels: - app: postgrest-app - template: - metadata: - labels: - app: postgrest-app - spec: - containers: - - name: postgrest - image: "postgrest/postgrest" - ports: - - containerPort: 8080 - protocol: TCP - env: - - name: PGRST_SERVER_PORT - value: "8080" - - name: PGRST_DB_URI - valueFrom: - secretKeyRef: - name: database - key: url - - name: PGRST_JWT_SECRET - valueFrom: - secretKeyRef: - name: postgrest - key: jwt_secret - ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: postgrest-llm -spec: - replicas: 1 - selector: - matchLabels: - app: postgrest-llm - template: - metadata: - labels: - app: postgrest-llm - spec: - containers: - - name: postgrest - image: "postgrest/postgrest" - ports: - - containerPort: 8080 - protocol: TCP - env: - - name: PGRST_SERVER_PORT - value: "8080" - - name: PGRST_DB_URI - valueFrom: - secretKeyRef: - name: llm-database - key: url - - name: PGRST_JWT_SECRET - valueFrom: - secretKeyRef: - name: postgrest - key: jwt_secret diff --git a/crates/collab/postgrest_app.conf b/crates/collab/postgrest_app.conf deleted file mode 100644 index 5d3b0e65b738ed2291c782f62d7e45a8b43c9895..0000000000000000000000000000000000000000 --- a/crates/collab/postgrest_app.conf +++ /dev/null @@ -1,4 +0,0 @@ -db-uri = "postgres://postgres@localhost/zed" -server-port = 8081 -jwt-secret = "the-postgrest-jwt-secret-for-authorization" -log-level = "info" diff --git a/crates/collab/postgrest_llm.conf b/crates/collab/postgrest_llm.conf deleted file mode 100644 index 3a0cdfa4933065f6ac4beeb6f5ec52b0c6cecf0a..0000000000000000000000000000000000000000 --- a/crates/collab/postgrest_llm.conf +++ 
/dev/null @@ -1,4 +0,0 @@ -db-uri = "postgres://postgres@localhost/zed_llm" -server-port = 8082 -jwt-secret = "the-postgrest-jwt-secret-for-authorization" -log-level = "info" diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index f675cd3522b0f0e273db7528d62f31e37ceda794..73fdd8da8890d62f7da39f944edfe333d2c983aa 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -17,12 +17,14 @@ use editor::{ use fs::Fs; use futures::{SinkExt, StreamExt, channel::mpsc, lock::Mutex}; use git::repository::repo_path; -use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; +use gpui::{ + App, Rgba, SharedString, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext, +}; use indoc::indoc; use language::FakeLspAdapter; use lsp::LSP_REQUEST_TIMEOUT; use project::{ - ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT, + ProgressToken, ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT, lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro}, }; use recent_projects::disconnected_overlay::DisconnectedOverlay; @@ -1283,12 +1285,14 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes }); executor.run_until_parked(); + let token = ProgressToken::String(SharedString::from("the-token")); + project_a.read_with(cx_a, |project, cx| { let status = project.language_server_statuses(cx).next().unwrap().1; assert_eq!(status.name.0, "the-language-server"); assert_eq!(status.pending_work.len(), 1); assert_eq!( - status.pending_work["the-token"].message.as_ref().unwrap(), + status.pending_work[&token].message.as_ref().unwrap(), "the-message" ); }); @@ -1322,7 +1326,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes assert_eq!(status.name.0, "the-language-server"); assert_eq!(status.pending_work.len(), 1); assert_eq!( - status.pending_work["the-token"].message.as_ref().unwrap(), + 
status.pending_work[&token].message.as_ref().unwrap(), "the-message-2" ); }); @@ -1332,7 +1336,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes assert_eq!(status.name.0, "the-language-server"); assert_eq!(status.pending_work.len(), 1); assert_eq!( - status.pending_work["the-token"].message.as_ref().unwrap(), + status.pending_work[&token].message.as_ref().unwrap(), "the-message-2" ); }); @@ -2585,7 +2589,7 @@ async fn test_lsp_pull_diagnostics( capabilities: capabilities.clone(), initializer: Some(Box::new(move |fake_language_server| { let expected_workspace_diagnostic_token = lsp::ProgressToken::String(format!( - "workspace/diagnostic-{}-1", + "workspace/diagnostic/{}/1", fake_language_server.server.server_id() )); let closure_workspace_diagnostics_pulls_result_ids = closure_workspace_diagnostics_pulls_result_ids.clone(); diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index bfbf9721fab6df79ddd97810fa5b1d70ee701866..57d510d94c15e9c7c2d62873870019eda3e0d7d5 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -1265,6 +1265,13 @@ impl CollabPanel { window.handler_for(&this, move |this, _, cx| { this.copy_channel_link(channel_id, cx) }), + ) + .entry( + "Copy Channel Notes Link", + None, + window.handler_for(&this, move |this, _, cx| { + this.copy_channel_notes_link(channel_id, cx) + }), ); let mut has_destructive_actions = false; @@ -2220,6 +2227,15 @@ impl CollabPanel { cx.write_to_clipboard(item) } + fn copy_channel_notes_link(&mut self, channel_id: ChannelId, cx: &mut Context) { + let channel_store = self.channel_store.read(cx); + let Some(channel) = channel_store.channel_for_id(channel_id) else { + return; + }; + let item = ClipboardItem::new_string(channel.notes_link(None, cx)); + cx.write_to_clipboard(item) + } + fn render_signed_out(&mut self, cx: &mut Context) -> Div { let collab_blurb = "Work with your team in realtime with collaborative 
editing, voice, shared notes and more."; diff --git a/crates/command_palette/Cargo.toml b/crates/command_palette/Cargo.toml index 6e8d2bb5ae7ce079296b061a0c00616191b4382a..f21c202721fa29644e17df499fcfb288a72dc492 100644 --- a/crates/command_palette/Cargo.toml +++ b/crates/command_palette/Cargo.toml @@ -20,6 +20,7 @@ command_palette_hooks.workspace = true db.workspace = true fuzzy.workspace = true gpui.workspace = true +menu.workspace = true log.workspace = true picker.workspace = true postage.workspace = true diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index 4b883d890b3ca5b54459bd0ead3322acfe5b6f41..aacc7c5262c87bf8bcf2d17f7bbda1a63b020f91 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -22,7 +22,7 @@ use persistence::COMMAND_PALETTE_HISTORY; use picker::{Picker, PickerDelegate}; use postage::{sink::Sink, stream::Stream}; use settings::Settings; -use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, h_flex, prelude::*, v_flex}; +use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, prelude::*}; use util::ResultExt; use workspace::{ModalView, Workspace, WorkspaceSettings}; use zed_actions::{OpenZedUrl, command_palette::Toggle}; @@ -143,7 +143,7 @@ impl Focusable for CommandPalette { } impl Render for CommandPalette { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, _: &mut Context) -> impl IntoElement { v_flex() .key_context("CommandPalette") .w(rems(34.)) @@ -261,6 +261,17 @@ impl CommandPaletteDelegate { HashMap::new() } } + + fn selected_command(&self) -> Option<&Command> { + let action_ix = self + .matches + .get(self.selected_ix) + .map(|m| m.candidate_id) + .unwrap_or(self.selected_ix); + // this gets called in headless tests where there are no commands loaded + // so we need to return an Option here + 
self.commands.get(action_ix) + } } impl PickerDelegate for CommandPaletteDelegate { @@ -411,7 +422,20 @@ impl PickerDelegate for CommandPaletteDelegate { .log_err(); } - fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context>) { + fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context>) { + if secondary { + let Some(selected_command) = self.selected_command() else { + return; + }; + let action_name = selected_command.action.name(); + let open_keymap = Box::new(zed_actions::ChangeKeybinding { + action: action_name.to_string(), + }); + window.dispatch_action(open_keymap, cx); + self.dismissed(window, cx); + return; + } + if self.matches.is_empty() { self.dismissed(window, cx); return; @@ -448,6 +472,7 @@ impl PickerDelegate for CommandPaletteDelegate { ) -> Option { let matching_command = self.matches.get(ix)?; let command = self.commands.get(matching_command.candidate_id)?; + Some( ListItem::new(ix) .inset(true) @@ -470,6 +495,59 @@ impl PickerDelegate for CommandPaletteDelegate { ), ) } + + fn render_footer( + &self, + window: &mut Window, + cx: &mut Context>, + ) -> Option { + let selected_command = self.selected_command()?; + let keybind = + KeyBinding::for_action_in(&*selected_command.action, &self.previous_focus_handle, cx); + + let focus_handle = &self.previous_focus_handle; + let keybinding_buttons = if keybind.has_binding(window) { + Button::new("change", "Change Keybinding…") + .key_binding( + KeyBinding::for_action_in(&menu::SecondaryConfirm, focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(move |_, window, cx| { + window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx); + }) + } else { + Button::new("add", "Add Keybinding…") + .key_binding( + KeyBinding::for_action_in(&menu::SecondaryConfirm, focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(move |_, window, cx| { + window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx); + }) + }; + + Some( + h_flex() 
+ .w_full() + .p_1p5() + .gap_1() + .justify_end() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child(keybinding_buttons) + .child( + Button::new("run-action", "Run") + .key_binding( + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_, window, cx| { + window.dispatch_action(menu::Confirm.boxed_clone(), cx) + }), + ) + .into_any(), + ) + } } pub fn humanize_action_name(name: &str) -> String { diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index 3fc7e8ce392b5ea3982a168fcc8f6dcfad1f7313..a8fabd327a3de630ff884899fe7af1167932618c 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -566,6 +566,7 @@ impl StackFrameList { this.activate_selected_entry(window, cx); })) .hover(|style| style.bg(cx.theme().colors().element_hover).cursor_pointer()) + .overflow_x_scroll() .child( v_flex() .gap_0p5() diff --git a/crates/debugger_ui/src/session/running/variable_list.rs b/crates/debugger_ui/src/session/running/variable_list.rs index f2b79523fe3d7329073ad618a9d5c5d219a32f3c..c69bdfbe7ca8712284dd971d2e86f31f99cd696d 100644 --- a/crates/debugger_ui/src/session/running/variable_list.rs +++ b/crates/debugger_ui/src/session/running/variable_list.rs @@ -1129,6 +1129,7 @@ impl VariableList { this.color(Color::from(color)) }), ) + .tooltip(Tooltip::text(value)) } }) .into_any_element() diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 810b84efcd40de6e507dfe12b1a1a7f89d2ec4cf..276f20a7aacc9315f27a929876984342edc8d394 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -539,6 +539,10 @@ actions!( GoToParentModule, /// Goes to the previous change in the file. GoToPreviousChange, + /// Goes to the next reference to the symbol under the cursor. 
+ GoToNextReference, + /// Goes to the previous reference to the symbol under the cursor. + GoToPreviousReference, /// Goes to the type definition of the symbol at cursor. GoToTypeDefinition, /// Goes to type definition in a split pane. @@ -617,6 +621,8 @@ actions!( NextEditPrediction, /// Scrolls to the next screen. NextScreen, + /// Goes to the next snippet tabstop if one exists. + NextSnippetTabstop, /// Opens the context menu at cursor position. OpenContextMenu, /// Opens excerpts from the current file. @@ -650,6 +656,8 @@ actions!( Paste, /// Navigates to the previous edit prediction. PreviousEditPrediction, + /// Goes to the previous snippet tabstop if one exists. + PreviousSnippetTabstop, /// Redoes the last undone edit. Redo, /// Redoes the last selection change. diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 567533aef556c10a966bc2574a0056c3a115f916..7a63723f53a49483eaa728373a5ae8530aa6f4d6 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1440,7 +1440,7 @@ where self.current_chunk.as_ref().and_then(|(chunk, idx)| { let mut idx = *idx; let mut diff = 0; - while idx > 0 && chunk.chars & (1 << idx) == 0 { + while idx > 0 && chunk.chars & (1u128.unbounded_shl(idx)) == 0 { idx -= 1; diff += 1; } @@ -1460,7 +1460,7 @@ where fn is_char_boundary(&self) -> bool { self.current_chunk .as_ref() - .is_some_and(|(chunk, idx)| (chunk.chars & (1 << *idx.min(&127))) != 0) + .is_some_and(|(chunk, idx)| (chunk.chars & 1u128.unbounded_shl(*idx)) != 0) } /// distance: length to move forward while searching for the next tab stop @@ -1483,18 +1483,20 @@ where self.byte_offset += overshoot; self.char_offset += get_char_offset( - chunk_position..(chunk_position + overshoot).saturating_sub(1).min(127), + chunk_position..(chunk_position + overshoot).saturating_sub(1), chunk.chars, ); - self.current_chunk = Some((chunk, chunk_position + overshoot)); + if chunk_position + 
overshoot < 128 { + self.current_chunk = Some((chunk, chunk_position + overshoot)); + } return None; } self.byte_offset += chunk_distance; self.char_offset += get_char_offset( - chunk_position..(chunk_position + chunk_distance).saturating_sub(1).min(127), + chunk_position..(chunk_position + chunk_distance).saturating_sub(1), chunk.chars, ); distance_traversed += chunk_distance; @@ -1546,8 +1548,6 @@ where #[inline(always)] fn get_char_offset(range: Range, bit_map: u128) -> u32 { - // This edge case can happen when we're at chunk position 128 - if range.start == range.end { return if (1u128 << range.start) & bit_map == 0 { 0 @@ -1555,7 +1555,7 @@ fn get_char_offset(range: Range, bit_map: u128) -> u32 { 1 }; } - let end_shift: u128 = 127u128 - range.end.min(127) as u128; + let end_shift: u128 = 127u128 - range.end as u128; let mut bit_mask = (u128::MAX >> range.start) << range.start; bit_mask = (bit_mask << end_shift) >> end_shift; let bit_map = bit_map & bit_mask; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e789318dff7e2238d3c7b9df8d440485873e6779..720b5b0fb6630dfb60d1fea43379e224e0d8d92a 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2454,6 +2454,10 @@ impl Editor { key_context.add("renaming"); } + if !self.snippet_stack.is_empty() { + key_context.add("in_snippet"); + } + match self.context_menu.borrow().as_ref() { Some(CodeContextMenu::Completions(menu)) => { if menu.visible() { @@ -9963,6 +9967,38 @@ impl Editor { self.outdent(&Outdent, window, cx); } + pub fn next_snippet_tabstop( + &mut self, + _: &NextSnippetTabstop, + window: &mut Window, + cx: &mut Context, + ) { + if self.mode.is_single_line() || self.snippet_stack.is_empty() { + return; + } + + if self.move_to_next_snippet_tabstop(window, cx) { + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); + return; + } + } + + pub fn previous_snippet_tabstop( + &mut self, + _: &PreviousSnippetTabstop, + window: &mut Window, + cx: &mut 
Context, + ) { + if self.mode.is_single_line() || self.snippet_stack.is_empty() { + return; + } + + if self.move_to_prev_snippet_tabstop(window, cx) { + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); + return; + } + } + pub fn tab(&mut self, _: &Tab, window: &mut Window, cx: &mut Context) { if self.mode.is_single_line() { cx.propagate(); @@ -10535,7 +10571,7 @@ impl Editor { for selection in self .selections - .all::(&self.display_snapshot(cx)) + .all_adjusted(&self.display_snapshot(cx)) .iter() { let Some(wrap_config) = snapshot @@ -14348,10 +14384,6 @@ impl Editor { let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); let mut next_selected_range = None; - // Collect and sort selection ranges for efficient overlap checking - let mut selection_ranges: Vec<_> = selections.iter().map(|s| s.range()).collect(); - selection_ranges.sort_by_key(|r| r.start); - let bytes_after_last_selection = buffer.bytes_in_range(last_selection.end..buffer.len()); let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start); @@ -14373,18 +14405,11 @@ impl Editor { || (!buffer.is_inside_word(offset_range.start, None) && !buffer.is_inside_word(offset_range.end, None)) { - // Use binary search to check for overlap (O(log n)) - let overlaps = selection_ranges - .binary_search_by(|range| { - if range.end <= offset_range.start { - std::cmp::Ordering::Less - } else if range.start >= offset_range.end { - std::cmp::Ordering::Greater - } else { - std::cmp::Ordering::Equal - } - }) - .is_ok(); + let idx = selections + .partition_point(|selection| selection.end <= offset_range.start); + let overlaps = selections + .get(idx) + .map_or(false, |selection| selection.start < offset_range.end); if !overlaps { next_selected_range = Some(offset_range); @@ -15869,7 +15894,7 @@ impl Editor { ) { let current_scroll_position = self.scroll_position(cx); let lines_to_expand = EditorSettings::get_global(cx).expand_excerpt_lines; - let mut should_scroll_up = 
false; + let mut scroll = None; if direction == ExpandExcerptDirection::Down { let multi_buffer = self.buffer.read(cx); @@ -15882,17 +15907,30 @@ impl Editor { let excerpt_end_row = Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; let last_row = buffer_snapshot.max_point().row; let lines_below = last_row.saturating_sub(excerpt_end_row); - should_scroll_up = lines_below >= lines_to_expand; + if lines_below >= lines_to_expand { + scroll = Some( + current_scroll_position + + gpui::Point::new(0.0, lines_to_expand as ScrollOffset), + ); + } } } + if direction == ExpandExcerptDirection::Up + && self + .buffer + .read(cx) + .snapshot(cx) + .excerpt_before(excerpt) + .is_none() + { + scroll = Some(current_scroll_position); + } self.buffer.update(cx, |buffer, cx| { buffer.expand_excerpts([excerpt], lines_to_expand, direction, cx) }); - if should_scroll_up { - let new_scroll_position = - current_scroll_position + gpui::Point::new(0.0, lines_to_expand as ScrollOffset); + if let Some(new_scroll_position) = scroll { self.set_scroll_position(new_scroll_position, window, cx); } } @@ -16710,6 +16748,139 @@ impl Editor { }) } + fn go_to_next_reference( + &mut self, + _: &GoToNextReference, + window: &mut Window, + cx: &mut Context, + ) { + let task = self.go_to_reference_before_or_after_position(Direction::Next, 1, window, cx); + if let Some(task) = task { + task.detach(); + }; + } + + fn go_to_prev_reference( + &mut self, + _: &GoToPreviousReference, + window: &mut Window, + cx: &mut Context, + ) { + let task = self.go_to_reference_before_or_after_position(Direction::Prev, 1, window, cx); + if let Some(task) = task { + task.detach(); + }; + } + + pub fn go_to_reference_before_or_after_position( + &mut self, + direction: Direction, + count: usize, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + let selection = self.selections.newest_anchor(); + let head = selection.head(); + + let multi_buffer = self.buffer.read(cx); + + let (buffer, text_head) = 
multi_buffer.text_anchor_for_position(head, cx)?; + let workspace = self.workspace()?; + let project = workspace.read(cx).project().clone(); + let references = + project.update(cx, |project, cx| project.references(&buffer, text_head, cx)); + Some(cx.spawn_in(window, async move |editor, cx| -> Result<()> { + let Some(locations) = references.await? else { + return Ok(()); + }; + + if locations.is_empty() { + // totally normal - the cursor may be on something which is not + // a symbol (e.g. a keyword) + log::info!("no references found under cursor"); + return Ok(()); + } + + let multi_buffer = editor.read_with(cx, |editor, _| editor.buffer().clone())?; + + let multi_buffer_snapshot = + multi_buffer.read_with(cx, |multi_buffer, cx| multi_buffer.snapshot(cx))?; + + let (locations, current_location_index) = + multi_buffer.update(cx, |multi_buffer, cx| { + let mut locations = locations + .into_iter() + .filter_map(|loc| { + let start = multi_buffer.buffer_anchor_to_anchor( + &loc.buffer, + loc.range.start, + cx, + )?; + let end = multi_buffer.buffer_anchor_to_anchor( + &loc.buffer, + loc.range.end, + cx, + )?; + Some(start..end) + }) + .collect::>(); + + // There is an O(n) implementation, but given this list will be + // small (usually <100 items), the extra O(log(n)) factor isn't + // worth the (surprisingly large amount of) extra complexity. + locations + .sort_unstable_by(|l, r| l.start.cmp(&r.start, &multi_buffer_snapshot)); + + let head_offset = head.to_offset(&multi_buffer_snapshot); + + let current_location_index = locations.iter().position(|loc| { + loc.start.to_offset(&multi_buffer_snapshot) <= head_offset + && loc.end.to_offset(&multi_buffer_snapshot) >= head_offset + }); + + (locations, current_location_index) + })?; + + let Some(current_location_index) = current_location_index else { + // This indicates something has gone wrong, because we already + // handle the "no references" case above + log::error!( + "failed to find current reference under cursor. 
Total references: {}", + locations.len() + ); + return Ok(()); + }; + + let destination_location_index = match direction { + Direction::Next => (current_location_index + count) % locations.len(), + Direction::Prev => { + (current_location_index + locations.len() - count % locations.len()) + % locations.len() + } + }; + + // TODO(cameron): is this needed? + // the thinking is to avoid "jumping to the current location" (avoid + // polluting "jumplist" in vim terms) + if current_location_index == destination_location_index { + return Ok(()); + } + + let Range { start, end } = locations[destination_location_index]; + + editor.update_in(cx, |editor, window, cx| { + let effects = SelectionEffects::default(); + + editor.unfold_ranges(&[start..end], false, false, cx); + editor.change_selections(effects, window, cx, |s| { + s.select_ranges([start..start]); + }); + })?; + + Ok(()) + })) + } + pub fn find_all_references( &mut self, _: &FindAllReferences, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index a319ad654d016204dbad748d0aa169dee545a44f..06fbd9d3381f70955049ddde1c7a395945d67c66 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -11066,6 +11066,129 @@ async fn test_snippet_placeholder_choices(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_snippet_tabstop_navigation_with_placeholders(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + fn assert_state(editor: &mut Editor, cx: &mut Context, marked_text: &str) { + let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false); + assert_eq!(editor.text(cx), expected_text); + assert_eq!( + editor + .selections + .ranges::(&editor.display_snapshot(cx)), + selection_ranges + ); + } + + let (text, insertion_ranges) = marked_text_ranges( + indoc! 
{" + ˇ + "}, + false, + ); + + let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx)); + let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx)); + + _ = editor.update_in(cx, |editor, window, cx| { + let snippet = Snippet::parse("type ${1|,i32,u32|} = $2; $3").unwrap(); + + editor + .insert_snippet(&insertion_ranges, snippet, window, cx) + .unwrap(); + + assert_state( + editor, + cx, + indoc! {" + type «» = ;• + "}, + ); + + assert!( + editor.context_menu_visible(), + "Context menu should be visible for placeholder choices" + ); + + editor.next_snippet_tabstop(&NextSnippetTabstop, window, cx); + + assert_state( + editor, + cx, + indoc! {" + type = «»;• + "}, + ); + + assert!( + !editor.context_menu_visible(), + "Context menu should be hidden after moving to next tabstop" + ); + + editor.next_snippet_tabstop(&NextSnippetTabstop, window, cx); + + assert_state( + editor, + cx, + indoc! {" + type = ; ˇ + "}, + ); + + editor.next_snippet_tabstop(&NextSnippetTabstop, window, cx); + + assert_state( + editor, + cx, + indoc! 
{" + type = ; ˇ + "}, + ); + }); + + _ = editor.update_in(cx, |editor, window, cx| { + editor.select_all(&SelectAll, window, cx); + editor.backspace(&Backspace, window, cx); + + let snippet = Snippet::parse("fn ${1|,foo,bar|} = ${2:value}; $3").unwrap(); + let insertion_ranges = editor + .selections + .all(&editor.display_snapshot(cx)) + .iter() + .map(|s| s.range()) + .collect::>(); + + editor + .insert_snippet(&insertion_ranges, snippet, window, cx) + .unwrap(); + + assert_state(editor, cx, "fn «» = value;•"); + + assert!( + editor.context_menu_visible(), + "Context menu should be visible for placeholder choices" + ); + + editor.next_snippet_tabstop(&NextSnippetTabstop, window, cx); + + assert_state(editor, cx, "fn = «valueˇ»;•"); + + editor.previous_snippet_tabstop(&PreviousSnippetTabstop, window, cx); + + assert_state(editor, cx, "fn «» = value;•"); + + assert!( + editor.context_menu_visible(), + "Context menu should be visible again after returning to first tabstop" + ); + + editor.previous_snippet_tabstop(&PreviousSnippetTabstop, window, cx); + + assert_state(editor, cx, "fn «» = value;•"); + }); +} + #[gpui::test] async fn test_snippets(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -12629,12 +12752,6 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { ); } }); - - #[cfg(target_os = "windows")] - let line_ending = "\r\n"; - #[cfg(not(target_os = "windows"))] - let line_ending = "\n"; - // Handle formatting requests to the language server. 
cx.lsp .set_request_handler::({ @@ -12658,7 +12775,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { ), ( lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)), - line_ending.into() + "\n".into() ), ] ); @@ -12669,14 +12786,14 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { lsp::Position::new(1, 0), lsp::Position::new(1, 0), ), - new_text: line_ending.into(), + new_text: "\n".into(), }, lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(2, 0), lsp::Position::new(2, 0), ), - new_text: line_ending.into(), + new_text: "\n".into(), }, ])) } @@ -26662,83 +26779,6 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult )); } -#[gpui::test] -async fn test_non_linux_line_endings_registration(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - let unix_newlines_file_text = "fn main() { - let a = 5; - }"; - let clrf_file_text = unix_newlines_file_text.lines().join("\r\n"); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/a"), - json!({ - "first.rs": &clrf_file_text, - }), - ) - .await; - - let project = Project::test(fs, [path!("/a").as_ref()], cx).await; - let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let cx = &mut VisualTestContext::from_window(*workspace, cx); - - let registered_text = Arc::new(Mutex::new(Vec::new())); - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(rust_lang()); - let mut fake_servers = language_registry.register_fake_lsp( - "Rust", - FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - color_provider: Some(lsp::ColorProviderCapability::Simple(true)), - ..lsp::ServerCapabilities::default() - }, - name: "rust-analyzer", - initializer: Some({ - let registered_text = registered_text.clone(); - Box::new(move |fake_server| { - fake_server.handle_notification::({ - let registered_text = 
registered_text.clone(); - move |params, _| { - registered_text.lock().push(params.text_document.text); - } - }); - }) - }), - ..FakeLspAdapter::default() - }, - ); - - let editor = workspace - .update(cx, |workspace, window, cx| { - workspace.open_abs_path( - PathBuf::from(path!("/a/first.rs")), - OpenOptions::default(), - window, - cx, - ) - }) - .unwrap() - .await - .unwrap() - .downcast::() - .unwrap(); - let _fake_language_server = fake_servers.next().await.unwrap(); - cx.executor().run_until_parked(); - - assert_eq!( - editor.update(cx, |editor, cx| editor.text(cx)), - unix_newlines_file_text, - "Default text API returns \n-separated text", - ); - assert_eq!( - vec![clrf_file_text], - registered_text.lock().drain(..).collect::>(), - "Expected the language server to receive the exact same text from the FS", - ); -} - #[gpui::test] async fn test_race_in_multibuffer_save(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -26942,3 +26982,123 @@ async fn test_end_of_editor_context(cx: &mut TestAppContext) { assert!(!e.key_context(window, cx).contains("end_of_input")); }); } + +#[gpui::test] +async fn test_next_prev_reference(cx: &mut TestAppContext) { + const CYCLE_POSITIONS: &[&'static str] = &[ + indoc! {" + fn foo() { + let ˇabc = 123; + let x = abc + 1; + let y = abc + 2; + let z = abc + 2; + } + "}, + indoc! {" + fn foo() { + let abc = 123; + let x = ˇabc + 1; + let y = abc + 2; + let z = abc + 2; + } + "}, + indoc! {" + fn foo() { + let abc = 123; + let x = abc + 1; + let y = ˇabc + 2; + let z = abc + 2; + } + "}, + indoc! {" + fn foo() { + let abc = 123; + let x = abc + 1; + let y = abc + 2; + let z = ˇabc + 2; + } + "}, + ]; + + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + references_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + cx, + ) + .await; + + // importantly, the cursor is in the middle + cx.set_state(indoc! 
{" + fn foo() { + let aˇbc = 123; + let x = abc + 1; + let y = abc + 2; + let z = abc + 2; + } + "}); + + let reference_ranges = [ + lsp::Position::new(1, 8), + lsp::Position::new(2, 12), + lsp::Position::new(3, 12), + lsp::Position::new(4, 12), + ] + .map(|start| lsp::Range::new(start, lsp::Position::new(start.line, start.character + 3))); + + cx.lsp + .set_request_handler::(move |params, _cx| async move { + Ok(Some( + reference_ranges + .map(|range| lsp::Location { + uri: params.text_document_position.text_document.uri.clone(), + range, + }) + .to_vec(), + )) + }); + + let _move = async |direction, count, cx: &mut EditorLspTestContext| { + cx.update_editor(|editor, window, cx| { + editor.go_to_reference_before_or_after_position(direction, count, window, cx) + }) + .unwrap() + .await + .unwrap() + }; + + _move(Direction::Next, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[1]); + + _move(Direction::Next, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[2]); + + _move(Direction::Next, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[3]); + + // loops back to the start + _move(Direction::Next, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[0]); + + // loops back to the end + _move(Direction::Prev, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[3]); + + _move(Direction::Prev, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[2]); + + _move(Direction::Prev, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[1]); + + _move(Direction::Prev, 1, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[0]); + + _move(Direction::Next, 3, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[3]); + + _move(Direction::Prev, 2, &mut cx).await; + cx.assert_editor_state(CYCLE_POSITIONS[1]); +} diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 41a9809bfa75f091c1c03d924ffebf117d4fd2d7..17b9ea9ced8d34396426e0a2640904b6e8df97a4 100644 --- a/crates/editor/src/element.rs +++ 
b/crates/editor/src/element.rs @@ -232,6 +232,8 @@ impl EditorElement { register_action(editor, window, Editor::blame_hover); register_action(editor, window, Editor::delete); register_action(editor, window, Editor::tab); + register_action(editor, window, Editor::next_snippet_tabstop); + register_action(editor, window, Editor::previous_snippet_tabstop); register_action(editor, window, Editor::backtab); register_action(editor, window, Editor::indent); register_action(editor, window, Editor::outdent); @@ -495,6 +497,8 @@ impl EditorElement { register_action(editor, window, Editor::collapse_all_diff_hunks); register_action(editor, window, Editor::go_to_previous_change); register_action(editor, window, Editor::go_to_next_change); + register_action(editor, window, Editor::go_to_prev_reference); + register_action(editor, window, Editor::go_to_next_reference); register_action(editor, window, |editor, action, window, cx| { if let Some(task) = editor.format(action, window, cx) { @@ -3173,7 +3177,7 @@ impl EditorElement { i += 1; } delta = 1; - i = head_idx.min(buffer_rows.len() as u32 - 1); + i = head_idx.min(buffer_rows.len().saturating_sub(1) as u32); while i > 0 && buffer_rows[i as usize].buffer_row.is_none() { i -= 1; } @@ -5105,6 +5109,7 @@ impl EditorElement { snapshot, visible_display_row_range.clone(), max_size, + &editor.text_layout_details(window), window, cx, ) diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 3d83e3a5cce937b92255810003a6ff951bb84d95..4f210cc9db8913eb7c46c6150d1ecd5d4f9020bb 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -1,6 +1,7 @@ use crate::Editor; use anyhow::Result; use collections::HashMap; +use futures::StreamExt; use git::{ GitHostingProviderRegistry, GitRemote, Oid, blame::{Blame, BlameEntry, ParsedCommitMessage}, @@ -507,7 +508,7 @@ impl GitBlame { let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe()); let blame_buffer = project.blame_buffer(&buffer, 
None, cx); - Some((id, snapshot, buffer_edits, blame_buffer)) + Some(async move { (id, snapshot, buffer_edits, blame_buffer.await) }) }) .collect::>() }); @@ -517,10 +518,14 @@ impl GitBlame { let (result, errors) = cx .background_spawn({ async move { + let blame = futures::stream::iter(blame) + .buffered(4) + .collect::>() + .await; let mut res = vec![]; let mut errors = vec![]; for (id, snapshot, buffer_edits, blame) in blame { - match blame.await { + match blame { Ok(Some(Blame { entries, messages, diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index e6eb5c1ea28c07248ef663097cac2c586b7db107..6227d90e9be7a5fbbe98b9dd8900860c219d07d2 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -3,6 +3,7 @@ use crate::{ EditorSnapshot, GlobalDiagnosticRenderer, Hover, display_map::{InlayOffset, ToDisplayPoint, invisibles::is_invisible}, hover_links::{InlayHighlight, RangeInEditor}, + movement::TextLayoutDetails, scroll::ScrollAmount, }; use anyhow::Context as _; @@ -766,9 +767,13 @@ impl HoverState { snapshot: &EditorSnapshot, visible_rows: Range, max_size: Size, + text_layout_details: &TextLayoutDetails, window: &mut Window, cx: &mut Context, ) -> Option<(DisplayPoint, Vec)> { + if !self.visible() { + return None; + } // If there is a diagnostic, position the popovers based on that. 
// Otherwise use the start of the hover range let anchor = self @@ -791,11 +796,29 @@ impl HoverState { } }) })?; - let point = anchor.to_display_point(&snapshot.display_snapshot); - - // Don't render if the relevant point isn't on screen - if !self.visible() || !visible_rows.contains(&point.row()) { - return None; + let mut point = anchor.to_display_point(&snapshot.display_snapshot); + + // Clamp the point within the visible rows in case the popup source spans multiple lines + if point.row() < visible_rows.start { + point = crate::movement::down_by_rows( + &snapshot.display_snapshot, + point, + (visible_rows.start - point.row()).0, + text::SelectionGoal::None, + true, + text_layout_details, + ) + .0; + } else if visible_rows.end <= point.row() { + point = crate::movement::up_by_rows( + &snapshot.display_snapshot, + point, + (visible_rows.end - point.row()).0, + text::SelectionGoal::None, + true, + text_layout_details, + ) + .0; } let mut elements = Vec::new(); diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 3aab7be8207ef09b9ae18140cbfcd749eb9bc0d1..74fe9988763b976f315624b8e1ab36110e2137ee 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -344,7 +344,7 @@ impl Editor { .extend(invalidate_hints_for_buffers); let mut buffers_to_query = HashMap::default(); - for (excerpt_id, (buffer, buffer_version, visible_range)) in visible_excerpts { + for (_, (buffer, buffer_version, visible_range)) in visible_excerpts { let buffer_id = buffer.read(cx).remote_id(); if !self.registered_buffers.contains_key(&buffer_id) { continue; @@ -358,13 +358,11 @@ impl Editor { buffers_to_query .entry(buffer_id) .or_insert_with(|| VisibleExcerpts { - excerpts: Vec::new(), ranges: Vec::new(), buffer_version: buffer_version.clone(), buffer: buffer.clone(), }); visible_excerpts.buffer_version = buffer_version; - visible_excerpts.excerpts.push(excerpt_id); 
visible_excerpts.ranges.push(buffer_anchor_range); } @@ -850,7 +848,6 @@ impl Editor { #[derive(Debug)] struct VisibleExcerpts { - excerpts: Vec, ranges: Vec>, buffer_version: Global, buffer: Entity, @@ -1184,17 +1181,17 @@ pub mod tests { }) .unwrap(); - let progress_token = "test_progress_token"; + let progress_token = 42; fake_server .request::(lsp::WorkDoneProgressCreateParams { - token: lsp::ProgressToken::String(progress_token.to_string()), + token: lsp::ProgressToken::Number(progress_token), }) .await .into_response() .expect("work done progress create request failed"); cx.executor().run_until_parked(); fake_server.notify::(lsp::ProgressParams { - token: lsp::ProgressToken::String(progress_token.to_string()), + token: lsp::ProgressToken::Number(progress_token), value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin( lsp::WorkDoneProgressBegin::default(), )), @@ -1214,7 +1211,7 @@ pub mod tests { .unwrap(); fake_server.notify::(lsp::ProgressParams { - token: lsp::ProgressToken::String(progress_token.to_string()), + token: lsp::ProgressToken::Number(progress_token), value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End( lsp::WorkDoneProgressEnd::default(), )), @@ -2017,7 +2014,7 @@ pub mod tests { task_lsp_request_ranges.lock().push(params.range); task_lsp_request_count.fetch_add(1, Ordering::Release); Ok(Some(vec![lsp::InlayHint { - position: params.range.end, + position: params.range.start, label: lsp::InlayHintLabel::String( params.range.end.line.to_string(), ), @@ -2698,7 +2695,7 @@ let c = 3;"# ), ( "main.rs", - lsp::Range::new(lsp::Position::new(50, 0), lsp::Position::new(100, 11)) + lsp::Range::new(lsp::Position::new(50, 0), lsp::Position::new(100, 0)) ), ], lsp_request_ranges @@ -2757,7 +2754,7 @@ let c = 3;"# ), ( "main.rs", - lsp::Range::new(lsp::Position::new(50, 0), lsp::Position::new(100, 11)) + lsp::Range::new(lsp::Position::new(50, 0), lsp::Position::new(100, 0)) ), ], lsp_request_ranges diff --git 
a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 15ff230ec7d1d1b99f4fd91400065f78b88b0b9d..7804910633ad2dbefdbda7a0dfef27a6797eeb97 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -5,7 +5,7 @@ use crate::{ use anyhow::{Context as _, Result, bail}; use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; -use futures::io::BufReader; +use futures::{AsyncReadExt, io::Cursor}; use heck::ToSnakeCase; use http_client::{self, AsyncBody, HttpClient}; use serde::Deserialize; @@ -411,6 +411,8 @@ impl ExtensionBuilder { let mut clang_path = wasi_sdk_dir.clone(); clang_path.extend(["bin", &format!("clang{}", env::consts::EXE_SUFFIX)]); + log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display()); + if fs::metadata(&clang_path).is_ok_and(|metadata| metadata.is_file()) { return Ok(clang_path); } @@ -423,13 +425,19 @@ impl ExtensionBuilder { log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display()); let mut response = self.http.get(&url, AsyncBody::default(), true).await?; - let body = BufReader::new(response.body_mut()); - let body = GzipDecoder::new(body); + let body = GzipDecoder::new({ + // stream the entire request into memory at once as the artifact is quite big (100MB+) + let mut b = vec![]; + response.body_mut().read_to_end(&mut b).await?; + Cursor::new(b) + }); let tar = Archive::new(body); + log::info!("un-tarring wasi-sdk to {}", wasi_sdk_dir.display()); tar.unpack(&tar_out_dir) .await .context("failed to unpack wasi-sdk archive")?; + log::info!("finished downloading wasi-sdk"); let inner_dir = fs::read_dir(&tar_out_dir)? 
.next() diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index 41b7b35d463a520888d4419f141ffdeca332fdac..af09b3e4fb28be1a7f339ac4be6b1e789bcff0f0 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -31,7 +31,8 @@ use util::test::TempTree; #[cfg(test)] #[ctor::ctor] fn init_logger() { - zlog::init_test(); + // show info logs while we debug the extension_store tests hanging. + zlog::init_test_with("info"); } #[gpui::test] @@ -529,10 +530,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } -// todo(windows) -// Disable this test on Windows for now. Because this test hangs at -// `let fake_server = fake_servers.next().await.unwrap();`. -// Reenable this test when we figure out why. #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index bf3732b7e8497a09d5067e11ab78e7165fb54a46..eb26c44f20519b7cdb3a38859f23ce99365fe505 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -658,7 +658,7 @@ impl WasmHost { }; cx.spawn(async move |cx| { let (extension_task, manifest, work_dir, tx, zed_api_version) = - load_extension_task.await?; + cx.background_executor().spawn(load_extension_task).await?; // we need to run run the task in an extension context as wasmtime_wasi may // call into tokio, accessing its runtime handle let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task)?); diff --git a/crates/extensions_ui/src/components.rs b/crates/extensions_ui/src/components.rs index 957980e49f8f4774ce7eb601503db79ce74baceb..bf11abd679c657c6533f5e9e075b1b69c01e8622 100644 --- a/crates/extensions_ui/src/components.rs +++ b/crates/extensions_ui/src/components.rs @@ -1,5 +1,3 @@ mod extension_card; -mod feature_upsell; pub use 
extension_card::*; -pub use feature_upsell::*; diff --git a/crates/extensions_ui/src/components/feature_upsell.rs b/crates/extensions_ui/src/components/feature_upsell.rs deleted file mode 100644 index 0515dd46d30ce9f7e87331f99542940c3efa837a..0000000000000000000000000000000000000000 --- a/crates/extensions_ui/src/components/feature_upsell.rs +++ /dev/null @@ -1,77 +0,0 @@ -use gpui::{AnyElement, Div, StyleRefinement}; -use smallvec::SmallVec; -use ui::prelude::*; - -#[derive(IntoElement)] -pub struct FeatureUpsell { - base: Div, - text: SharedString, - docs_url: Option, - children: SmallVec<[AnyElement; 2]>, -} - -impl FeatureUpsell { - pub fn new(text: impl Into) -> Self { - Self { - base: h_flex(), - text: text.into(), - docs_url: None, - children: SmallVec::new(), - } - } - - pub fn docs_url(mut self, docs_url: impl Into) -> Self { - self.docs_url = Some(docs_url.into()); - self - } -} - -impl ParentElement for FeatureUpsell { - fn extend(&mut self, elements: impl IntoIterator) { - self.children.extend(elements) - } -} - -// Style methods. 
-impl FeatureUpsell { - fn style(&mut self) -> &mut StyleRefinement { - self.base.style() - } - - gpui::border_style_methods!({ - visibility: pub - }); -} - -impl RenderOnce for FeatureUpsell { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - self.base - .py_2() - .px_4() - .justify_between() - .flex_wrap() - .border_color(cx.theme().colors().border_variant) - .child(Label::new(self.text)) - .child(h_flex().gap_2().children(self.children).when_some( - self.docs_url, - |el, docs_url| { - el.child( - Button::new("open_docs", "View Documentation") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) - .on_click({ - move |_event, _window, cx| { - telemetry::event!( - "Documentation Viewed", - source = "Feature Upsell", - url = docs_url, - ); - cx.open_url(&docs_url) - } - }), - ) - }, - )) - } -} diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 1fc1384a133946651f16b3b9bdba742c2882b9a8..cf59f7d200962b2e541c429c7918f622d6e06587 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -24,8 +24,8 @@ use settings::{Settings, SettingsContent}; use strum::IntoEnumIterator as _; use theme::ThemeSettings; use ui::{ - CheckboxWithLabel, Chip, ContextMenu, PopoverMenu, ScrollableHandle, ToggleButton, Tooltip, - WithScrollbar, prelude::*, + Banner, Chip, ContextMenu, Divider, PopoverMenu, ScrollableHandle, Switch, ToggleButton, + Tooltip, WithScrollbar, prelude::*, }; use vim_mode_setting::VimModeSetting; use workspace::{ @@ -34,7 +34,7 @@ use workspace::{ }; use zed_actions::ExtensionCategoryFilter; -use crate::components::{ExtensionCard, FeatureUpsell}; +use crate::components::ExtensionCard; use crate::extension_version_selector::{ ExtensionVersionSelector, ExtensionVersionSelectorDelegate, }; @@ -225,9 +225,9 @@ impl ExtensionFilter { #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] 
enum Feature { + ExtensionRuff, + ExtensionTailwind, Git, - OpenIn, - Vim, LanguageBash, LanguageC, LanguageCpp, @@ -236,13 +236,28 @@ enum Feature { LanguageReact, LanguageRust, LanguageTypescript, + OpenIn, + Vim, } fn keywords_by_feature() -> &'static BTreeMap> { static KEYWORDS_BY_FEATURE: OnceLock>> = OnceLock::new(); KEYWORDS_BY_FEATURE.get_or_init(|| { BTreeMap::from_iter([ + (Feature::ExtensionRuff, vec!["ruff"]), + (Feature::ExtensionTailwind, vec!["tail", "tailwind"]), (Feature::Git, vec!["git"]), + (Feature::LanguageBash, vec!["sh", "bash"]), + (Feature::LanguageC, vec!["c", "clang"]), + (Feature::LanguageCpp, vec!["c++", "cpp", "clang"]), + (Feature::LanguageGo, vec!["go", "golang"]), + (Feature::LanguagePython, vec!["python", "py"]), + (Feature::LanguageReact, vec!["react"]), + (Feature::LanguageRust, vec!["rust", "rs"]), + ( + Feature::LanguageTypescript, + vec!["type", "typescript", "ts"], + ), ( Feature::OpenIn, vec![ @@ -257,17 +272,6 @@ fn keywords_by_feature() -> &'static BTreeMap> { ], ), (Feature::Vim, vec!["vim"]), - (Feature::LanguageBash, vec!["sh", "bash"]), - (Feature::LanguageC, vec!["c", "clang"]), - (Feature::LanguageCpp, vec!["c++", "cpp", "clang"]), - (Feature::LanguageGo, vec!["go", "golang"]), - (Feature::LanguagePython, vec!["python", "py"]), - (Feature::LanguageReact, vec!["react"]), - (Feature::LanguageRust, vec!["rust", "rs"]), - ( - Feature::LanguageTypescript, - vec!["type", "typescript", "ts"], - ), ]) }) } @@ -1336,58 +1340,172 @@ impl ExtensionsPage { } } - fn render_feature_upsells(&self, cx: &mut Context) -> impl IntoElement { - let upsells_count = self.upsells.len(); + fn render_feature_upsell_banner( + &self, + label: SharedString, + docs_url: SharedString, + vim: bool, + cx: &mut Context, + ) -> impl IntoElement { + let docs_url_button = Button::new("open_docs", "View Documentation") + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::Small) + .icon_position(IconPosition::End) + .on_click({ + move |_event, _window, 
cx| { + telemetry::event!( + "Documentation Viewed", + source = "Feature Upsell", + url = docs_url, + ); + cx.open_url(&docs_url) + } + }); - v_flex().children(self.upsells.iter().enumerate().map(|(ix, feature)| { - let upsell = match feature { - Feature::Git => FeatureUpsell::new( - "Zed comes with basic Git support. More Git features are coming in the future.", - ) - .docs_url("https://zed.dev/docs/git"), - Feature::OpenIn => FeatureUpsell::new( - "Zed supports linking to a source line on GitHub and others.", - ) - .docs_url("https://zed.dev/docs/git#git-integrations"), - Feature::Vim => FeatureUpsell::new("Vim support is built-in to Zed!") - .docs_url("https://zed.dev/docs/vim") - .child(CheckboxWithLabel::new( - "enable-vim", - Label::new("Enable vim mode"), - if VimModeSetting::get_global(cx).0 { - ui::ToggleState::Selected + div() + .pt_4() + .px_4() + .child( + Banner::new() + .severity(Severity::Success) + .child(Label::new(label).mt_0p5()) + .map(|this| { + if vim { + this.action_slot( + h_flex() + .gap_1() + .child(docs_url_button) + .child(Divider::vertical().color(ui::DividerColor::Border)) + .child( + h_flex() + .pl_1() + .gap_1() + .child(Label::new("Enable Vim mode")) + .child( + Switch::new( + "enable-vim", + if VimModeSetting::get_global(cx).0 { + ui::ToggleState::Selected + } else { + ui::ToggleState::Unselected + }, + ) + .on_click(cx.listener( + move |this, selection, _, cx| { + telemetry::event!( + "Vim Mode Toggled", + source = "Feature Upsell" + ); + this.update_settings( + selection, + cx, + |setting, value| { + setting.vim_mode = Some(value) + }, + ); + }, + )) + .color(ui::SwitchColor::Accent), + ), + ), + ) } else { - ui::ToggleState::Unselected - }, - cx.listener(move |this, selection, _, cx| { - telemetry::event!("Vim Mode Toggled", source = "Feature Upsell"); - this.update_settings(selection, cx, |setting, value| { - setting.vim_mode = Some(value) - }); - }), - )), - Feature::LanguageBash => FeatureUpsell::new("Shell support is 
built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/bash"), - Feature::LanguageC => FeatureUpsell::new("C support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/c"), - Feature::LanguageCpp => FeatureUpsell::new("C++ support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/cpp"), - Feature::LanguageGo => FeatureUpsell::new("Go support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/go"), - Feature::LanguagePython => FeatureUpsell::new("Python support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/python"), - Feature::LanguageReact => FeatureUpsell::new("React support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/typescript"), - Feature::LanguageRust => FeatureUpsell::new("Rust support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/rust"), - Feature::LanguageTypescript => { - FeatureUpsell::new("Typescript support is built-in to Zed!") - .docs_url("https://zed.dev/docs/languages/typescript") - } + this.action_slot(docs_url_button) + } + }), + ) + .into_any_element() + } + + fn render_feature_upsells(&self, cx: &mut Context) -> impl IntoElement { + let mut container = v_flex(); + + for feature in &self.upsells { + let banner = match feature { + Feature::ExtensionRuff => self.render_feature_upsell_banner( + "Ruff (linter for Python) support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/python#code-formatting--linting".into(), + false, + cx, + ), + Feature::ExtensionTailwind => self.render_feature_upsell_banner( + "Tailwind CSS support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/tailwindcss".into(), + false, + cx, + ), + Feature::Git => self.render_feature_upsell_banner( + "Zed comes with basic Git support—more features are coming in the future." 
+ .into(), + "https://zed.dev/docs/git".into(), + false, + cx, + ), + Feature::LanguageBash => self.render_feature_upsell_banner( + "Shell support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/bash".into(), + false, + cx, + ), + Feature::LanguageC => self.render_feature_upsell_banner( + "C support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/c".into(), + false, + cx, + ), + Feature::LanguageCpp => self.render_feature_upsell_banner( + "C++ support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/cpp".into(), + false, + cx, + ), + Feature::LanguageGo => self.render_feature_upsell_banner( + "Go support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/go".into(), + false, + cx, + ), + Feature::LanguagePython => self.render_feature_upsell_banner( + "Python support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/python".into(), + false, + cx, + ), + Feature::LanguageReact => self.render_feature_upsell_banner( + "React support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/typescript".into(), + false, + cx, + ), + Feature::LanguageRust => self.render_feature_upsell_banner( + "Rust support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/rust".into(), + false, + cx, + ), + Feature::LanguageTypescript => self.render_feature_upsell_banner( + "Typescript support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/typescript".into(), + false, + cx, + ), + Feature::OpenIn => self.render_feature_upsell_banner( + "Zed supports linking to a source line on GitHub and others.".into(), + "https://zed.dev/docs/git#git-integrations".into(), + false, + cx, + ), + Feature::Vim => self.render_feature_upsell_banner( + "Vim support is built-in to Zed!".into(), + "https://zed.dev/docs/vim".into(), + true, + cx, + ), }; + container = container.child(banner); + } - upsell.when(ix < upsells_count, |upsell| upsell.border_b_1()) - })) + container } } diff --git 
a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 06bc5ec4114af01ae4c90f12d676ad027d0c5cc0..eaefd4ba22c34ac2e3c30e822e6dbcd31468f9b8 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1279,18 +1279,17 @@ impl GitRepository for RealGitRepository { .remote_url("upstream") .or_else(|| self.remote_url("origin")); - self.executor - .spawn(async move { - crate::blame::Blame::for_path( - &git_binary_path, - &working_directory?, - &path, - &content, - remote_url, - ) - .await - }) - .boxed() + async move { + crate::blame::Blame::for_path( + &git_binary_path, + &working_directory?, + &path, + &content, + remote_url, + ) + .await + } + .boxed() } fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result> { diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 9b0fb6d8c16b0e44b1bbfd1464f44bb7e88b0cde..b9654ab14e1826c6d90c92878bbc4b55d1ef2959 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -16,7 +16,7 @@ use text::{Bias, Point}; use theme::ActiveTheme; use ui::prelude::*; use util::paths::FILE_ROW_COLUMN_DELIMITER; -use workspace::ModalView; +use workspace::{DismissDecision, ModalView}; pub fn init(cx: &mut App) { LineIndicatorFormat::register(cx); @@ -31,7 +31,16 @@ pub struct GoToLine { _subscriptions: Vec, } -impl ModalView for GoToLine {} +impl ModalView for GoToLine { + fn on_before_dismiss( + &mut self, + _window: &mut Window, + _cx: &mut Context, + ) -> DismissDecision { + self.prev_scroll_position.take(); + DismissDecision::Dismiss(true) + } +} impl Focusable for GoToLine { fn focus_handle(&self, cx: &App) -> FocusHandle { @@ -769,4 +778,171 @@ mod tests { state }) } + + #[gpui::test] + async fn test_scroll_position_on_outside_click(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let file_content = (0..100) + .map(|i| format!("struct Line{};", i)) + .collect::>() + .join("\n"); + 
fs.insert_tree(path!("/dir"), json!({"a.rs": file_content})) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + let editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let go_to_line_view = open_go_to_line_view(&workspace, cx); + + let scroll_position_before_input = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + cx.simulate_input("47"); + let scroll_position_after_input = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + assert_ne!(scroll_position_before_input, scroll_position_after_input); + + drop(go_to_line_view); + workspace.update_in(cx, |workspace, window, cx| { + workspace.hide_modal(window, cx); + }); + cx.run_until_parked(); + + let scroll_position_after_auto_dismiss = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + assert_eq!( + scroll_position_after_auto_dismiss, scroll_position_after_input, + "Dismissing via outside click should maintain new scroll position" + ); + } + + #[gpui::test] + async fn test_scroll_position_on_cancel(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let file_content = (0..100) + .map(|i| format!("struct Line{};", i)) + .collect::>() + .join("\n"); + fs.insert_tree(path!("/dir"), json!({"a.rs": file_content})) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| 
Workspace::test_new(project.clone(), window, cx)); + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + let editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let go_to_line_view = open_go_to_line_view(&workspace, cx); + + let scroll_position_before_input = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + cx.simulate_input("47"); + let scroll_position_after_input = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + assert_ne!(scroll_position_before_input, scroll_position_after_input); + + cx.dispatch_action(menu::Cancel); + drop(go_to_line_view); + cx.run_until_parked(); + + let scroll_position_after_cancel = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + assert_eq!( + scroll_position_after_cancel, scroll_position_after_input, + "Cancel should maintain new scroll position" + ); + } + + #[gpui::test] + async fn test_scroll_position_on_confirm(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let file_content = (0..100) + .map(|i| format!("struct Line{};", i)) + .collect::>() + .join("\n"); + fs.insert_tree(path!("/dir"), json!({"a.rs": file_content})) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + let _buffer = project + .update(cx, |project, cx| { + 
project.open_local_buffer(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + let editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let go_to_line_view = open_go_to_line_view(&workspace, cx); + + let scroll_position_before_input = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + cx.simulate_input("47"); + let scroll_position_after_input = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + assert_ne!(scroll_position_before_input, scroll_position_after_input); + + cx.dispatch_action(menu::Confirm); + drop(go_to_line_view); + cx.run_until_parked(); + + let scroll_position_after_confirm = + editor.update(cx, |editor, cx| editor.scroll_position(cx)); + assert_eq!( + scroll_position_after_confirm, scroll_position_after_input, + "Confirm should maintain new scroll position" + ); + } } diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index af23a336f6230a16040cd98f1f3377c817af05fb..3bec72b2f2726d6373449f6c6828943d7c086909 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -39,6 +39,7 @@ macos-blade = [ "objc2-metal", ] wayland = [ + "bitflags", "blade-graphics", "blade-macros", "blade-util", @@ -52,6 +53,7 @@ wayland = [ "wayland-cursor", "wayland-protocols", "wayland-protocols-plasma", + "wayland-protocols-wlr", "filedescriptor", "xkbcommon", "open", @@ -85,7 +87,8 @@ doctest = false [dependencies] anyhow.workspace = true async-task = "4.7" -backtrace = { version = "0.3", optional = true } +backtrace = { workspace = true, optional = true } +bitflags = { workspace = true, optional = true } blade-graphics = { workspace = true, optional = true } blade-macros = { workspace = true, optional = true } blade-util = { workspace = true, optional = true } @@ -202,6 +205,9 @@ wayland-protocols = { version = "0.31.2", features = [ wayland-protocols-plasma = { version = "0.2.0", 
features = [ "client", ], optional = true } +wayland-protocols-wlr = { version = "0.3.9", features = [ + "client", +], optional = true } # X11 as-raw-xcb-connection = { version = "1", optional = true } @@ -234,7 +240,7 @@ windows-numerics = "0.2" windows-registry = "0.5" [dev-dependencies] -backtrace = "0.3" +backtrace.workspace = true collections = { workspace = true, features = ["test-support"] } env_logger.workspace = true http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/gpui/examples/layer_shell.rs b/crates/gpui/examples/layer_shell.rs new file mode 100644 index 0000000000000000000000000000000000000000..51577b1b26491b8416a7df17ee310fd50dade8a3 --- /dev/null +++ b/crates/gpui/examples/layer_shell.rs @@ -0,0 +1,87 @@ +fn main() { + #[cfg(all(target_os = "linux", feature = "wayland"))] + example::main(); + + #[cfg(not(all(target_os = "linux", feature = "wayland")))] + panic!("This example requires the `wayland` feature and a linux system."); +} + +#[cfg(all(target_os = "linux", feature = "wayland"))] +mod example { + use std::time::{Duration, SystemTime, UNIX_EPOCH}; + + use gpui::{ + App, Application, Bounds, Context, FontWeight, Size, Window, WindowBackgroundAppearance, + WindowBounds, WindowKind, WindowOptions, div, layer_shell::*, point, prelude::*, px, rems, + rgba, white, + }; + + struct LayerShellExample; + + impl LayerShellExample { + fn new(cx: &mut Context) -> Self { + cx.spawn(async move |this, cx| { + loop { + let _ = this.update(cx, |_, cx| cx.notify()); + cx.background_executor() + .timer(Duration::from_millis(500)) + .await; + } + }) + .detach(); + + LayerShellExample + } + } + + impl Render for LayerShellExample { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs(); + + let hours = (now / 3600) % 24; + let minutes = (now / 60) % 60; + let seconds = now % 60; + + div() + .size_full() + .flex() + 
.items_center() + .justify_center() + .text_size(rems(4.5)) + .font_weight(FontWeight::EXTRA_BOLD) + .text_color(white()) + .bg(rgba(0x0000044)) + .rounded_xl() + .child(format!("{:02}:{:02}:{:02}", hours, minutes, seconds)) + } + } + + pub fn main() { + Application::new().run(|cx: &mut App| { + cx.open_window( + WindowOptions { + titlebar: None, + window_bounds: Some(WindowBounds::Windowed(Bounds { + origin: point(px(0.), px(0.)), + size: Size::new(px(500.), px(200.)), + })), + app_id: Some("gpui-layer-shell-example".to_string()), + window_background: WindowBackgroundAppearance::Transparent, + kind: WindowKind::LayerShell(LayerShellOptions { + namespace: "gpui".to_string(), + anchor: Anchor::LEFT | Anchor::RIGHT | Anchor::BOTTOM, + margin: Some((px(0.), px(0.), px(40.), px(0.))), + keyboard_interactivity: KeyboardInteractivity::None, + ..Default::default() + }), + ..Default::default() + }, + |_, cx| cx.new(LayerShellExample::new), + ) + .unwrap(); + }); + } +} diff --git a/crates/gpui/resources/windows/gpui.manifest.xml b/crates/gpui/resources/windows/gpui.manifest.xml index 5a69b434865166dc5f85a9558d28bea6cd646ffe..c3a99d23ff9e60e3604fe0aa8a203345e9c355be 100644 --- a/crates/gpui/resources/windows/gpui.manifest.xml +++ b/crates/gpui/resources/windows/gpui.manifest.xml @@ -1,16 +1,32 @@ - - - - true + + + + + + + + + + + + + + + + + true/pm PerMonitorV2 - - + + - + version='6.0.0.0' + processorArchitecture='*' + publicKeyToken='6595b64144ccf1df' + /> diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 075c7cf32beb400d800ca4f8970f51dae6da7afe..fcba6a6a4e5b3d82262129bc9f7d9bdc72c88da9 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -2,14 +2,13 @@ use crate::{ AnyElement, AnyImageCache, App, Asset, AssetLogger, Bounds, DefiniteLength, Element, ElementId, Entity, GlobalElementId, Hitbox, Image, ImageCache, InspectorElementId, InteractiveElement, Interactivity, IntoElement, LayoutId, Length, 
ObjectFit, Pixels, RenderImage, Resource, - SMOOTH_SVG_SCALE_FACTOR, SharedString, SharedUri, StyleRefinement, Styled, SvgSize, Task, - Window, px, swap_rgba_pa_to_bgra, + SharedString, SharedUri, StyleRefinement, Styled, Task, Window, px, }; use anyhow::{Context as _, Result}; use futures::{AsyncReadExt, Future}; use image::{ - AnimationDecoder, DynamicImage, Frame, ImageBuffer, ImageError, ImageFormat, Rgba, + AnimationDecoder, DynamicImage, Frame, ImageError, ImageFormat, Rgba, codecs::{gif::GifDecoder, webp::WebPDecoder}, }; use smallvec::SmallVec; @@ -160,13 +159,15 @@ pub trait StyledImage: Sized { self } - /// Set the object fit for the image. + /// Set a fallback function that will be invoked to render an error view should + /// the image fail to load. fn with_fallback(mut self, fallback: impl Fn() -> AnyElement + 'static) -> Self { self.image_style().fallback = Some(Box::new(fallback)); self } - /// Set the object fit for the image. + /// Set a fallback function that will be invoked to render a view while the image + /// is still being loaded. fn with_loading(mut self, loading: impl Fn() -> AnyElement + 'static) -> Self { self.image_style().loading = Some(Box::new(loading)); self @@ -631,7 +632,7 @@ impl Asset for ImageAssetLoader { } }; - let data = if let Ok(format) = image::guess_format(&bytes) { + if let Ok(format) = image::guess_format(&bytes) { let data = match format { ImageFormat::Gif => { let decoder = GifDecoder::new(Cursor::new(&bytes))?; @@ -689,25 +690,12 @@ impl Asset for ImageAssetLoader { } }; - RenderImage::new(data) + Ok(Arc::new(RenderImage::new(data))) } else { - let pixmap = - // TODO: Can we make svgs always rescale? 
- svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(SMOOTH_SVG_SCALE_FACTOR))?; - - let mut buffer = - ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap(); - - for pixel in buffer.chunks_exact_mut(4) { - swap_rgba_pa_to_bgra(pixel); - } - - let mut image = RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1)); - image.scale_factor = SMOOTH_SVG_SCALE_FACTOR; - image - }; - - Ok(Arc::new(data)) + svg_renderer + .render_single_frame(&bytes, 1.0, true) + .map_err(Into::into) + } } } } diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 841fbe924cd011bd2afa7d8d344e3a1c5a51e7a1..b820e120dd738df8a39d3a40379414984942f158 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -38,7 +38,7 @@ pub struct BackgroundExecutor { /// This is intentionally `!Send` via the `not_send` marker field. This is because /// `ForegroundExecutor::spawn` does not require `Send` but checks at runtime that the future is /// only polled from the same thread it was spawned from. These checks would fail when spawning -/// foreground tasks from from background threads. +/// foreground tasks from background threads. 
#[derive(Clone)] pub struct ForegroundExecutor { #[doc(hidden)] @@ -281,6 +281,9 @@ impl BackgroundExecutor { }); let mut cx = std::task::Context::from_waker(&waker); + let duration = Duration::from_secs(500); + let mut test_should_end_by = Instant::now() + duration; + loop { match future.as_mut().poll(&mut cx) { Poll::Ready(result) => return Ok(result), @@ -313,7 +316,12 @@ ) } dispatcher.set_unparker(unparker.clone()); - parker.park(); + parker.park_timeout( + test_should_end_by.saturating_duration_since(Instant::now()), + ); + if Instant::now() > test_should_end_by { + panic!("test timed out after {duration:?} with allow_parking") + } } } } diff --git a/crates/gpui/src/gpui.rs index 805dbbdfe740acbac4929170953e70c923403bb7..2e391b6e442126a74884046a5058976c0495abfd 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -95,7 +95,7 @@ pub use smol::Timer; pub use style::*; pub use styled::*; pub use subscription::*; -use svg_renderer::*; +pub use svg_renderer::*; pub(crate) use tab_stop::*; pub use taffy::{AvailableSpace, LayoutId}; #[cfg(any(test, feature = "test-support"))] diff --git a/crates/gpui/src/interactive.rs index dafe623dfada7ba7b21140fc36c7c824e8b5f3f6..dd521ff718322d663f761e05598edce83432bf2d 100644 --- a/crates/gpui/src/interactive.rs +++ b/crates/gpui/src/interactive.rs @@ -115,6 +115,16 @@ impl InputEvent for MouseDownEvent { } impl MouseEvent for MouseDownEvent {} +impl MouseDownEvent { + /// Returns true if this mouse down event should focus the element.
+ pub fn is_focusing(&self) -> bool { + match self.button { + MouseButton::Left => true, + _ => false, + } + } +} + /// A mouse up event from the platform #[derive(Clone, Debug, Default)] pub struct MouseUpEvent { @@ -137,8 +147,19 @@ impl InputEvent for MouseUpEvent { PlatformInput::MouseUp(self) } } + impl MouseEvent for MouseUpEvent {} +impl MouseUpEvent { + /// Returns true if this mouse up event should focus the element. + pub fn is_focusing(&self) -> bool { + match self.button { + MouseButton::Left => true, + _ => false, + } + } +} + /// A click event, generated when a mouse button is pressed and released. #[derive(Clone, Debug, Default)] pub struct MouseClickEvent { @@ -482,6 +503,7 @@ impl InputEvent for MouseExitEvent { PlatformInput::MouseExited(self) } } + impl MouseEvent for MouseExitEvent {} impl Deref for MouseExitEvent { diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index dd50a08c6b12ab198f1898ba79bae35969e6a5d0..20a135df51cc935ce725f88e3978abb9f3fc07c9 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -40,7 +40,7 @@ use crate::{ DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, ShapedGlyph, - ShapedRun, SharedString, Size, SvgRenderer, SvgSize, SystemWindowTab, Task, TaskLabel, Window, + ShapedRun, SharedString, Size, SvgRenderer, SystemWindowTab, Task, TaskLabel, Window, WindowControlArea, hash, point, px, size, }; use anyhow::Result; @@ -82,6 +82,9 @@ pub(crate) use test::*; #[cfg(target_os = "windows")] pub(crate) use windows::*; +#[cfg(all(target_os = "linux", feature = "wayland"))] +pub use linux::layer_shell; + #[cfg(any(test, feature = "test-support"))] pub use test::{TestDispatcher, TestScreenCaptureSource, TestScreenCaptureStream}; @@ -120,6 +123,15 @@ pub(crate) fn 
current_platform(headless: bool) -> Rc { } } +#[cfg(target_os = "windows")] +pub(crate) fn current_platform(_headless: bool) -> Rc { + Rc::new( + WindowsPlatform::new() + .inspect_err(|err| show_error("Failed to launch", err.to_string())) + .unwrap(), + ) +} + /// Return which compositor we're guessing we'll use. /// Does not attempt to connect to the given compositor #[cfg(any(target_os = "linux", target_os = "freebsd"))] @@ -151,15 +163,6 @@ pub fn guess_compositor() -> &'static str { } } -#[cfg(target_os = "windows")] -pub(crate) fn current_platform(_headless: bool) -> Rc { - Rc::new( - WindowsPlatform::new() - .inspect_err(|err| show_error("Failed to launch", err.to_string())) - .unwrap(), - ) -} - pub(crate) trait Platform: 'static { fn background_executor(&self) -> BackgroundExecutor; fn foreground_executor(&self) -> ForegroundExecutor; @@ -1293,7 +1296,7 @@ pub struct TitlebarOptions { } /// The kind of window to create -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum WindowKind { /// A normal application window Normal, @@ -1304,6 +1307,11 @@ pub enum WindowKind { /// A floating window that appears on top of its parent window Floating, + + /// A Wayland LayerShell window, used to draw overlays or backgrounds for applications such as + /// docks, notifications or wallpapers. + #[cfg(all(target_os = "linux", feature = "wayland"))] + LayerShell(layer_shell::LayerShellOptions), } /// The appearance of the window, as defined by the operating system. 
@@ -1817,13 +1825,9 @@ impl Image { ImageFormat::Tiff => frames_for_image(&self.bytes, image::ImageFormat::Tiff)?, ImageFormat::Ico => frames_for_image(&self.bytes, image::ImageFormat::Ico)?, ImageFormat::Svg => { - let pixmap = svg_renderer.render_pixmap(&self.bytes, SvgSize::ScaleFactor(1.0))?; - - let buffer = - image::ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()) - .unwrap(); - - SmallVec::from_elem(Frame::new(buffer), 1) + return svg_renderer + .render_single_frame(&self.bytes, 1.0, false) + .map_err(Into::into); } }; diff --git a/crates/gpui/src/platform/linux.rs b/crates/gpui/src/platform/linux.rs index 5221f71f9970eb24508954304055acf974ed059d..f7d7ed0ebaa4165065f9963ee1be6d05601cf4ce 100644 --- a/crates/gpui/src/platform/linux.rs +++ b/crates/gpui/src/platform/linux.rs @@ -27,3 +27,6 @@ pub(crate) use x11::*; pub(crate) type PlatformScreenCaptureFrame = scap::frame::Frame; #[cfg(not(all(feature = "screen-capture", any(feature = "wayland", feature = "x11"))))] pub(crate) type PlatformScreenCaptureFrame = (); + +#[cfg(feature = "wayland")] +pub use wayland::layer_shell; diff --git a/crates/gpui/src/platform/linux/wayland.rs b/crates/gpui/src/platform/linux/wayland.rs index 487bc9f38c927609100a238ac4726c2aab3b87b0..366b5703e448522a59d397e00cbd268951cb1873 100644 --- a/crates/gpui/src/platform/linux/wayland.rs +++ b/crates/gpui/src/platform/linux/wayland.rs @@ -5,6 +5,9 @@ mod display; mod serial; mod window; +/// Contains Types for configuring layer_shell surfaces. 
+pub mod layer_shell; + pub(crate) use client::*; use wayland_protocols::wp::cursor_shape::v1::client::wp_cursor_shape_device_v1::Shape; diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 1ebdda3a266af0f9e8d82dabd5b36372e0972438..6461bf69738cfae2f791bf8eea69fe9a2a038a43 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -62,6 +62,7 @@ use wayland_protocols::xdg::decoration::zv1::client::{ }; use wayland_protocols::xdg::shell::client::{xdg_surface, xdg_toplevel, xdg_wm_base}; use wayland_protocols_plasma::blur::client::{org_kde_kwin_blur, org_kde_kwin_blur_manager}; +use wayland_protocols_wlr::layer_shell::v1::client::{zwlr_layer_shell_v1, zwlr_layer_surface_v1}; use xkbcommon::xkb::ffi::XKB_KEYMAP_FORMAT_TEXT_V1; use xkbcommon::xkb::{self, KEYMAP_COMPILE_NO_FLAGS, Keycode}; @@ -115,6 +116,7 @@ pub struct Globals { pub fractional_scale_manager: Option, pub decoration_manager: Option, + pub layer_shell: Option, pub blur_manager: Option, pub text_input_manager: Option, pub executor: ForegroundExecutor, @@ -152,6 +154,7 @@ impl Globals { viewporter: globals.bind(&qh, 1..=1, ()).ok(), fractional_scale_manager: globals.bind(&qh, 1..=1, ()).ok(), decoration_manager: globals.bind(&qh, 1..=1, ()).ok(), + layer_shell: globals.bind(&qh, 1..=5, ()).ok(), blur_manager: globals.bind(&qh, 1..=1, ()).ok(), text_input_manager: globals.bind(&qh, 1..=1, ()).ok(), executor, @@ -695,7 +698,10 @@ impl LinuxClient for WaylandClient { ) -> anyhow::Result> { let mut state = self.0.borrow_mut(); - let parent = state.keyboard_focused_window.as_ref().map(|w| w.toplevel()); + let parent = state + .keyboard_focused_window + .as_ref() + .and_then(|w| w.toplevel()); let (window, surface_id) = WaylandWindow::new( handle, @@ -945,6 +951,7 @@ delegate_noop!(WaylandClientStatePtr: ignore wl_buffer::WlBuffer); delegate_noop!(WaylandClientStatePtr: ignore 
wl_region::WlRegion); delegate_noop!(WaylandClientStatePtr: ignore wp_fractional_scale_manager_v1::WpFractionalScaleManagerV1); delegate_noop!(WaylandClientStatePtr: ignore zxdg_decoration_manager_v1::ZxdgDecorationManagerV1); +delegate_noop!(WaylandClientStatePtr: ignore zwlr_layer_shell_v1::ZwlrLayerShellV1); delegate_noop!(WaylandClientStatePtr: ignore org_kde_kwin_blur_manager::OrgKdeKwinBlurManager); delegate_noop!(WaylandClientStatePtr: ignore zwp_text_input_manager_v3::ZwpTextInputManagerV3); delegate_noop!(WaylandClientStatePtr: ignore org_kde_kwin_blur::OrgKdeKwinBlur); @@ -1087,6 +1094,31 @@ impl Dispatch for WaylandClientStatePtr { } } +impl Dispatch for WaylandClientStatePtr { + fn event( + this: &mut Self, + _: &zwlr_layer_surface_v1::ZwlrLayerSurfaceV1, + event: ::Event, + surface_id: &ObjectId, + _: &Connection, + _: &QueueHandle, + ) { + let client = this.get_client(); + let mut state = client.borrow_mut(); + let Some(window) = get_window(&mut state, surface_id) else { + return; + }; + + drop(state); + let should_close = window.handle_layersurface_event(event); + + if should_close { + // The close logic will be handled in drop_window() + window.close(); + } + } +} + impl Dispatch for WaylandClientStatePtr { fn event( _: &mut Self, diff --git a/crates/gpui/src/platform/linux/wayland/layer_shell.rs b/crates/gpui/src/platform/linux/wayland/layer_shell.rs new file mode 100644 index 0000000000000000000000000000000000000000..0f165ed8e0ca2c1ec8d5b7c4cbdfea6cb5eec71b --- /dev/null +++ b/crates/gpui/src/platform/linux/wayland/layer_shell.rs @@ -0,0 +1,111 @@ +use bitflags::bitflags; +use thiserror::Error; +use wayland_protocols_wlr::layer_shell::v1::client::{zwlr_layer_shell_v1, zwlr_layer_surface_v1}; + +use crate::Pixels; + +/// The layer the surface is rendered on. Multiple surfaces can share a layer, and ordering within +/// a single layer is undefined. 
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum Layer { + /// The background layer, typically used for wallpapers. + Background, + + /// The bottom layer. + Bottom, + + /// The top layer, typically used for fullscreen windows. + Top, + + /// The overlay layer, used for surfaces that should always be on top. + #[default] + Overlay, +} + +impl From for zwlr_layer_shell_v1::Layer { + fn from(layer: Layer) -> Self { + match layer { + Layer::Background => Self::Background, + Layer::Bottom => Self::Bottom, + Layer::Top => Self::Top, + Layer::Overlay => Self::Overlay, + } + } +} + +bitflags! { + /// Screen anchor point for layer_shell surfaces. These can be used in any combination, e.g. + /// specifying `Anchor::LEFT | Anchor::RIGHT` will stretch the surface across the width of the + /// screen. + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] + pub struct Anchor: u32 { + /// Anchor to the top edge of the screen. + const TOP = 1; + /// Anchor to the bottom edge of the screen. + const BOTTOM = 2; + /// Anchor to the left edge of the screen. + const LEFT = 4; + /// Anchor to the right edge of the screen. + const RIGHT = 8; + } +} + +impl From for zwlr_layer_surface_v1::Anchor { + fn from(anchor: Anchor) -> Self { + Self::from_bits_truncate(anchor.bits()) + } +} + +/// Keyboard interactivity mode for the layer_shell surfaces. +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum KeyboardInteractivity { + /// No keyboard inputs will be delivered to the surface and it won't be able to receive + /// keyboard focus. + None, + + /// The surface will receive exclusive keyboard focus as long as it is above the shell surface + /// layer, and no other layer_shell surfaces are above it. + Exclusive, + + /// The surface can be focused similarly to a normal window. 
+ #[default] + OnDemand, +} + +impl From for zwlr_layer_surface_v1::KeyboardInteractivity { + fn from(value: KeyboardInteractivity) -> Self { + match value { + KeyboardInteractivity::None => Self::None, + KeyboardInteractivity::Exclusive => Self::Exclusive, + KeyboardInteractivity::OnDemand => Self::OnDemand, + } + } +} + +/// Options for creating a layer_shell window. +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct LayerShellOptions { + /// The namespace for the surface, mostly used by compositors to apply rules, can not be + /// changed after the surface is created. + pub namespace: String, + /// The layer the surface is rendered on. + pub layer: Layer, + /// The anchor point of the surface. + pub anchor: Anchor, + /// Requests that the compositor avoids occluding an area with other surfaces. + pub exclusive_zone: Option, + /// The anchor point of the exclusive zone, will be determined using the anchor if left + /// unspecified. + pub exclusive_edge: Option, + /// Margins between the surface and its anchor point(s). + /// Specified in CSS order: top, right, bottom, left. + pub margin: Option<(Pixels, Pixels, Pixels, Pixels)>, + /// How keyboard events should be delivered to the surface. + pub keyboard_interactivity: KeyboardInteractivity, +} + +/// An error indicating that an action failed because the compositor doesn't support the required +/// layer_shell protocol. 
+#[derive(Debug, Error)] +#[error("Compositor doesn't support zwlr_layer_shell_v1")] +pub struct LayerShellNotSupportedError; diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index aa3b7141be77dbdc73893783620523c6c8d68e4e..c02d1f3bc3d0d1ecf7589ae959f8c9b0e3f0fde5 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -23,12 +23,14 @@ use wayland_protocols::{ xdg::shell::client::xdg_toplevel::XdgToplevel, }; use wayland_protocols_plasma::blur::client::org_kde_kwin_blur; +use wayland_protocols_wlr::layer_shell::v1::client::zwlr_layer_surface_v1; use crate::{ AnyWindowHandle, Bounds, Decorations, Globals, GpuSpecs, Modifiers, Output, Pixels, PlatformDisplay, PlatformInput, Point, PromptButton, PromptLevel, RequestFrameOptions, ResizeEdge, Size, Tiling, WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance, - WindowBounds, WindowControlArea, WindowControls, WindowDecorations, WindowParams, px, size, + WindowBounds, WindowControlArea, WindowControls, WindowDecorations, WindowParams, + layer_shell::LayerShellNotSupportedError, px, size, }; use crate::{ Capslock, @@ -83,14 +85,12 @@ struct InProgressConfigure { } pub struct WaylandWindowState { - xdg_surface: xdg_surface::XdgSurface, + surface_state: WaylandSurfaceState, acknowledged_first_configure: bool, pub surface: wl_surface::WlSurface, - decoration: Option, app_id: Option, appearance: WindowAppearance, blur: Option, - toplevel: xdg_toplevel::XdgToplevel, viewport: Option, outputs: HashMap, display: Option<(ObjectId, Output)>, @@ -116,6 +116,161 @@ pub struct WaylandWindowState { client_inset: Option, } +pub enum WaylandSurfaceState { + Xdg(WaylandXdgSurfaceState), + LayerShell(WaylandLayerSurfaceState), +} + +impl WaylandSurfaceState { + fn new( + surface: &wl_surface::WlSurface, + globals: &Globals, + params: &WindowParams, + parent: Option, + ) -> anyhow::Result { + // For 
layer_shell windows, create a layer surface instead of an xdg surface + if let WindowKind::LayerShell(options) = ¶ms.kind { + let Some(layer_shell) = globals.layer_shell.as_ref() else { + return Err(LayerShellNotSupportedError.into()); + }; + + let layer_surface = layer_shell.get_layer_surface( + &surface, + None, + options.layer.into(), + options.namespace.clone(), + &globals.qh, + surface.id(), + ); + + let width = params.bounds.size.width.0; + let height = params.bounds.size.height.0; + layer_surface.set_size(width as u32, height as u32); + + layer_surface.set_anchor(options.anchor.into()); + layer_surface.set_keyboard_interactivity(options.keyboard_interactivity.into()); + + if let Some(margin) = options.margin { + layer_surface.set_margin( + margin.0.0 as i32, + margin.1.0 as i32, + margin.2.0 as i32, + margin.3.0 as i32, + ) + } + + if let Some(exclusive_zone) = options.exclusive_zone { + layer_surface.set_exclusive_zone(exclusive_zone.0 as i32); + } + + if let Some(exclusive_edge) = options.exclusive_edge { + layer_surface.set_exclusive_edge(exclusive_edge.into()); + } + + return Ok(WaylandSurfaceState::LayerShell(WaylandLayerSurfaceState { + layer_surface, + })); + } + + // All other WindowKinds result in a regular xdg surface + let xdg_surface = globals + .wm_base + .get_xdg_surface(&surface, &globals.qh, surface.id()); + + let toplevel = xdg_surface.get_toplevel(&globals.qh, surface.id()); + if params.kind == WindowKind::Floating { + toplevel.set_parent(parent.as_ref()); + } + + if let Some(size) = params.window_min_size { + toplevel.set_min_size(size.width.0 as i32, size.height.0 as i32); + } + + // Attempt to set up window decorations based on the requested configuration + let decoration = globals + .decoration_manager + .as_ref() + .map(|decoration_manager| { + decoration_manager.get_toplevel_decoration(&toplevel, &globals.qh, surface.id()) + }); + + Ok(WaylandSurfaceState::Xdg(WaylandXdgSurfaceState { + xdg_surface, + toplevel, + decoration, + })) + } 
+} + +pub struct WaylandXdgSurfaceState { + xdg_surface: xdg_surface::XdgSurface, + toplevel: xdg_toplevel::XdgToplevel, + decoration: Option, +} + +pub struct WaylandLayerSurfaceState { + layer_surface: zwlr_layer_surface_v1::ZwlrLayerSurfaceV1, +} + +impl WaylandSurfaceState { + fn ack_configure(&self, serial: u32) { + match self { + WaylandSurfaceState::Xdg(WaylandXdgSurfaceState { xdg_surface, .. }) => { + xdg_surface.ack_configure(serial); + } + WaylandSurfaceState::LayerShell(WaylandLayerSurfaceState { layer_surface, .. }) => { + layer_surface.ack_configure(serial); + } + } + } + + fn decoration(&self) -> Option<&zxdg_toplevel_decoration_v1::ZxdgToplevelDecorationV1> { + if let WaylandSurfaceState::Xdg(WaylandXdgSurfaceState { decoration, .. }) = self { + decoration.as_ref() + } else { + None + } + } + + fn toplevel(&self) -> Option<&xdg_toplevel::XdgToplevel> { + if let WaylandSurfaceState::Xdg(WaylandXdgSurfaceState { toplevel, .. }) = self { + Some(toplevel) + } else { + None + } + } + + fn set_geometry(&self, x: i32, y: i32, width: i32, height: i32) { + match self { + WaylandSurfaceState::Xdg(WaylandXdgSurfaceState { xdg_surface, .. }) => { + xdg_surface.set_window_geometry(x, y, width, height); + } + WaylandSurfaceState::LayerShell(WaylandLayerSurfaceState { layer_surface, .. }) => { + // cannot set window position of a layer surface + layer_surface.set_size(width as u32, height as u32); + } + } + } + + fn destroy(&mut self) { + match self { + WaylandSurfaceState::Xdg(WaylandXdgSurfaceState { + xdg_surface, + toplevel, + decoration: _decoration, + }) => { + // The role object (toplevel) must always be destroyed before the xdg_surface. 
+ // See https://wayland.app/protocols/xdg-shell#xdg_surface:request:destroy + toplevel.destroy(); + xdg_surface.destroy(); + } + WaylandSurfaceState::LayerShell(WaylandLayerSurfaceState { layer_surface }) => { + layer_surface.destroy(); + } + } + } +} + #[derive(Clone)] pub struct WaylandWindowStatePtr { state: Rc>, @@ -126,9 +281,7 @@ impl WaylandWindowState { pub(crate) fn new( handle: AnyWindowHandle, surface: wl_surface::WlSurface, - xdg_surface: xdg_surface::XdgSurface, - toplevel: xdg_toplevel::XdgToplevel, - decoration: Option, + surface_state: WaylandSurfaceState, appearance: WindowAppearance, viewport: Option, client: WaylandClientStatePtr, @@ -157,14 +310,18 @@ impl WaylandWindowState { BladeRenderer::new(gpu_context, &raw_window, config)? }; + if let WaylandSurfaceState::Xdg(ref xdg_state) = surface_state { + if let Some(title) = options.titlebar.and_then(|titlebar| titlebar.title) { + xdg_state.toplevel.set_title(title.to_string()); + } + } + Ok(Self { - xdg_surface, + surface_state, acknowledged_first_configure: false, surface, - decoration, app_id: None, blur: None, - toplevel, viewport, globals, outputs: HashMap::default(), @@ -237,17 +394,29 @@ impl Drop for WaylandWindow { let client = state.client.clone(); state.renderer.destroy(); - if let Some(decoration) = &state.decoration { - decoration.destroy(); - } + + // Destroy blur first, this has no dependencies. if let Some(blur) = &state.blur { blur.release(); } - state.toplevel.destroy(); + + // Decorations must be destroyed before the xdg state. + // See https://wayland.app/protocols/xdg-decoration-unstable-v1#zxdg_toplevel_decoration_v1 + if let Some(decoration) = &state.surface_state.decoration() { + decoration.destroy(); + } + + // Surface state might contain xdg_toplevel/xdg_surface which can be destroyed now that + // decorations are gone. layer_surface has no dependencies. + state.surface_state.destroy(); + + // Viewport must be destroyed before the wl_surface. 
+ // See https://wayland.app/protocols/viewporter#wp_viewport if let Some(viewport) = &state.viewport { viewport.destroy(); } - state.xdg_surface.destroy(); + + // The wl_surface itself should always be destroyed last. state.surface.destroy(); let state_ptr = self.0.clone(); @@ -282,31 +451,12 @@ impl WaylandWindow { parent: Option, ) -> anyhow::Result<(Self, ObjectId)> { let surface = globals.compositor.create_surface(&globals.qh, ()); - let xdg_surface = globals - .wm_base - .get_xdg_surface(&surface, &globals.qh, surface.id()); - let toplevel = xdg_surface.get_toplevel(&globals.qh, surface.id()); - - if params.kind == WindowKind::Floating { - toplevel.set_parent(parent.as_ref()); - } - - if let Some(size) = params.window_min_size { - toplevel.set_min_size(size.width.0 as i32, size.height.0 as i32); - } + let surface_state = WaylandSurfaceState::new(&surface, &globals, ¶ms, parent)?; if let Some(fractional_scale_manager) = globals.fractional_scale_manager.as_ref() { fractional_scale_manager.get_fractional_scale(&surface, &globals.qh, surface.id()); } - // Attempt to set up window decorations based on the requested configuration - let decoration = globals - .decoration_manager - .as_ref() - .map(|decoration_manager| { - decoration_manager.get_toplevel_decoration(&toplevel, &globals.qh, surface.id()) - }); - let viewport = globals .viewporter .as_ref() @@ -316,9 +466,7 @@ impl WaylandWindow { state: Rc::new(RefCell::new(WaylandWindowState::new( handle, surface.clone(), - xdg_surface, - toplevel, - decoration, + surface_state, appearance, viewport, client, @@ -345,8 +493,8 @@ impl WaylandWindowStatePtr { self.state.borrow().surface.clone() } - pub fn toplevel(&self) -> xdg_toplevel::XdgToplevel { - self.state.borrow().toplevel.clone() + pub fn toplevel(&self) -> Option { + self.state.borrow().surface_state.toplevel().cloned() } pub fn ptr_eq(&self, other: &Self) -> bool { @@ -413,7 +561,7 @@ impl WaylandWindowStatePtr { } } let mut state = self.state.borrow_mut(); - 
state.xdg_surface.ack_configure(serial); + state.surface_state.ack_configure(serial); let window_geometry = inset_by_tiling( state.bounds.map_origin(|_| px(0.0)), @@ -423,7 +571,7 @@ impl WaylandWindowStatePtr { .map(|v| v.0 as i32) .map_size(|v| if v <= 0 { 1 } else { v }); - state.xdg_surface.set_window_geometry( + state.surface_state.set_geometry( window_geometry.origin.x, window_geometry.origin.y, window_geometry.size.width, @@ -582,6 +730,42 @@ impl WaylandWindowStatePtr { } } + pub fn handle_layersurface_event(&self, event: zwlr_layer_surface_v1::Event) -> bool { + match event { + zwlr_layer_surface_v1::Event::Configure { + width, + height, + serial, + } => { + let mut size = if width == 0 || height == 0 { + None + } else { + Some(size(px(width as f32), px(height as f32))) + }; + + let mut state = self.state.borrow_mut(); + state.in_progress_configure = Some(InProgressConfigure { + size, + fullscreen: false, + maximized: false, + resizing: false, + tiling: Tiling::default(), + }); + drop(state); + + // just do the same thing we'd do as an xdg_surface + self.handle_xdg_surface_event(xdg_surface::Event::Configure { serial }); + + false + } + zwlr_layer_surface_v1::Event::Closed => { + // unlike xdg, we don't have a choice here: the surface is closing. 
+ true + } + _ => false, + } + } + #[allow(clippy::mutable_key_type)] pub fn handle_surface_event( &self, @@ -843,7 +1027,7 @@ impl PlatformWindow for WaylandWindow { let state_ptr = self.0.clone(); let dp_size = size.to_device_pixels(self.scale_factor()); - state.xdg_surface.set_window_geometry( + state.surface_state.set_geometry( state.bounds.origin.x.0 as i32, state.bounds.origin.y.0 as i32, dp_size.width.0, @@ -937,12 +1121,16 @@ impl PlatformWindow for WaylandWindow { } fn set_title(&mut self, title: &str) { - self.borrow().toplevel.set_title(title.to_string()); + if let Some(toplevel) = self.borrow().surface_state.toplevel() { + toplevel.set_title(title.to_string()); + } } fn set_app_id(&mut self, app_id: &str) { let mut state = self.borrow_mut(); - state.toplevel.set_app_id(app_id.to_owned()); + if let Some(toplevel) = state.surface_state.toplevel() { + toplevel.set_app_id(app_id.to_owned()); + } state.app_id = Some(app_id.to_owned()); } @@ -953,24 +1141,30 @@ impl PlatformWindow for WaylandWindow { } fn minimize(&self) { - self.borrow().toplevel.set_minimized(); + if let Some(toplevel) = self.borrow().surface_state.toplevel() { + toplevel.set_minimized(); + } } fn zoom(&self) { let state = self.borrow(); - if !state.maximized { - state.toplevel.set_maximized(); - } else { - state.toplevel.unset_maximized(); + if let Some(toplevel) = state.surface_state.toplevel() { + if !state.maximized { + toplevel.set_maximized(); + } else { + toplevel.unset_maximized(); + } } } fn toggle_fullscreen(&self) { - let mut state = self.borrow_mut(); - if !state.fullscreen { - state.toplevel.set_fullscreen(None); - } else { - state.toplevel.unset_fullscreen(); + let mut state = self.borrow(); + if let Some(toplevel) = state.surface_state.toplevel() { + if !state.fullscreen { + toplevel.set_fullscreen(None); + } else { + toplevel.unset_fullscreen(); + } } } @@ -1035,27 +1229,33 @@ impl PlatformWindow for WaylandWindow { fn show_window_menu(&self, position: Point) { let state = 
self.borrow(); let serial = state.client.get_serial(SerialKind::MousePress); - state.toplevel.show_window_menu( - &state.globals.seat, - serial, - position.x.0 as i32, - position.y.0 as i32, - ); + if let Some(toplevel) = state.surface_state.toplevel() { + toplevel.show_window_menu( + &state.globals.seat, + serial, + position.x.0 as i32, + position.y.0 as i32, + ); + } } fn start_window_move(&self) { let state = self.borrow(); let serial = state.client.get_serial(SerialKind::MousePress); - state.toplevel._move(&state.globals.seat, serial); + if let Some(toplevel) = state.surface_state.toplevel() { + toplevel._move(&state.globals.seat, serial); + } } fn start_window_resize(&self, edge: crate::ResizeEdge) { let state = self.borrow(); - state.toplevel.resize( - &state.globals.seat, - state.client.get_serial(SerialKind::MousePress), - edge.to_xdg(), - ) + if let Some(toplevel) = state.surface_state.toplevel() { + toplevel.resize( + &state.globals.seat, + state.client.get_serial(SerialKind::MousePress), + edge.to_xdg(), + ) + } } fn window_decorations(&self) -> Decorations { @@ -1071,7 +1271,7 @@ impl PlatformWindow for WaylandWindow { fn request_decorations(&self, decorations: WindowDecorations) { let mut state = self.borrow_mut(); state.decorations = decorations; - if let Some(decoration) = state.decoration.as_ref() { + if let Some(decoration) = state.surface_state.decoration() { decoration.set_mode(decorations.to_xdg()); update_window(state); } diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index 8dcb43c6ce6bb5522b3d1337390ae5436809720e..f653f050c07848340c8da1fa0e01a2a4da985bdb 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -308,6 +308,16 @@ pub trait Styled: Sized { self } + /// Sets the element to justify items along the container's main axis such + /// that there is an equal amount of space around each item, but also + /// accounting for the doubling of space you would normally see between + /// each item when using 
justify-around. + /// [Docs](https://tailwindcss.com/docs/justify-content#space-evenly) + fn justify_evenly(mut self) -> Self { + self.style().justify_content = Some(JustifyContent::SpaceEvenly); + self + } + /// Sets the element to pack content items in their default position as if no align-content value was set. /// [Docs](https://tailwindcss.com/docs/align-content#normal) fn content_normal(mut self) -> Self { diff --git a/crates/gpui/src/svg_renderer.rs b/crates/gpui/src/svg_renderer.rs index b2bf126967cd0c533eb6faac8c168508fe5c1d34..1e2e34897af0b550542f9af148bb7c19f8f8ed18 100644 --- a/crates/gpui/src/svg_renderer.rs +++ b/crates/gpui/src/svg_renderer.rs @@ -1,5 +1,10 @@ -use crate::{AssetSource, DevicePixels, IsZero, Result, SharedString, Size}; +use crate::{ + AssetSource, DevicePixels, IsZero, RenderImage, Result, SharedString, Size, + swap_rgba_pa_to_bgra, +}; +use image::Frame; use resvg::tiny_skia::Pixmap; +use smallvec::SmallVec; use std::{ hash::Hash, sync::{Arc, LazyLock}, @@ -15,17 +20,22 @@ pub(crate) struct RenderSvgParams { } #[derive(Clone)] +/// A struct holding everything necessary to render SVGs. pub struct SvgRenderer { asset_source: Arc, usvg_options: Arc>, } +/// The size in which to render the SVG. pub enum SvgSize { + /// An absolute size in device pixels. Size(Size), + /// A scaling factor to apply to the size provided by the SVG. ScaleFactor(f32), } impl SvgRenderer { + /// Creates a new SVG renderer with the provided asset source. pub fn new(asset_source: Arc) -> Self { static FONT_DB: LazyLock> = LazyLock::new(|| { let mut db = usvg::fontdb::Database::new(); @@ -54,7 +64,35 @@ impl SvgRenderer { } } - pub(crate) fn render( + /// Renders the given bytes into an image buffer. 
+ pub fn render_single_frame( + &self, + bytes: &[u8], + scale_factor: f32, + to_brga: bool, + ) -> Result, usvg::Error> { + self.render_pixmap( + bytes, + SvgSize::ScaleFactor(scale_factor * SMOOTH_SVG_SCALE_FACTOR), + ) + .map(|pixmap| { + let mut buffer = + image::ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()) + .unwrap(); + + if to_brga { + for pixel in buffer.chunks_exact_mut(4) { + swap_rgba_pa_to_bgra(pixel); + } + } + + let mut image = RenderImage::new(SmallVec::from_const([Frame::new(buffer)])); + image.scale_factor = SMOOTH_SVG_SCALE_FACTOR; + Arc::new(image) + }) + } + + pub(crate) fn render_alpha_mask( &self, params: &RenderSvgParams, ) -> Result, Vec)>> { @@ -80,7 +118,7 @@ impl SvgRenderer { Ok(Some((size, alpha_mask))) } - pub fn render_pixmap(&self, bytes: &[u8], size: SvgSize) -> Result { + fn render_pixmap(&self, bytes: &[u8], size: SvgSize) -> Result { let tree = usvg::Tree::from_data(bytes, &self.usvg_options)?; let svg_size = tree.size(); let scale = match size { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 17d09e67dbafbf51be604180f3ff1333cc732cfd..c44b0d642a2970dfb803109591d8dc0e2c6cacc6 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -822,6 +822,12 @@ impl Frame { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +enum InputModality { + Mouse, + Keyboard, +} + /// Holds the state for a specific window. 
pub struct Window { pub(crate) handle: AnyWindowHandle, @@ -870,7 +876,7 @@ pub struct Window { hovered: Rc>, pub(crate) needs_present: Rc>, pub(crate) last_input_timestamp: Rc>, - last_input_was_keyboard: bool, + last_input_modality: InputModality, pub(crate) refreshing: bool, pub(crate) activation_observers: SubscriberSet<(), AnyObserver>, pub(crate) focus: Option, @@ -1254,7 +1260,7 @@ impl Window { hovered, needs_present, last_input_timestamp, - last_input_was_keyboard: false, + last_input_modality: InputModality::Mouse, refreshing: false, activation_observers: SubscriberSet::new(), focus: None, @@ -1910,7 +1916,7 @@ impl Window { /// Returns true if the last input event was keyboard-based (key press, tab navigation, etc.) /// This is used for focus-visible styling to show focus indicators only for keyboard navigation. pub fn last_input_was_keyboard(&self) -> bool { - self.last_input_was_keyboard + self.last_input_modality == InputModality::Keyboard } /// The current state of the keyboard's capslock @@ -3098,7 +3104,7 @@ impl Window { let Some(tile) = self.sprite_atlas .get_or_insert_with(¶ms.clone().into(), &mut || { - let Some((size, bytes)) = cx.svg_renderer.render(¶ms)? else { + let Some((size, bytes)) = cx.svg_renderer.render_alpha_mask(¶ms)? else { return Ok(None); }; Ok(Some((size, Cow::Owned(bytes)))) @@ -3591,12 +3597,13 @@ impl Window { self.last_input_timestamp.set(Instant::now()); // Track whether this input was keyboard-based for focus-visible styling - self.last_input_was_keyboard = matches!( - event, - PlatformInput::KeyDown(_) - | PlatformInput::KeyUp(_) - | PlatformInput::ModifiersChanged(_) - ); + self.last_input_modality = match &event { + PlatformInput::KeyDown(_) | PlatformInput::ModifiersChanged(_) => { + InputModality::Keyboard + } + PlatformInput::MouseDown(e) if e.is_focusing() => InputModality::Mouse, + _ => self.last_input_modality, + }; // Handlers may set this to false by calling `stop_propagation`. 
cx.propagate_event = true; diff --git a/crates/gpui/src/window/prompts.rs b/crates/gpui/src/window/prompts.rs index 778ee1dab0eb8312161dcbca0ddf8964afe0c6bb..63ad1668bec298a6b59d218bf7d4ca7cdce11e8c 100644 --- a/crates/gpui/src/window/prompts.rs +++ b/crates/gpui/src/window/prompts.rs @@ -142,6 +142,7 @@ impl Render for FallbackPromptRenderer { .id(ix) .on_click(cx.listener(move |_, _, _, cx| { cx.emit(PromptResponse(ix)); + cx.stop_propagation(); })) })); diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 4c026bc2b82deebb0cb24da40d476b2cd91bb012..1442c482d89f0c46e45ccd280e678021e6ba63c7 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -145,6 +145,7 @@ pub enum IconName { Keyboard, Library, LineHeight, + Link, ListCollapse, ListFilter, ListTodo, diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 8e50a7303fb98febb492eb3f8b4aed4d928a879e..e3fb30d46eb57059afc53682c57be392ec8254ed 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -1,9 +1,10 @@ use std::{ + cell::RefCell, cmp::{self}, ops::{Not as _, Range}, rc::Rc, sync::Arc, - time::Duration, + time::{Duration, Instant}, }; mod ui_components; @@ -41,7 +42,7 @@ use workspace::{ }; pub use ui_components::*; -use zed_actions::OpenKeymap; +use zed_actions::{ChangeKeybinding, OpenKeymap}; use crate::{ persistence::KEYBINDING_EDITORS, @@ -80,37 +81,77 @@ pub fn init(cx: &mut App) { let keymap_event_channel = KeymapEventChannel::new(); cx.set_global(keymap_event_channel); - cx.on_action(|_: &OpenKeymap, cx| { + fn common(filter: Option, cx: &mut App) { workspace::with_active_or_new_workspace(cx, move |workspace, window, cx| { workspace - .with_local_workspace(window, cx, |workspace, window, cx| { + .with_local_workspace(window, cx, move |workspace, window, cx| { let existing = workspace .active_pane() .read(cx) .items() .find_map(|item| item.downcast::()); - if let 
Some(existing) = existing { + let keymap_editor = if let Some(existing) = existing { workspace.activate_item(&existing, true, true, window, cx); + existing } else { let keymap_editor = cx.new(|cx| KeymapEditor::new(workspace.weak_handle(), window, cx)); workspace.add_item_to_active_pane( - Box::new(keymap_editor), + Box::new(keymap_editor.clone()), None, true, window, cx, ); + keymap_editor + }; + + if let Some(filter) = filter { + keymap_editor.update(cx, |editor, cx| { + editor.filter_editor.update(cx, |editor, cx| { + editor.clear(window, cx); + editor.insert(&filter, window, cx); + }); + if !editor.has_binding_for(&filter) { + open_binding_modal_after_loading(cx) + } + }) } }) .detach(); }) - }); + } + + cx.on_action(|_: &OpenKeymap, cx| common(None, cx)); + cx.on_action(|action: &ChangeKeybinding, cx| common(Some(action.action.clone()), cx)); register_serializable_item::(cx); } +fn open_binding_modal_after_loading(cx: &mut Context) { + let started_at = Instant::now(); + let observer = Rc::new(RefCell::new(None)); + let handle = { + let observer = Rc::clone(&observer); + cx.observe(&cx.entity(), move |editor, _, cx| { + let subscription = observer.borrow_mut().take(); + + if started_at.elapsed().as_secs() > 10 { + return; + } + if !editor.matches.is_empty() { + editor.selected_index = Some(0); + cx.dispatch_action(&CreateBinding); + return; + } + + *observer.borrow_mut() = subscription; + }) + }; + *observer.borrow_mut() = Some(handle); +} + pub struct KeymapEventChannel {} impl Global for KeymapEventChannel {} @@ -521,6 +562,11 @@ impl KeymapEditor { } } + fn clear_action_query(&self, window: &mut Window, cx: &mut Context) { + self.filter_editor + .update(cx, |editor, cx| editor.clear(window, cx)) + } + fn on_query_changed(&mut self, cx: &mut Context) { let action_query = self.current_action_query(cx); let keystroke_query = self.current_keystroke_query(cx); @@ -1320,6 +1366,13 @@ impl KeymapEditor { editor.set_keystrokes(keystrokes, cx); }); } + + fn 
has_binding_for(&self, action_name: &str) -> bool { + self.keybindings + .iter() + .filter(|kb| kb.keystrokes().is_some()) + .any(|kb| kb.action().name == action_name) + } } struct HumanizedActionNameCache { @@ -2447,7 +2500,7 @@ impl KeybindingEditorModal { } fn get_matching_bindings_count(&self, cx: &Context) -> usize { - let current_keystrokes = self.keybind_editor.read(cx).keystrokes().to_vec(); + let current_keystrokes = self.keybind_editor.read(cx).keystrokes(); if current_keystrokes.is_empty() { return 0; @@ -2464,17 +2517,20 @@ impl KeybindingEditorModal { return false; } - binding - .keystrokes() - .map(|keystrokes| keystrokes_match_exactly(keystrokes, ¤t_keystrokes)) - .unwrap_or(false) + binding.keystrokes().is_some_and(|keystrokes| { + keystrokes_match_exactly(keystrokes, current_keystrokes) + }) }) .count() } - fn show_matching_bindings(&mut self, _window: &mut Window, cx: &mut Context) { + fn show_matching_bindings(&mut self, window: &mut Window, cx: &mut Context) { let keystrokes = self.keybind_editor.read(cx).keystrokes().to_vec(); + self.keymap_editor.update(cx, |keymap_editor, cx| { + keymap_editor.clear_action_query(window, cx) + }); + // Dismiss the modal cx.emit(DismissEvent); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index bbbf9e31a5b39069e93a5f52f18df16bbc9f9671..ffc5ad85d14c293eeeaff9172b21ef58cf9a1cf0 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -20,7 +20,6 @@ test-support = [ "text/test-support", "tree-sitter-rust", "tree-sitter-python", - "tree-sitter-rust", "tree-sitter-typescript", "settings/test-support", "util/test-support", diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 78f25371a67821ec028cd72b21bd02290f3632bc..0b48c119683ee44040b48aa60f4ead1db2c5c433 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -3905,6 +3905,32 @@ impl BufferSnapshot { include_extra_context: bool, theme: Option<&SyntaxTheme>, ) -> Vec> { + 
self.outline_items_containing_internal( + range, + include_extra_context, + theme, + |this, range| this.anchor_after(range.start)..this.anchor_before(range.end), + ) + } + + pub fn outline_items_as_points_containing( + &self, + range: Range, + include_extra_context: bool, + theme: Option<&SyntaxTheme>, + ) -> Vec> { + self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| { + range + }) + } + + fn outline_items_containing_internal( + &self, + range: Range, + include_extra_context: bool, + theme: Option<&SyntaxTheme>, + range_callback: fn(&Self, Range) -> Range, + ) -> Vec> { let range = range.to_offset(self); let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| { grammar.outline_config.as_ref().map(|c| &c.query) @@ -3977,19 +4003,16 @@ impl BufferSnapshot { anchor_items.push(OutlineItem { depth: item_ends_stack.len(), - range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end), + range: range_callback(self, item.range.clone()), + source_range_for_text: range_callback(self, item.source_range_for_text.clone()), text: item.text, highlight_ranges: item.highlight_ranges, name_ranges: item.name_ranges, - body_range: item - .body_range - .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)), + body_range: item.body_range.map(|r| range_callback(self, r)), annotation_range: annotation_row_range.map(|annotation_range| { - self.anchor_after(Point::new(annotation_range.start, 0)) - ..self.anchor_before(Point::new( - annotation_range.end, - self.line_len(annotation_range.end), - )) + let point_range = Point::new(annotation_range.start, 0) + ..Point::new(annotation_range.end, self.line_len(annotation_range.end)); + range_callback(self, point_range) }), }); item_ends_stack.push(item.range.end); @@ -4056,14 +4079,13 @@ impl BufferSnapshot { if buffer_ranges.is_empty() { return None; } + let source_range_for_text = + buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end; let 
mut text = String::new(); let mut highlight_ranges = Vec::new(); let mut name_ranges = Vec::new(); - let mut chunks = self.chunks( - buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end, - true, - ); + let mut chunks = self.chunks(source_range_for_text.clone(), true); let mut last_buffer_range_end = 0; for (buffer_range, is_name) in buffer_ranges { let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end; @@ -4109,6 +4131,7 @@ impl BufferSnapshot { Some(OutlineItem { depth: 0, // We'll calculate the depth later range: item_point_range, + source_range_for_text: source_range_for_text.to_point(self), text, highlight_ranges, name_ranges, diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index b6c65ede0596fe96ba1a750bcbcbcb971a3be617..b485065689832995cdb100ae47a4f1f197ad1a70 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -420,6 +420,8 @@ pub struct CodestralSettings { pub model: Option, /// Maximum tokens to generate. pub max_tokens: Option, + /// Custom API URL to use for Codestral. 
+ pub api_url: Option, } impl AllLanguageSettings { @@ -636,6 +638,7 @@ impl settings::Settings for AllLanguageSettings { let codestral_settings = CodestralSettings { model: codestral.model, max_tokens: codestral.max_tokens, + api_url: codestral.api_url, }; let enabled_in_text_threads = edit_predictions.enabled_in_text_threads.unwrap(); diff --git a/crates/language/src/outline.rs b/crates/language/src/outline.rs index d96cd90e03142c6498ae17bc63e1787d99e8557a..2ce2b42734465a4710a7439f5e2225debc96b04a 100644 --- a/crates/language/src/outline.rs +++ b/crates/language/src/outline.rs @@ -16,6 +16,7 @@ pub struct Outline { pub struct OutlineItem { pub depth: usize, pub range: Range, + pub source_range_for_text: Range, pub text: String, pub highlight_ranges: Vec<(Range, HighlightStyle)>, pub name_ranges: Vec>, @@ -32,6 +33,8 @@ impl OutlineItem { OutlineItem { depth: self.depth, range: self.range.start.to_point(buffer)..self.range.end.to_point(buffer), + source_range_for_text: self.source_range_for_text.start.to_point(buffer) + ..self.source_range_for_text.end.to_point(buffer), text: self.text.clone(), highlight_ranges: self.highlight_ranges.clone(), name_ranges: self.name_ranges.clone(), @@ -205,6 +208,7 @@ mod tests { OutlineItem { depth: 0, range: Point::new(0, 0)..Point::new(5, 0), + source_range_for_text: Point::new(0, 0)..Point::new(0, 9), text: "class Foo".to_string(), highlight_ranges: vec![], name_ranges: vec![6..9], @@ -214,6 +218,7 @@ mod tests { OutlineItem { depth: 0, range: Point::new(2, 0)..Point::new(2, 7), + source_range_for_text: Point::new(0, 0)..Point::new(0, 7), text: "private".to_string(), highlight_ranges: vec![], name_ranges: vec![], @@ -238,6 +243,7 @@ mod tests { OutlineItem { depth: 0, range: Point::new(0, 0)..Point::new(5, 0), + source_range_for_text: Point::new(0, 0)..Point::new(0, 10), text: "fn process".to_string(), highlight_ranges: vec![], name_ranges: vec![3..10], @@ -247,6 +253,7 @@ mod tests { OutlineItem { depth: 0, range: 
Point::new(7, 0)..Point::new(12, 0), + source_range_for_text: Point::new(0, 0)..Point::new(0, 20), text: "struct DataProcessor".to_string(), highlight_ranges: vec![], name_ranges: vec![7..20], diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index c255ed3f09f733321c1066520b12355f76941931..f676f5a7a6f028c095d52273fb8c616472a35ee5 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -1866,12 +1866,8 @@ impl LspAdapter for BasedPyrightLspAdapter { } // Basedpyright by default uses `strict` type checking, we tone it down as to not surpris users maybe!({ - let basedpyright = object - .entry("basedpyright") - .or_insert(Value::Object(serde_json::Map::default())); - let analysis = basedpyright - .as_object_mut()? - .entry("analysis") + let analysis = object + .entry("basedpyright.analysis") .or_insert(Value::Object(serde_json::Map::default())); if let serde_json::map::Entry::Vacant(v) = analysis.as_object_mut()?.entry("typeCheckingMode") diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm index 91c092b353b615c5dff1f7189af816c9205cbf21..20d4cf83541f9241b2e296f8dbc4a5cb7a3a5fe7 100644 --- a/crates/languages/src/rust/injections.scm +++ b/crates/languages/src/rust/injections.scm @@ -2,8 +2,10 @@ (#set! injection.language "comment")) (macro_invocation - macro: [(identifier) (scoped_identifier)] @_macro_name - (#not-any-of? @_macro_name "view" "html") + macro: [ + ((identifier) @_macro_name) + (scoped_identifier (identifier) @_macro_name .) + ] (token_tree) @injection.content (#set! injection.language "rust")) @@ -11,8 +13,48 @@ ; it wants to inject inside of rust, instead of modifying the rust ; injections to support leptos injections (macro_invocation - macro: [(identifier) (scoped_identifier)] @_macro_name + macro: [ + ((identifier) @_macro_name) + (scoped_identifier (identifier) @_macro_name .) + ] (#any-of? 
@_macro_name "view" "html") (token_tree) @injection.content (#set! injection.language "rstml") ) + +(macro_invocation + macro: [ + ((identifier) @_macro_name) + (scoped_identifier (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "sql") + (_) @injection.content + (#set! injection.language "sql") + ) + +; lazy_regex +(macro_invocation + macro: [ + ((identifier) @_macro_name) + (scoped_identifier (identifier) @_macro_name .) + ] + (token_tree [ + (string_literal (string_content) @injection.content) + (raw_string_literal (string_content) @injection.content) + ]) + (#set! injection.language "regex") + (#any-of? @_macro_name "regex" "bytes_regex") +) + +(call_expression + function: (scoped_identifier) @_fn_path + arguments: (arguments + [ + (string_literal (string_content) @injection.content) + (raw_string_literal (string_content) @injection.content) + ] + ) + + (#match? @_fn_path ".*Regex(Builder)?::new") + (#set! injection.language "regex") +) diff --git a/crates/languages/src/rust/outline.scm b/crates/languages/src/rust/outline.scm index 3012995e2a7f23f66b0c1a891789f8fbc3524e6c..a99f53dd2b3154aa3717f67fd683da4a8b57d31b 100644 --- a/crates/languages/src/rust/outline.scm +++ b/crates/languages/src/rust/outline.scm @@ -20,7 +20,7 @@ trait: (_)? @name "for"? @context type: (_) @name - body: (_ "{" @open (_)* "}" @close)) @item + body: (_ . "{" @open "}" @close .)) @item (trait_item (visibility_modifier)? @context @@ -31,7 +31,8 @@ (visibility_modifier)? @context (function_modifiers)? @context "fn" @context - name: (_) @name) @item + name: (_) @name + body: (_ . "{" @open "}" @close .)) @item (function_signature_item (visibility_modifier)? 
@context diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index ce63cf96099a3f5eb0973a6ee97263e2734d3225..38b38466394cf8073b26b1ca5728eecc8230d9c1 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -233,10 +233,11 @@ fn render_markdown_list_item( cx: &mut RenderContext, ) -> AnyElement { use ParsedMarkdownListItemType::*; + let depth = parsed.depth.saturating_sub(1) as usize; let bullet = match &parsed.item_type { - Ordered(order) => format!("{}.", order).into_any_element(), - Unordered => "•".into_any_element(), + Ordered(order) => list_item_prefix(*order as usize, true, depth).into_any_element(), + Unordered => list_item_prefix(1, false, depth).into_any_element(), Task(checked, range) => div() .id(cx.next_id(range)) .mt(cx.scaled_rems(3.0 / 16.0)) @@ -292,10 +293,8 @@ fn render_markdown_list_item( .collect(); let item = h_flex() - .when(!parsed.nested, |this| { - this.pl(cx.scaled_rems(parsed.depth.saturating_sub(1) as f32)) - }) - .when(parsed.nested && parsed.depth > 1, |this| this.ml_neg_1p5()) + .when(!parsed.nested, |this| this.pl(cx.scaled_rems(depth as f32))) + .when(parsed.nested && depth > 0, |this| this.ml_neg_1p5()) .items_start() .children(vec![ bullet, @@ -880,6 +879,38 @@ impl Render for InteractiveMarkdownElementTooltip { } } +/// Returns the prefix for a list item. +fn list_item_prefix(order: usize, ordered: bool, depth: usize) -> String { + let ix = order.saturating_sub(1); + const NUMBERED_PREFIXES_1: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; + const NUMBERED_PREFIXES_2: &str = "abcdefghijklmnopqrstuvwxyz"; + const BULLETS: [&str; 5] = ["•", "◦", "▪", "‣", "⁃"]; + + if ordered { + match depth { + 0 => format!("{}. ", order), + 1 => format!( + "{}. ", + NUMBERED_PREFIXES_1 + .chars() + .nth(ix % NUMBERED_PREFIXES_1.len()) + .unwrap() + ), + _ => format!( + "{}. 
", + NUMBERED_PREFIXES_2 + .chars() + .nth(ix % NUMBERED_PREFIXES_2.len()) + .unwrap() + ), + } + } else { + let depth = depth.min(BULLETS.len() - 1); + let bullet = BULLETS[depth]; + return format!("{} ", bullet); + } +} + #[cfg(test)] mod tests { use super::*; @@ -1020,4 +1051,25 @@ mod tests { ]) ); } + + #[test] + fn test_list_item_prefix() { + assert_eq!(list_item_prefix(1, true, 0), "1. "); + assert_eq!(list_item_prefix(2, true, 0), "2. "); + assert_eq!(list_item_prefix(3, true, 0), "3. "); + assert_eq!(list_item_prefix(11, true, 0), "11. "); + assert_eq!(list_item_prefix(1, true, 1), "A. "); + assert_eq!(list_item_prefix(2, true, 1), "B. "); + assert_eq!(list_item_prefix(3, true, 1), "C. "); + assert_eq!(list_item_prefix(1, true, 2), "a. "); + assert_eq!(list_item_prefix(2, true, 2), "b. "); + assert_eq!(list_item_prefix(7, true, 2), "g. "); + assert_eq!(list_item_prefix(1, true, 1), "A. "); + assert_eq!(list_item_prefix(1, true, 2), "a. "); + assert_eq!(list_item_prefix(1, false, 0), "• "); + assert_eq!(list_item_prefix(1, false, 1), "◦ "); + assert_eq!(list_item_prefix(1, false, 2), "▪ "); + assert_eq!(list_item_prefix(1, false, 3), "‣ "); + assert_eq!(list_item_prefix(1, false, 4), "⁃ "); + } } diff --git a/crates/migrator/Cargo.toml b/crates/migrator/Cargo.toml index edb48a00e2ca93232d9022b6fb778449d2ecc7e4..e0a75784749c2d3a2a981b44cbbe449a7685c605 100644 --- a/crates/migrator/Cargo.toml +++ b/crates/migrator/Cargo.toml @@ -22,7 +22,7 @@ tree-sitter-json.workspace = true tree-sitter.workspace = true serde_json_lenient.workspace = true serde_json.workspace = true -settings.workspace = true +settings_json.workspace = true [dev-dependencies] pretty_assertions.workspace = true diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index 28021042825988ee70c04993ca71c5e9abe86bb4..ff9635dcef7664b17eb02a03b7584ea18ac9a91b 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -15,6 +15,7 @@ //! 
You only need to write replacement logic for x-1 to x because you can be certain that, internally, every user will be at x-1, regardless of their on disk state. use anyhow::{Context as _, Result}; +use settings_json::{infer_json_indent_size, parse_json_with_comments, update_value_in_json_text}; use std::{cmp::Reverse, ops::Range, sync::LazyLock}; use streaming_iterator::StreamingIterator; use tree_sitter::{Query, QueryMatch}; @@ -74,7 +75,7 @@ fn run_migrations(text: &str, migrations: &[MigrationType]) -> Result = None; - let json_indent_size = settings::infer_json_indent_size(¤t_text); + let json_indent_size = infer_json_indent_size(¤t_text); for migration in migrations.iter() { let migrated_text = match migration { MigrationType::TreeSitter(patterns, query) => migrate(¤t_text, patterns, query)?, @@ -83,14 +84,14 @@ fn run_migrations(text: &str, migrations: &[MigrationType]) -> Result, + anchor: text::Anchor, + cx: &App, + ) -> Option { + let snapshot = buffer.read(cx).snapshot(); + for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { + if range.context.start.cmp(&anchor, &snapshot).is_le() + && range.context.end.cmp(&anchor, &snapshot).is_ge() + { + return Some(Anchor::in_buffer(excerpt_id, snapshot.remote_id(), anchor)); + } + } + + None + } + pub fn remove_excerpts( &mut self, excerpt_ids: impl IntoIterator, @@ -5459,6 +5477,8 @@ impl MultiBufferSnapshot { Some(OutlineItem { depth: item.depth, range: self.anchor_range_in_excerpt(*excerpt_id, item.range)?, + source_range_for_text: self + .anchor_range_in_excerpt(*excerpt_id, item.source_range_for_text)?, text: item.text, highlight_ranges: item.highlight_ranges, name_ranges: item.name_ranges, @@ -5492,6 +5512,11 @@ impl MultiBufferSnapshot { .flat_map(|item| { Some(OutlineItem { depth: item.depth, + source_range_for_text: Anchor::range_in_buffer( + excerpt_id, + buffer_id, + item.source_range_for_text, + ), range: Anchor::range_in_buffer(excerpt_id, buffer_id, item.range), text: 
item.text, highlight_ranges: item.highlight_ranges, diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index ebc5946acf97b763d7ec06d264aeaa7169d7c68b..112aa3d21ebda9ef57d3bedda20e3f90735a0173 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -2484,6 +2484,7 @@ impl OutlinePanel { annotation_range: None, range: search_data.context_range.clone(), text: search_data.context_text.clone(), + source_range_for_text: search_data.context_range.clone(), highlight_ranges: search_data .highlights_data .get() diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 207e1f3bb4324d17784b1d8df53ba4bfbc4adddb..1197e9c546075dbe9342efe49ace1766fd281925 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -370,12 +370,12 @@ pub fn debug_adapters_dir() -> &'static PathBuf { DEBUG_ADAPTERS_DIR.get_or_init(|| data_dir().join("debug_adapters")) } -/// Returns the path to the agent servers directory +/// Returns the path to the external agents directory /// /// This is where agent servers are downloaded to -pub fn agent_servers_dir() -> &'static PathBuf { - static AGENT_SERVERS_DIR: OnceLock = OnceLock::new(); - AGENT_SERVERS_DIR.get_or_init(|| data_dir().join("agent_servers")) +pub fn external_agents_dir() -> &'static PathBuf { + static EXTERNAL_AGENTS_DIR: OnceLock = OnceLock::new(); + EXTERNAL_AGENTS_DIR.get_or_init(|| data_dir().join("external_agents")) } /// Returns the path to the Copilot directory. diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 90423bcace0ad405e0c88703efe09f39a8763778..d9a23ec93b80287dd1b7b483c8b6315b2119bfd5 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -275,6 +275,15 @@ impl Picker { Self::new(delegate, ContainerKind::UniformList, head, window, cx) } + /// A picker, which displays its matches using `gpui::list`, matches can have different heights. 
+ /// The picker allows the user to perform search items by text. + /// If `PickerDelegate::render_match` only returns items with the same height, use `Picker::uniform_list` as its implementation is optimized for that. + pub fn nonsearchable_list(delegate: D, window: &mut Window, cx: &mut Context) -> Self { + let head = Head::empty(Self::on_empty_head_blur, window, cx); + + Self::new(delegate, ContainerKind::List, head, window, cx) + } + /// A picker, which displays its matches using `gpui::list`, matches can have different heights. /// The picker allows the user to perform search items by text. /// If `PickerDelegate::render_match` only returns items with the same height, use `Picker::uniform_list` as its implementation is optimized for that. diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 29060fa9448bae4f0be4b2929196da3c3d2951db..a1897a89d1f0fe52fedf8902e8c631a367627b20 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -773,9 +773,7 @@ fn get_or_npm_install_builtin_agent( ) -> Task> { cx.spawn(async move |cx| { let node_path = node_runtime.binary_path().await?; - let dir = paths::data_dir() - .join("external_agents") - .join(binary_name.as_str()); + let dir = paths::external_agents_dir().join(binary_name.as_str()); fs.create_dir(&dir).await?; let mut stream = fs.read_dir(&dir).await?; @@ -1246,7 +1244,7 @@ impl ExternalAgentServer for LocalCodex { custom_command.env = Some(env); custom_command } else { - let dir = paths::data_dir().join("external_agents").join(CODEX_NAME); + let dir = paths::external_agents_dir().join(CODEX_NAME); fs.create_dir(&dir).await?; // Find or install the latest Codex release (no update checks for now). 
@@ -1418,7 +1416,7 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { env.extend(extra_env); let cache_key = format!("{}/{}", extension_id, agent_id); - let dir = paths::data_dir().join("external_agents").join(&cache_key); + let dir = paths::external_agents_dir().join(&cache_key); fs.create_dir(&dir).await?; // Determine platform key diff --git a/crates/project/src/git_store/branch_diff.rs b/crates/project/src/git_store/branch_diff.rs index 554b5b83a10afc5cc38b1568ad8d175b2cb94b83..08dbd77a541f01a52dbb9b0d10c5af3a377170f9 100644 --- a/crates/project/src/git_store/branch_diff.rs +++ b/crates/project/src/git_store/branch_diff.rs @@ -274,9 +274,10 @@ impl BranchDiff { .as_ref() .and_then(|t| t.entries.get(&item.repo_path)) .cloned(); - let status = self - .merge_statuses(Some(item.status), branch_diff.as_ref()) - .unwrap(); + let Some(status) = self.merge_statuses(Some(item.status), branch_diff.as_ref()) + else { + continue; + }; if !status.has_changes() { continue; } diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index a1eef32e8fc545046c6f7978be57ae2cc2131058..89b3315272b137e507a65df19f98ac28aa194d6a 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -26,8 +26,8 @@ use language::{ use lsp::{ AdapterServerCapabilities, CodeActionKind, CodeActionOptions, CodeDescription, CompletionContext, CompletionListItemDefaultsEditRange, CompletionTriggerKind, - DocumentHighlightKind, LanguageServer, LanguageServerId, LinkedEditingRangeServerCapabilities, - OneOf, RenameOptions, ServerCapabilities, + DiagnosticServerCapabilities, DocumentHighlightKind, LanguageServer, LanguageServerId, + LinkedEditingRangeServerCapabilities, OneOf, RenameOptions, ServerCapabilities, }; use serde_json::Value; use signature_help::{lsp_to_proto_signature, proto_to_lsp_signature}; @@ -262,6 +262,9 @@ pub(crate) struct LinkedEditingRange { #[derive(Clone, Debug)] pub(crate) struct GetDocumentDiagnostics { + /// We 
cannot blindly rely on server's capabilities.diagnostic_provider, as they're a singular field, whereas + /// a server can register multiple diagnostic providers post-mortem. + pub dynamic_caps: DiagnosticServerCapabilities, pub previous_result_id: Option, } @@ -4031,26 +4034,22 @@ impl LspCommand for GetDocumentDiagnostics { "Get diagnostics" } - fn check_capabilities(&self, server_capabilities: AdapterServerCapabilities) -> bool { - server_capabilities - .server_capabilities - .diagnostic_provider - .is_some() + fn check_capabilities(&self, _: AdapterServerCapabilities) -> bool { + true } fn to_lsp( &self, path: &Path, _: &Buffer, - language_server: &Arc, + _: &Arc, _: &App, ) -> Result { - let identifier = match language_server.capabilities().diagnostic_provider { - Some(lsp::DiagnosticServerCapabilities::Options(options)) => options.identifier, - Some(lsp::DiagnosticServerCapabilities::RegistrationOptions(options)) => { - options.diagnostic_options.identifier + let identifier = match &self.dynamic_caps { + lsp::DiagnosticServerCapabilities::Options(options) => options.identifier.clone(), + lsp::DiagnosticServerCapabilities::RegistrationOptions(options) => { + options.diagnostic_options.identifier.clone() } - None => None, }; Ok(lsp::DocumentDiagnosticParams { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 1d6d4240de0ae8a6781b49f78341d10b5127cdc1..762070796f068fb01b19522b4a506eb693b9bd63 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -75,12 +75,12 @@ use language::{ range_from_lsp, range_to_lsp, }; use lsp::{ - AdapterServerCapabilities, CodeActionKind, CompletionContext, DiagnosticSeverity, - DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, - FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, FileSystemWatcher, - LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, - LanguageServerId, 
LanguageServerName, LanguageServerSelector, LspRequestFuture, - MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, + AdapterServerCapabilities, CodeActionKind, CompletionContext, DiagnosticServerCapabilities, + DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, + FileOperationFilter, FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, + FileSystemWatcher, LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, + LanguageServerBinaryOptions, LanguageServerId, LanguageServerName, LanguageServerSelector, + LspRequestFuture, MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, TextDocumentSyncSaveOptions, TextEdit, Uri, WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles, }; @@ -138,6 +138,54 @@ pub use worktree::{ const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); +const WORKSPACE_DIAGNOSTICS_TOKEN_START: &str = "id:"; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] +pub enum ProgressToken { + Number(i32), + String(SharedString), +} + +impl std::fmt::Display for ProgressToken { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Number(number) => write!(f, "{number}"), + Self::String(string) => write!(f, "{string}"), + } + } +} + +impl ProgressToken { + fn from_lsp(value: lsp::NumberOrString) -> Self { + match value { + lsp::NumberOrString::Number(number) => Self::Number(number), + lsp::NumberOrString::String(string) => Self::String(SharedString::new(string)), + } + } + + fn to_lsp(&self) -> lsp::NumberOrString { + match self { + Self::Number(number) => lsp::NumberOrString::Number(*number), + Self::String(string) => lsp::NumberOrString::String(string.to_string()), + } + } + + fn from_proto(value: proto::ProgressToken) -> Option { + Some(match value.value? 
{ + proto::progress_token::Value::Number(number) => Self::Number(number), + proto::progress_token::Value::String(string) => Self::String(SharedString::new(string)), + }) + } + + fn to_proto(&self) -> proto::ProgressToken { + proto::ProgressToken { + value: Some(match self { + Self::Number(number) => proto::progress_token::Value::Number(*number), + Self::String(string) => proto::progress_token::Value::String(string.to_string()), + }), + } + } +} #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum FormatTrigger { @@ -190,6 +238,12 @@ pub struct DocumentDiagnostics { version: Option, } +#[derive(Default)] +struct DynamicRegistrations { + did_change_watched_files: HashMap>, + diagnostics: HashMap, DiagnosticServerCapabilities>, +} + pub struct LocalLspStore { weak: WeakEntity, worktree_store: Entity, @@ -207,8 +261,7 @@ pub struct LocalLspStore { watched_manifest_filenames: HashSet, language_server_paths_watched_for_rename: HashMap, - language_server_watcher_registrations: - HashMap>>, + language_server_dynamic_registrations: HashMap, supplementary_language_servers: HashMap)>, prettier_store: Entity, @@ -707,9 +760,10 @@ impl LocalLspStore { async move { this.update(&mut cx, |this, _| { if let Some(status) = this.language_server_statuses.get_mut(&server_id) - && let lsp::NumberOrString::String(token) = params.token { - status.progress_tokens.insert(token); + status + .progress_tokens + .insert(ProgressToken::from_lsp(params.token)); } })?; @@ -2487,7 +2541,7 @@ impl LocalLspStore { uri.clone(), adapter.language_id(&language.name()), 0, - initial_snapshot.text_with_original_line_endings(), + initial_snapshot.text(), ); vec![snapshot] @@ -3184,7 +3238,7 @@ impl LocalLspStore { for watcher in watchers { if let Some((worktree, literal_prefix, pattern)) = - self.worktree_and_path_for_file_watcher(&worktrees, watcher, cx) + Self::worktree_and_path_for_file_watcher(&worktrees, watcher, cx) { worktree.update(cx, |worktree, _| { if let Some((tree, glob)) = @@ -3282,7 +3336,6 
@@ impl LocalLspStore { } fn worktree_and_path_for_file_watcher( - &self, worktrees: &[Entity], watcher: &FileSystemWatcher, cx: &App, @@ -3330,15 +3383,18 @@ impl LocalLspStore { language_server_id: LanguageServerId, cx: &mut Context, ) { - let Some(watchers) = self - .language_server_watcher_registrations + let Some(registrations) = self + .language_server_dynamic_registrations .get(&language_server_id) else { return; }; - let watch_builder = - self.rebuild_watched_paths_inner(language_server_id, watchers.values().flatten(), cx); + let watch_builder = self.rebuild_watched_paths_inner( + language_server_id, + registrations.did_change_watched_files.values().flatten(), + cx, + ); let watcher = watch_builder.build(self.fs.clone(), language_server_id, cx); self.language_server_watched_paths .insert(language_server_id, watcher); @@ -3354,11 +3410,13 @@ impl LocalLspStore { cx: &mut Context, ) { let registrations = self - .language_server_watcher_registrations + .language_server_dynamic_registrations .entry(language_server_id) .or_default(); - registrations.insert(registration_id.to_string(), params.watchers); + registrations + .did_change_watched_files + .insert(registration_id.to_string(), params.watchers); self.rebuild_watched_paths(language_server_id, cx); } @@ -3370,11 +3428,15 @@ impl LocalLspStore { cx: &mut Context, ) { let registrations = self - .language_server_watcher_registrations + .language_server_dynamic_registrations .entry(language_server_id) .or_default(); - if registrations.remove(registration_id).is_some() { + if registrations + .did_change_watched_files + .remove(registration_id) + .is_some() + { log::info!( "language server {}: unregistered workspace/DidChangeWatchedFiles capability with id {}", language_server_id, @@ -3619,9 +3681,9 @@ pub enum LspStoreEvent { #[derive(Clone, Debug, Serialize)] pub struct LanguageServerStatus { pub name: LanguageServerName, - pub pending_work: BTreeMap, + pub pending_work: BTreeMap, pub 
has_pending_diagnostic_updates: bool, - progress_tokens: HashSet, + progress_tokens: HashSet, pub worktree: Option, } @@ -3782,7 +3844,7 @@ impl LspStore { last_workspace_edits_by_language_server: Default::default(), language_server_watched_paths: Default::default(), language_server_paths_watched_for_rename: Default::default(), - language_server_watcher_registrations: Default::default(), + language_server_dynamic_registrations: Default::default(), buffers_being_formatted: Default::default(), buffer_snapshots: Default::default(), prettier_store, @@ -4367,7 +4429,7 @@ impl LspStore { cx: &App, ) -> bool where - F: Fn(&lsp::ServerCapabilities) -> bool, + F: FnMut(&lsp::ServerCapabilities) -> bool, { let Some(language) = buffer.read(cx).language().cloned() else { return false; @@ -4471,7 +4533,7 @@ impl LspStore { this.update(cx, |this, cx| { this.on_lsp_work_start( language_server.server_id(), - id.to_string(), + ProgressToken::Number(id), LanguageServerProgress { is_disk_based_diagnostics_progress: false, is_cancellable: false, @@ -4489,7 +4551,11 @@ impl LspStore { Some(defer(|| { cx.update(|cx| { this.update(cx, |this, cx| { - this.on_lsp_work_end(language_server.server_id(), id.to_string(), cx); + this.on_lsp_work_end( + language_server.server_id(), + ProgressToken::Number(id), + cx, + ); }) }) .log_err(); @@ -6447,12 +6513,30 @@ impl LspStore { let buffer_id = buffer.read(cx).remote_id(); if let Some((client, upstream_project_id)) = self.upstream_client() { + let mut suitable_capabilities = None; + // Are we capable for proto request? + let any_server_has_diagnostics_provider = self.check_if_capable_for_proto_request( + &buffer, + |capabilities| { + if let Some(caps) = &capabilities.diagnostic_provider { + suitable_capabilities = Some(caps.clone()); + true + } else { + false + } + }, + cx, + ); + // We don't really care which caps are passed into the request, as they're ignored by RPC anyways. 
+ let Some(dynamic_caps) = suitable_capabilities else { + return Task::ready(Ok(None)); + }; + assert!(any_server_has_diagnostics_provider); + let request = GetDocumentDiagnostics { previous_result_id: None, + dynamic_caps, }; - if !self.is_capable_for_proto_request(&buffer, &request, cx) { - return Task::ready(Ok(None)); - } let request_task = client.request_lsp( upstream_project_id, None, @@ -6468,23 +6552,44 @@ impl LspStore { Ok(None) }) } else { - let server_ids = buffer.update(cx, |buffer, cx| { + let servers = buffer.update(cx, |buffer, cx| { self.language_servers_for_local_buffer(buffer, cx) - .map(|(_, server)| server.server_id()) + .map(|(_, server)| server.clone()) .collect::>() }); - let pull_diagnostics = server_ids + + let pull_diagnostics = servers .into_iter() - .map(|server_id| { - let result_id = self.result_id(server_id, buffer_id, cx); - self.request_lsp( - buffer.clone(), - LanguageServerToQuery::Other(server_id), - GetDocumentDiagnostics { - previous_result_id: result_id, - }, - cx, - ) + .flat_map(|server| { + let result = maybe!({ + let local = self.as_local()?; + let server_id = server.server_id(); + let providers_with_identifiers = local + .language_server_dynamic_registrations + .get(&server_id) + .into_iter() + .flat_map(|registrations| registrations.diagnostics.values().cloned()) + .collect::>(); + Some( + providers_with_identifiers + .into_iter() + .map(|dynamic_caps| { + let result_id = self.result_id(server_id, buffer_id, cx); + self.request_lsp( + buffer.clone(), + LanguageServerToQuery::Other(server_id), + GetDocumentDiagnostics { + previous_result_id: result_id, + dynamic_caps, + }, + cx, + ) + }) + .collect::>(), + ) + }); + + result.unwrap_or_default() }) .collect::>(); @@ -6556,9 +6661,9 @@ impl LspStore { return HashMap::default(); } - let last_chunk_number = applicable_chunks.len() - 1; + let last_chunk_number = existing_inlay_hints.buffer_chunks_len() - 1; - for (i, row_chunk) in applicable_chunks.into_iter().enumerate() { + 
for row_chunk in applicable_chunks { match ( existing_inlay_hints .cached_hints(&row_chunk) @@ -6571,7 +6676,7 @@ impl LspStore { .cloned(), ) { (None, None) => { - let end = if last_chunk_number == i { + let end = if last_chunk_number == row_chunk.id { Point::new(row_chunk.end, buffer_snapshot.line_len(row_chunk.end)) } else { Point::new(row_chunk.end, 0) @@ -6792,7 +6897,7 @@ impl LspStore { && range.start.is_valid(&buffer_snapshot) && range.end.is_valid(&buffer_snapshot) && hint.position.cmp(&range.start, &buffer_snapshot).is_ge() - && hint.position.cmp(&range.end, &buffer_snapshot).is_le() + && hint.position.cmp(&range.end, &buffer_snapshot).is_lt() }); (server_id, new_hints) }) @@ -7522,7 +7627,6 @@ impl LspStore { let previous_snapshot = buffer_snapshots.last()?; let build_incremental_change = || { - let line_ending = next_snapshot.line_ending(); buffer .edits_since::>( previous_snapshot.snapshot.version(), @@ -7530,18 +7634,16 @@ impl LspStore { .map(|edit| { let edit_start = edit.new.start.0; let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0); + let new_text = next_snapshot + .text_for_range(edit.new.start.1..edit.new.end.1) + .collect(); lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range::new( point_to_lsp(edit_start), point_to_lsp(edit_end), )), range_length: None, - // Collect changed text and preserve line endings. - // text_for_range returns chunks with normalized \n, so we need to - // convert to the buffer's actual line ending for LSP. 
- text: line_ending.into_string( - next_snapshot.text_for_range(edit.new.start.1..edit.new.end.1), - ), + text: new_text, } }) .collect() @@ -7561,7 +7663,7 @@ impl LspStore { vec![lsp::TextDocumentContentChangeEvent { range: None, range_length: None, - text: next_snapshot.text_with_original_line_endings(), + text: next_snapshot.text(), }] } Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(), @@ -8876,7 +8978,8 @@ impl LspStore { proto::update_language_server::Variant::WorkStart(payload) => { lsp_store.on_lsp_work_start( language_server_id, - payload.token, + ProgressToken::from_proto(payload.token.context("missing progress token")?) + .context("invalid progress token value")?, LanguageServerProgress { title: payload.title, is_disk_based_diagnostics_progress: false, @@ -8891,7 +8994,8 @@ impl LspStore { proto::update_language_server::Variant::WorkProgress(payload) => { lsp_store.on_lsp_work_progress( language_server_id, - payload.token, + ProgressToken::from_proto(payload.token.context("missing progress token")?) + .context("invalid progress token value")?, LanguageServerProgress { title: None, is_disk_based_diagnostics_progress: false, @@ -8905,7 +9009,12 @@ impl LspStore { } proto::update_language_server::Variant::WorkEnd(payload) => { - lsp_store.on_lsp_work_end(language_server_id, payload.token, cx); + lsp_store.on_lsp_work_end( + language_server_id, + ProgressToken::from_proto(payload.token.context("missing progress token")?) 
+ .context("invalid progress token value")?, + cx, + ); } proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => { @@ -9298,38 +9407,38 @@ impl LspStore { fn on_lsp_progress( &mut self, - progress: lsp::ProgressParams, + progress_params: lsp::ProgressParams, language_server_id: LanguageServerId, disk_based_diagnostics_progress_token: Option, cx: &mut Context, ) { - let token = match progress.token { - lsp::NumberOrString::String(token) => token, - lsp::NumberOrString::Number(token) => { - log::info!("skipping numeric progress token {}", token); - return; - } - }; - - match progress.value { + match progress_params.value { lsp::ProgressParamsValue::WorkDone(progress) => { self.handle_work_done_progress( progress, language_server_id, disk_based_diagnostics_progress_token, - token, + ProgressToken::from_lsp(progress_params.token), cx, ); } lsp::ProgressParamsValue::WorkspaceDiagnostic(report) => { + let identifier = match progress_params.token { + lsp::NumberOrString::Number(_) => None, + lsp::NumberOrString::String(token) => token + .split_once(WORKSPACE_DIAGNOSTICS_TOKEN_START) + .map(|(_, id)| id.to_owned()), + }; if let Some(LanguageServerState::Running { - workspace_refresh_task: Some(workspace_refresh_task), + workspace_diagnostics_refresh_tasks, .. 
}) = self .as_local_mut() .and_then(|local| local.language_servers.get_mut(&language_server_id)) + && let Some(workspace_diagnostics) = + workspace_diagnostics_refresh_tasks.get_mut(&identifier) { - workspace_refresh_task.progress_tx.try_send(()).ok(); + workspace_diagnostics.progress_tx.try_send(()).ok(); self.apply_workspace_diagnostic_report(language_server_id, report, cx) } } @@ -9341,7 +9450,7 @@ impl LspStore { progress: lsp::WorkDoneProgress, language_server_id: LanguageServerId, disk_based_diagnostics_progress_token: Option, - token: String, + token: ProgressToken, cx: &mut Context, ) { let language_server_status = @@ -9355,9 +9464,14 @@ impl LspStore { return; } - let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token - .as_ref() - .is_some_and(|disk_based_token| token.starts_with(disk_based_token)); + let is_disk_based_diagnostics_progress = + if let (Some(disk_based_token), ProgressToken::String(token)) = + (&disk_based_diagnostics_progress_token, &token) + { + token.starts_with(disk_based_token) + } else { + false + }; match progress { lsp::WorkDoneProgress::Begin(report) => { @@ -9404,7 +9518,7 @@ impl LspStore { fn on_lsp_work_start( &mut self, language_server_id: LanguageServerId, - token: String, + token: ProgressToken, progress: LanguageServerProgress, cx: &mut Context, ) { @@ -9418,7 +9532,7 @@ impl LspStore { .language_server_adapter_for_id(language_server_id) .map(|adapter| adapter.name()), message: proto::update_language_server::Variant::WorkStart(proto::LspWorkStart { - token, + token: Some(token.to_proto()), title: progress.title, message: progress.message, percentage: progress.percentage.map(|p| p as u32), @@ -9430,7 +9544,7 @@ impl LspStore { fn on_lsp_work_progress( &mut self, language_server_id: LanguageServerId, - token: String, + token: ProgressToken, progress: LanguageServerProgress, cx: &mut Context, ) { @@ -9470,7 +9584,7 @@ impl LspStore { .map(|adapter| adapter.name()), message: 
proto::update_language_server::Variant::WorkProgress( proto::LspWorkProgress { - token, + token: Some(token.to_proto()), message: progress.message, percentage: progress.percentage.map(|p| p as u32), is_cancellable: Some(progress.is_cancellable), @@ -9483,7 +9597,7 @@ impl LspStore { fn on_lsp_work_end( &mut self, language_server_id: LanguageServerId, - token: String, + token: ProgressToken, cx: &mut Context, ) { if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) { @@ -9500,7 +9614,9 @@ impl LspStore { name: self .language_server_adapter_for_id(language_server_id) .map(|adapter| adapter.name()), - message: proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd { token }), + message: proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd { + token: Some(token.to_proto()), + }), }) } @@ -9912,25 +10028,33 @@ impl LspStore { } pub async fn handle_cancel_language_server_work( - this: Entity, + lsp_store: Entity, envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - this.update(&mut cx, |this, cx| { + lsp_store.update(&mut cx, |lsp_store, cx| { if let Some(work) = envelope.payload.work { match work { proto::cancel_language_server_work::Work::Buffers(buffers) => { let buffers = - this.buffer_ids_to_buffers(buffers.buffer_ids.into_iter(), cx); - this.cancel_language_server_work_for_buffers(buffers, cx); + lsp_store.buffer_ids_to_buffers(buffers.buffer_ids.into_iter(), cx); + lsp_store.cancel_language_server_work_for_buffers(buffers, cx); } proto::cancel_language_server_work::Work::LanguageServerWork(work) => { let server_id = LanguageServerId::from_proto(work.language_server_id); - this.cancel_language_server_work(server_id, work.token, cx); + let token = work + .token + .map(|token| { + ProgressToken::from_proto(token) + .context("invalid work progress token") + }) + .transpose()?; + lsp_store.cancel_language_server_work(server_id, token, cx); } } } - })?; + anyhow::Ok(()) + })??; Ok(proto::Ack {}) } @@ -10784,13 
+10908,28 @@ impl LspStore { let workspace_folders = workspace_folders.lock().clone(); language_server.set_workspace_folders(workspace_folders); + let workspace_diagnostics_refresh_tasks = language_server + .capabilities() + .diagnostic_provider + .and_then(|provider| { + local + .language_server_dynamic_registrations + .entry(server_id) + .or_default() + .diagnostics + .entry(None) + .or_insert(provider.clone()); + let workspace_refresher = + lsp_workspace_diagnostics_refresh(None, provider, language_server.clone(), cx)?; + + Some((None, workspace_refresher)) + }) + .into_iter() + .collect(); local.language_servers.insert( server_id, LanguageServerState::Running { - workspace_refresh_task: lsp_workspace_diagnostics_refresh( - language_server.clone(), - cx, - ), + workspace_diagnostics_refresh_tasks, adapter: adapter.clone(), server: language_server.clone(), simulate_disk_based_diagnostics_completion: None, @@ -10925,12 +11064,13 @@ impl LspStore { let snapshot = versions.last().unwrap(); let version = snapshot.version; + let initial_snapshot = &snapshot.snapshot; let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap(); language_server.register_buffer( uri, adapter.language_id(&language.name()), version, - buffer_handle.read(cx).text_with_original_line_endings(), + initial_snapshot.text(), ); buffer_paths_registered.push((buffer_id, file.abs_path(cx))); local @@ -11025,7 +11165,7 @@ impl LspStore { pub(crate) fn cancel_language_server_work( &mut self, server_id: LanguageServerId, - token_to_cancel: Option, + token_to_cancel: Option, cx: &mut Context, ) { if let Some(local) = self.as_local() { @@ -11043,7 +11183,7 @@ impl LspStore { server .notify::( WorkDoneProgressCancelParams { - token: lsp::NumberOrString::String(token.clone()), + token: token.to_lsp(), }, ) .ok(); @@ -11057,7 +11197,7 @@ impl LspStore { proto::cancel_language_server_work::Work::LanguageServerWork( proto::cancel_language_server_work::LanguageServerWork { language_server_id: 
server_id.to_proto(), - token: token_to_cancel, + token: token_to_cancel.map(|token| token.to_proto()), }, ), ), @@ -11495,13 +11635,15 @@ impl LspStore { pub fn pull_workspace_diagnostics(&mut self, server_id: LanguageServerId) { if let Some(LanguageServerState::Running { - workspace_refresh_task: Some(workspace_refresh_task), + workspace_diagnostics_refresh_tasks, .. }) = self .as_local_mut() .and_then(|local| local.language_servers.get_mut(&server_id)) { - workspace_refresh_task.refresh_tx.try_send(()).ok(); + for diagnostics in workspace_diagnostics_refresh_tasks.values_mut() { + diagnostics.refresh_tx.try_send(()).ok(); + } } } @@ -11517,11 +11659,13 @@ impl LspStore { local.language_server_ids_for_buffer(buffer, cx) }) { if let Some(LanguageServerState::Running { - workspace_refresh_task: Some(workspace_refresh_task), + workspace_diagnostics_refresh_tasks, .. }) = local.language_servers.get_mut(&server_id) { - workspace_refresh_task.refresh_tx.try_send(()).ok(); + for diagnostics in workspace_diagnostics_refresh_tasks.values_mut() { + diagnostics.refresh_tx.try_send(()).ok(); + } } } } @@ -11847,26 +11991,49 @@ impl LspStore { "textDocument/diagnostic" => { if let Some(caps) = reg .register_options - .map(serde_json::from_value) + .map(serde_json::from_value::) .transpose()? { - let state = self + let local = self .as_local_mut() - .context("Expected LSP Store to be local")? 
+ .context("Expected LSP Store to be local")?; + let state = local .language_servers .get_mut(&server_id) .context("Could not obtain Language Servers state")?; - server.update_capabilities(|capabilities| { - capabilities.diagnostic_provider = Some(caps); - }); + local + .language_server_dynamic_registrations + .get_mut(&server_id) + .and_then(|registrations| { + registrations + .diagnostics + .insert(Some(reg.id.clone()), caps.clone()) + }); + + let mut can_now_provide_diagnostics = false; if let LanguageServerState::Running { - workspace_refresh_task, + workspace_diagnostics_refresh_tasks, .. } = state - && workspace_refresh_task.is_none() + && let Some(task) = lsp_workspace_diagnostics_refresh( + Some(reg.id.clone()), + caps.clone(), + server.clone(), + cx, + ) { - *workspace_refresh_task = - lsp_workspace_diagnostics_refresh(server.clone(), cx) + workspace_diagnostics_refresh_tasks.insert(Some(reg.id), task); + can_now_provide_diagnostics = true; + } + + // We don't actually care about capabilities.diagnostic_provider, but it IS relevant for the remote peer + // to know that there's at least one provider. Otherwise, it will never ask us to issue documentdiagnostic calls on their behalf, + // as it'll think that they're not supported. + if can_now_provide_diagnostics { + server.update_capabilities(|capabilities| { + debug_assert!(capabilities.diagnostic_provider.is_none()); + capabilities.diagnostic_provider = Some(caps); + }); } notify_server_capabilities_updated(&server, cx); @@ -12029,22 +12196,45 @@ impl LspStore { notify_server_capabilities_updated(&server, cx); } "textDocument/diagnostic" => { - server.update_capabilities(|capabilities| { - capabilities.diagnostic_provider = None; - }); - let state = self + let local = self .as_local_mut() - .context("Expected LSP Store to be local")? 
+ .context("Expected LSP Store to be local")?; + + let state = local .language_servers .get_mut(&server_id) .context("Could not obtain Language Servers state")?; - if let LanguageServerState::Running { - workspace_refresh_task, - .. - } = state + let options = local + .language_server_dynamic_registrations + .get_mut(&server_id) + .with_context(|| { + format!("Expected dynamic registration to exist for server {server_id}") + })?.diagnostics + .remove(&Some(unreg.id.clone())) + .with_context(|| format!( + "Attempted to unregister non-existent diagnostic registration with ID {}", + unreg.id) + )?; + + let mut has_any_diagnostic_providers_still = true; + if let Some(identifier) = diagnostic_identifier(&options) + && let LanguageServerState::Running { + workspace_diagnostics_refresh_tasks, + .. + } = state { - _ = workspace_refresh_task.take(); + workspace_diagnostics_refresh_tasks.remove(&identifier); + has_any_diagnostic_providers_still = + !workspace_diagnostics_refresh_tasks.is_empty(); } + + if !has_any_diagnostic_providers_still { + server.update_capabilities(|capabilities| { + debug_assert!(capabilities.diagnostic_provider.is_some()); + capabilities.diagnostic_provider = None; + }); + } + notify_server_capabilities_updated(&server, cx); } "textDocument/documentColor" => { @@ -12333,24 +12523,12 @@ fn subscribe_to_binary_statuses( } fn lsp_workspace_diagnostics_refresh( + registration_id: Option, + options: DiagnosticServerCapabilities, server: Arc, cx: &mut Context<'_, LspStore>, ) -> Option { - let identifier = match server.capabilities().diagnostic_provider? 
{ - lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => { - if !diagnostic_options.workspace_diagnostics { - return None; - } - diagnostic_options.identifier - } - lsp::DiagnosticServerCapabilities::RegistrationOptions(registration_options) => { - let diagnostic_options = registration_options.diagnostic_options; - if !diagnostic_options.workspace_diagnostics { - return None; - } - diagnostic_options.identifier - } - }; + let identifier = diagnostic_identifier(&options)?; let (progress_tx, mut progress_rx) = mpsc::channel(1); let (mut refresh_tx, mut refresh_rx) = mpsc::channel(1); @@ -12396,7 +12574,14 @@ fn lsp_workspace_diagnostics_refresh( return; }; - let token = format!("workspace/diagnostic-{}-{}", server.server_id(), requests); + let token = if let Some(identifier) = ®istration_id { + format!( + "workspace/diagnostic/{}/{requests}/{WORKSPACE_DIAGNOSTICS_TOKEN_START}{identifier}", + server.server_id(), + ) + } else { + format!("workspace/diagnostic/{}/{requests}", server.server_id()) + }; progress_rx.try_recv().ok(); let timer = @@ -12462,6 +12647,24 @@ fn lsp_workspace_diagnostics_refresh( }) } +fn diagnostic_identifier(options: &DiagnosticServerCapabilities) -> Option> { + match &options { + lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => { + if !diagnostic_options.workspace_diagnostics { + return None; + } + Some(diagnostic_options.identifier.clone()) + } + lsp::DiagnosticServerCapabilities::RegistrationOptions(registration_options) => { + let diagnostic_options = ®istration_options.diagnostic_options; + if !diagnostic_options.workspace_diagnostics { + return None; + } + Some(diagnostic_options.identifier.clone()) + } + } +} + fn resolve_word_completion(snapshot: &BufferSnapshot, completion: &mut Completion) { let CompletionSource::BufferWord { word_range, @@ -12866,7 +13069,7 @@ pub enum LanguageServerState { adapter: Arc, server: Arc, simulate_disk_based_diagnostics_completion: Option>, - workspace_refresh_task: Option, + 
workspace_diagnostics_refresh_tasks: HashMap, WorkspaceRefreshTask>, }, } diff --git a/crates/project/src/lsp_store/inlay_hint_cache.rs b/crates/project/src/lsp_store/inlay_hint_cache.rs index 0d527b83d2eef03b9473edc2711041c0ebccadb6..7d3ec27e5af83c4d83b269c171943d90754bd1a6 100644 --- a/crates/project/src/lsp_store/inlay_hint_cache.rs +++ b/crates/project/src/lsp_store/inlay_hint_cache.rs @@ -67,7 +67,7 @@ struct HintForId { /// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct BufferChunk { - id: usize, + pub id: usize, pub start: BufferRow, pub end: BufferRow, } @@ -218,4 +218,8 @@ impl BufferInlayHints { debug_assert_eq!(*hint_id, id, "Invalid pointer {hint_for_id:?}"); Some(hint) } + + pub fn buffer_chunks_len(&self) -> usize { + self.buffer_chunks.len() + } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 910e217a67785249b4d83b7929b32c21b079a5d7..e188ebd5e32947777f987ff43df52f09d006d58f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -146,7 +146,7 @@ pub use buffer_store::ProjectTransaction; pub use lsp_store::{ DiagnosticSummary, InvalidationStrategy, LanguageServerLogType, LanguageServerProgress, LanguageServerPromptRequest, LanguageServerStatus, LanguageServerToQuery, LspStore, - LspStoreEvent, SERVER_PROGRESS_THROTTLE_TIMEOUT, + LspStoreEvent, ProgressToken, SERVER_PROGRESS_THROTTLE_TIMEOUT, }; pub use toolchain_store::{ToolchainStore, Toolchains}; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; @@ -3451,7 +3451,7 @@ impl Project { pub fn cancel_language_server_work( &mut self, server_id: LanguageServerId, - token_to_cancel: Option, + token_to_cancel: Option, cx: &mut Context, ) { self.lsp_store.update(cx, |lsp_store, cx| { diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 676fac507252646a0650be87dc7a22689a1e70d0..d1ff7a9e8d661b0d642b307281dd8a7ba9b76b41 100644 --- a/crates/project/src/project_settings.rs +++ 
b/crates/project/src/project_settings.rs @@ -728,6 +728,7 @@ impl SettingsObserver { cx.update_global(|settings_store: &mut SettingsStore, cx| { settings_store .set_user_settings(&envelope.payload.contents, cx) + .result() .context("setting new user settings")?; anyhow::Ok(()) })??; diff --git a/crates/prompt_store/Cargo.toml b/crates/prompt_store/Cargo.toml index 1e9552f1978857f04920406eadb3a64df0c51d88..13bacbfad3bf2b5deb4a20af866f37dad47288ff 100644 --- a/crates/prompt_store/Cargo.toml +++ b/crates/prompt_store/Cargo.toml @@ -28,7 +28,6 @@ parking_lot.workspace = true paths.workspace = true rope.workspace = true serde.workspace = true -serde_json.workspace = true text.workspace = true util.workspace = true uuid.workspace = true diff --git a/crates/prompt_store/src/prompts.rs b/crates/prompt_store/src/prompts.rs index e6a9144a23a7bb31c18a119fc197709aebf935f4..3d47fbce7014e8e791ca8961447c8df1adf45abf 100644 --- a/crates/prompt_store/src/prompts.rs +++ b/crates/prompt_store/src/prompts.rs @@ -51,22 +51,6 @@ impl ProjectContext { } } -#[derive(Debug, Clone, Serialize)] -pub struct ModelContext { - pub available_tools: Vec, -} - -#[derive(Serialize)] -struct PromptTemplateContext { - #[serde(flatten)] - project: ProjectContext, - - #[serde(flatten)] - model: ModelContext, - - has_tools: bool, -} - #[derive(Debug, Clone, Serialize)] pub struct UserRulesContext { pub uuid: UserPromptId, @@ -144,40 +128,9 @@ impl PromptBuilder { .unwrap_or_else(|| Arc::new(Self::new(None).unwrap())) } - /// Helper function for handlebars templates to check if a specific tool is enabled - fn has_tool_helper( - h: &handlebars::Helper, - _: &Handlebars, - ctx: &handlebars::Context, - _: &mut handlebars::RenderContext, - out: &mut dyn handlebars::Output, - ) -> handlebars::HelperResult { - let tool_name = h.param(0).and_then(|v| v.value().as_str()).ok_or_else(|| { - handlebars::RenderError::new("has_tool helper: missing or invalid tool name parameter") - })?; - - let enabled_tools = ctx - 
.data() - .get("available_tools") - .and_then(|v| v.as_array()) - .map(|arr| arr.iter().filter_map(|v| v.as_str()).collect::>()) - .ok_or_else(|| { - handlebars::RenderError::new( - "has_tool handlebars helper: available_tools not found or not an array", - ) - })?; - - if enabled_tools.contains(&tool_name) { - out.write("true")?; - } - - Ok(()) - } - pub fn new(loading_params: Option) -> Result { let mut handlebars = Handlebars::new(); Self::register_built_in_templates(&mut handlebars)?; - handlebars.register_helper("has_tool", Box::new(Self::has_tool_helper)); let handlebars = Arc::new(Mutex::new(handlebars)); @@ -323,22 +276,6 @@ impl PromptBuilder { Ok(()) } - pub fn generate_assistant_system_prompt( - &self, - context: &ProjectContext, - model_context: &ModelContext, - ) -> Result { - let template_context = PromptTemplateContext { - project: context.clone(), - model: model_context.clone(), - has_tools: !model_context.available_tools.is_empty(), - }; - - self.handlebars - .lock() - .render("assistant_system_prompt", &template_context) - } - pub fn generate_inline_transformation_prompt( &self, user_prompt: String, @@ -445,99 +382,3 @@ impl PromptBuilder { .render("terminal_assistant_prompt", &context) } } - -#[cfg(test)] -mod test { - use super::*; - use serde_json; - use util::rel_path::rel_path; - use uuid::Uuid; - - #[test] - fn test_assistant_system_prompt_renders() { - let worktrees = vec![WorktreeContext { - root_name: "path".into(), - abs_path: Path::new("/path/to/root").into(), - rules_file: Some(RulesFileContext { - path_in_worktree: rel_path(".rules").into(), - text: "".into(), - project_entry_id: 0, - }), - }]; - let default_user_rules = vec![UserRulesContext { - uuid: UserPromptId(Uuid::nil()), - title: Some("Rules title".into()), - contents: "Rules contents".into(), - }]; - let project_context = ProjectContext::new(worktrees, default_user_rules); - let model_context = ModelContext { - available_tools: ["grep".into()].to_vec(), - }; - let prompt = 
PromptBuilder::new(None) - .unwrap() - .generate_assistant_system_prompt(&project_context, &model_context) - .unwrap(); - assert!( - prompt.contains("Rules contents"), - "Expected default user rules to be in rendered prompt" - ); - } - - #[test] - fn test_assistant_system_prompt_depends_on_enabled_tools() { - let worktrees = vec![WorktreeContext { - root_name: "path".into(), - abs_path: Path::new("/path/to/root").into(), - rules_file: None, - }]; - let default_user_rules = vec![]; - let project_context = ProjectContext::new(worktrees, default_user_rules); - let prompt_builder = PromptBuilder::new(None).unwrap(); - - // When the `grep` tool is enabled, it should be mentioned in the prompt - let model_context = ModelContext { - available_tools: ["grep".into()].to_vec(), - }; - let prompt_with_grep = prompt_builder - .generate_assistant_system_prompt(&project_context, &model_context) - .unwrap(); - assert!( - prompt_with_grep.contains("grep"), - "`grep` tool should be mentioned in prompt when the tool is enabled" - ); - - // When the `grep` tool is disabled, it should not be mentioned in the prompt - let model_context = ModelContext { - available_tools: [].to_vec(), - }; - let prompt_without_grep = prompt_builder - .generate_assistant_system_prompt(&project_context, &model_context) - .unwrap(); - assert!( - !prompt_without_grep.contains("grep"), - "`grep` tool should not be mentioned in prompt when the tool is disabled" - ); - } - - #[test] - fn test_has_tool_helper() { - let mut handlebars = Handlebars::new(); - handlebars.register_helper("has_tool", Box::new(PromptBuilder::has_tool_helper)); - handlebars - .register_template_string( - "test_template", - "{{#if (has_tool 'grep')}}grep is enabled{{else}}grep is disabled{{/if}}", - ) - .unwrap(); - - // grep available - let data = serde_json::json!({"available_tools": ["grep", "fetch"]}); - let result = handlebars.render("test_template", &data).unwrap(); - assert_eq!(result, "grep is enabled"); - - // grep not 
available - let data = serde_json::json!({"available_tools": ["terminal", "fetch"]}); - let result = handlebars.render("test_template", &data).unwrap(); - assert_eq!(result, "grep is disabled"); - } -} diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 7e446a915febbc03f2dd5920faf12a58a5d9b639..30059431094bf1b11c1e481979ed5ea651f1d40b 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -552,23 +552,33 @@ message UpdateLanguageServer { } } +message ProgressToken { + oneof value { + int32 number = 1; + string string = 2; + } +} + message LspWorkStart { - string token = 1; + reserved 1; optional string title = 4; optional string message = 2; optional uint32 percentage = 3; optional bool is_cancellable = 5; + ProgressToken token = 6; } message LspWorkProgress { - string token = 1; + reserved 1; optional string message = 2; optional uint32 percentage = 3; optional bool is_cancellable = 4; + ProgressToken token = 5; } message LspWorkEnd { - string token = 1; + reserved 1; + ProgressToken token = 2; } message LspDiskBasedDiagnosticsUpdating {} @@ -708,7 +718,8 @@ message CancelLanguageServerWork { message LanguageServerWork { uint64 language_server_id = 1; - optional string token = 2; + reserved 2; + optional ProgressToken token = 3; } } diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index db9721063d61de5d0d9ec1b4902a249ef8b0fd75..80c20ca21f1add0962995c1e948acdbeff14c374 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -28,7 +28,8 @@ use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, tooltip_con use util::{ResultExt, paths::PathExt}; use workspace::{ CloseIntent, HistoryManager, ModalView, OpenOptions, PathList, SerializedWorkspaceLocation, - WORKSPACE_DB, Workspace, WorkspaceId, with_active_or_new_workspace, + WORKSPACE_DB, Workspace, WorkspaceId, 
notifications::DetachAndPromptErr, + with_active_or_new_workspace, }; use zed_actions::{OpenRecent, OpenRemote}; @@ -420,77 +421,79 @@ impl PickerDelegate for RecentProjectsDelegate { } else { !secondary }; - workspace - .update(cx, |workspace, cx| { - if workspace.database_id() == Some(*candidate_workspace_id) { - Task::ready(Ok(())) - } else { - match candidate_workspace_location.clone() { - SerializedWorkspaceLocation::Local => { - let paths = candidate_workspace_paths.paths().to_vec(); - if replace_current_window { - cx.spawn_in(window, async move |workspace, cx| { - let continue_replacing = workspace - .update_in(cx, |workspace, window, cx| { - workspace.prepare_to_close( - CloseIntent::ReplaceWindow, - window, - cx, - ) - })? - .await?; - if continue_replacing { + workspace.update(cx, |workspace, cx| { + if workspace.database_id() == Some(*candidate_workspace_id) { + return; + } + match candidate_workspace_location.clone() { + SerializedWorkspaceLocation::Local => { + let paths = candidate_workspace_paths.paths().to_vec(); + if replace_current_window { + cx.spawn_in(window, async move |workspace, cx| { + let continue_replacing = workspace + .update_in(cx, |workspace, window, cx| { + workspace.prepare_to_close( + CloseIntent::ReplaceWindow, + window, + cx, + ) + })? + .await?; + if continue_replacing { + workspace + .update_in(cx, |workspace, window, cx| { workspace - .update_in(cx, |workspace, window, cx| { - workspace.open_workspace_for_paths( - true, paths, window, cx, - ) - })? - .await - } else { - Ok(()) - } - }) + .open_workspace_for_paths(true, paths, window, cx) + })? 
+ .await } else { - workspace.open_workspace_for_paths(false, paths, window, cx) + Ok(()) } - } - SerializedWorkspaceLocation::Remote(mut connection) => { - let app_state = workspace.app_state().clone(); - - let replace_window = if replace_current_window { - window.window_handle().downcast::() - } else { - None - }; - - let open_options = OpenOptions { - replace_window, - ..Default::default() - }; - - if let RemoteConnectionOptions::Ssh(connection) = &mut connection { - SshSettings::get_global(cx) - .fill_connection_options_from_settings(connection); - }; - - let paths = candidate_workspace_paths.paths().to_vec(); - - cx.spawn_in(window, async move |_, cx| { - open_remote_project( - connection.clone(), - paths, - app_state, - open_options, - cx, - ) - .await - }) - } + }) + } else { + workspace.open_workspace_for_paths(false, paths, window, cx) } } - }) - .detach_and_log_err(cx); + SerializedWorkspaceLocation::Remote(mut connection) => { + let app_state = workspace.app_state().clone(); + + let replace_window = if replace_current_window { + window.window_handle().downcast::() + } else { + None + }; + + let open_options = OpenOptions { + replace_window, + ..Default::default() + }; + + if let RemoteConnectionOptions::Ssh(connection) = &mut connection { + SshSettings::get_global(cx) + .fill_connection_options_from_settings(connection); + }; + + let paths = candidate_workspace_paths.paths().to_vec(); + + cx.spawn_in(window, async move |_, cx| { + open_remote_project( + connection.clone(), + paths, + app_state, + open_options, + cx, + ) + .await + }) + } + } + .detach_and_prompt_err( + "Failed to open project", + window, + cx, + |_, _, _| None, + ); + }); cx.emit(DismissEvent); } } diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index 8744bacf420b28ccb38c96dc949515e6e6ebadaf..c371b27ce1dcfe665d96f548bca2c893559005ec 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ 
b/crates/recent_projects/src/remote_connections.rs @@ -483,7 +483,7 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate { cx: &mut AsyncApp, ) -> Task> { cx.spawn(async move |cx| { - let binary_path = AutoUpdater::download_remote_server_release( + AutoUpdater::download_remote_server_release( platform.os, platform.arch, release_channel, @@ -500,8 +500,7 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate { platform.os, platform.arch, ) - })?; - Ok(binary_path) + }) }) } diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index c5f511db5f94421b4e1c2872fdec4222381ba23a..54ec6644b9abef23446aaf0f8ddd21c0da6bdf05 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -87,6 +87,7 @@ pub trait RemoteClientDelegate: Send + Sync { const MAX_MISSED_HEARTBEATS: usize = 5; const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5); const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(5); +const INITIAL_CONNECTION_TIMEOUT: Duration = Duration::from_secs(60); const MAX_RECONNECT_ATTEMPTS: usize = 3; @@ -350,7 +351,7 @@ impl RemoteClient { let ready = client .wait_for_remote_started() - .with_timeout(HEARTBEAT_TIMEOUT, cx.background_executor()) + .with_timeout(INITIAL_CONNECTION_TIMEOUT, cx.background_executor()) .await; match ready { Ok(Some(_)) => {} @@ -527,6 +528,7 @@ impl RemoteClient { let reconnect_task = cx.spawn(async move |this, cx| { macro_rules! 
failed { ($error:expr, $attempts:expr, $ssh_connection:expr, $delegate:expr) => { + delegate.set_status(Some(&format!("{error:#}", error = $error)), cx); return State::ReconnectFailed { error: anyhow!($error), attempts: $attempts, @@ -998,11 +1000,10 @@ impl ConnectionPool { let connection = self.connections.get(&opts); match connection { Some(ConnectionPoolEntry::Connecting(task)) => { - let delegate = delegate.clone(); - cx.spawn(async move |cx| { - delegate.set_status(Some("Waiting for existing connection attempt"), cx); - }) - .detach(); + delegate.set_status( + Some("Waiting for existing connection attempt"), + &mut cx.to_async(), + ); return task.clone(); } Some(ConnectionPoolEntry::Connected(ssh)) => { diff --git a/crates/remote/src/transport.rs b/crates/remote/src/transport.rs index 6f76977ff9fdeaa1bbc0b7cb5008d7b0cb292d69..14a23257ce0bffbe138567f7aa27fc6a6d63d817 100644 --- a/crates/remote/src/transport.rs +++ b/crates/remote/src/transport.rs @@ -176,6 +176,10 @@ async fn build_remote_server_from_source( }; if platform.os == "linux" && use_musl { rust_flags.push_str(" -C target-feature=+crt-static"); + + if let Ok(path) = std::env::var("ZED_ZSTD_MUSL_LIB") { + rust_flags.push_str(&format!(" -C link-arg=-L{path}")); + } } if build_remote_server.contains("mold") { rust_flags.push_str(" -C link-arg=-fuse-ld=mold"); @@ -202,32 +206,26 @@ async fn build_remote_server_from_source( ) .await?; } else { - let which = cx - .background_spawn(async move { which::which("zig") }) - .await; - - if which.is_err() { - #[cfg(not(target_os = "windows"))] - { - anyhow::bail!( - "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup)" - ) - } - #[cfg(target_os = "windows")] - { - anyhow::bail!( - "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup)" - ) - } + if which("zig", cx).await?.is_none() { + anyhow::bail!(if cfg!(not(windows)) { + "zig not 
found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup)" + } else { + "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup)" + }); } + let rustup = which("rustup", cx) + .await? + .context("rustup not found on $PATH, install rustup (see https://rustup.rs/)")?; delegate.set_status(Some("Adding rustup target for cross-compilation"), cx); log::info!("adding rustup target"); - run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?; + run_cmd(Command::new(rustup).args(["target", "add"]).arg(&triple)).await?; - delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx); - log::info!("installing cargo-zigbuild"); - run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?; + if which("cargo-zigbuild", cx).await?.is_none() { + delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx); + log::info!("installing cargo-zigbuild"); + run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?; + } delegate.set_status( Some(&format!( @@ -270,7 +268,9 @@ async fn build_remote_server_from_source( #[cfg(target_os = "windows")] { // On Windows, we use 7z to compress the binary - let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?; + let seven_zip = which("7z.exe",cx) + .await? + .context("7z.exe not found on $PATH, install it (e.g. 
with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?; let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple); if smol::fs::metadata(&gz_path).await.is_ok() { smol::fs::remove_file(&gz_path).await?; @@ -293,3 +293,22 @@ async fn build_remote_server_from_source( Ok(Some(path)) } + +#[cfg(debug_assertions)] +async fn which( + binary_name: impl AsRef, + cx: &mut AsyncApp, +) -> Result> { + let binary_name = binary_name.as_ref().to_string(); + let binary_name_cloned = binary_name.clone(); + let res = cx + .background_spawn(async move { which::which(binary_name_cloned) }) + .await; + match res { + Ok(path) => Ok(Some(path)), + Err(which::Error::CannotFindBinaryPath) => Ok(None), + Err(err) => Err(anyhow::anyhow!( + "Failed to run 'which' to find the binary '{binary_name}': {err}" + )), + } +} diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 9099caea67d280e37575ebe478ff2b6006c4777b..86d93ac2454a41a45d531dd8076066988634e5ce 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -290,40 +290,47 @@ impl RemoteConnection for SshRemoteConnection { self.build_scp_command(&src_path, &dest_path_str, Some(&["-C", "-r"])); cx.background_spawn(async move { + // We will try SFTP first, and if that fails, we will fall back to SCP. + // If SCP fails also, we give up and return an error. + // The reason we allow a fallback from SFTP to SCP is that if the user has to specify a password, + // depending on the implementation of SSH stack, SFTP may disable interactive password prompts in batch mode. 
+ // This is for example the case on Windows as evidenced by this implementation snippet: + // https://github.com/PowerShell/openssh-portable/blob/b8c08ef9da9450a94a9c5ef717d96a7bd83f3332/sshconnect2.c#L417 if Self::is_sftp_available().await { log::debug!("using SFTP for directory upload"); let mut child = sftp_command.spawn()?; if let Some(mut stdin) = child.stdin.take() { use futures::AsyncWriteExt; - let sftp_batch = format!("put -r {} {}\n", src_path.display(), dest_path_str); + let sftp_batch = format!("put -r {src_path_display} {dest_path_str}\n"); stdin.write_all(sftp_batch.as_bytes()).await?; drop(stdin); } let output = child.output().await?; - anyhow::ensure!( - output.status.success(), - "failed to upload directory via SFTP {} -> {}: {}", - src_path_display, - dest_path_str, - String::from_utf8_lossy(&output.stderr) - ); + if output.status.success() { + return Ok(()); + } - return Ok(()); + let stderr = String::from_utf8_lossy(&output.stderr); + log::debug!("failed to upload directory via SFTP {src_path_display} -> {dest_path_str}: {stderr}"); } log::debug!("using SCP for directory upload"); let output = scp_command.output().await?; - anyhow::ensure!( - output.status.success(), - "failed to upload directory via SCP {} -> {}: {}", + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr); + log::debug!("failed to upload directory via SCP {src_path_display} -> {dest_path_str}: {stderr}"); + + anyhow::bail!( + "failed to upload directory via SFTP/SCP {} -> {}: {}", src_path_display, dest_path_str, - String::from_utf8_lossy(&output.stderr) + stderr, ); - - Ok(()) }) } @@ -790,12 +797,19 @@ impl SshRemoteConnection { async fn upload_file(&self, src_path: &Path, dest_path: &RelPath) -> Result<()> { log::debug!("uploading file {:?} to {:?}", src_path, dest_path); + let src_path_display = src_path.display().to_string(); let dest_path_str = dest_path.display(self.path_style()); + // We will try SFTP first, and if 
that fails, we will fall back to SCP. + // If SCP fails also, we give up and return an error. + // The reason we allow a fallback from SFTP to SCP is that if the user has to specify a password, + // depending on the implementation of SSH stack, SFTP may disable interactive password prompts in batch mode. + // This is for example the case on Windows as evidenced by this implementation snippet: + // https://github.com/PowerShell/openssh-portable/blob/b8c08ef9da9450a94a9c5ef717d96a7bd83f3332/sshconnect2.c#L417 if Self::is_sftp_available().await { log::debug!("using SFTP for file upload"); let mut command = self.build_sftp_command(); - let sftp_batch = format!("put {} {}\n", src_path.display(), dest_path_str); + let sftp_batch = format!("put {src_path_display} {dest_path_str}\n"); let mut child = command.spawn()?; if let Some(mut stdin) = child.stdin.take() { @@ -805,30 +819,34 @@ impl SshRemoteConnection { } let output = child.output().await?; - anyhow::ensure!( - output.status.success(), - "failed to upload file via SFTP {} -> {}: {}", - src_path.display(), - dest_path_str, - String::from_utf8_lossy(&output.stderr) - ); + if output.status.success() { + return Ok(()); + } - Ok(()) - } else { - log::debug!("using SCP for file upload"); - let mut command = self.build_scp_command(src_path, &dest_path_str, None); - let output = command.output().await?; - - anyhow::ensure!( - output.status.success(), - "failed to upload file via SCP {} -> {}: {}", - src_path.display(), - dest_path_str, - String::from_utf8_lossy(&output.stderr) + let stderr = String::from_utf8_lossy(&output.stderr); + log::debug!( + "failed to upload file via SFTP {src_path_display} -> {dest_path_str}: {stderr}" ); + } - Ok(()) + log::debug!("using SCP for file upload"); + let mut command = self.build_scp_command(src_path, &dest_path_str, None); + let output = command.output().await?; + + if output.status.success() { + return Ok(()); } + + let stderr = String::from_utf8_lossy(&output.stderr); + log::debug!( 
+ "failed to upload file via SCP {src_path_display} -> {dest_path_str}: {stderr}", + ); + anyhow::bail!( + "failed to upload file via STFP/SCP {} -> {}: {}", + src_path_display, + dest_path_str, + stderr, + ); } async fn is_sftp_available() -> bool { diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 4010d033c09473cb475ae40b977af70fca390b82..969363fb2bd02e7bc514cd68d488ca57aef9f0b9 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -10,7 +10,7 @@ use language_model::LanguageModelToolResultContent; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; -use gpui::{AppContext as _, Entity, SemanticVersion, TestAppContext}; +use gpui::{AppContext as _, Entity, SemanticVersion, SharedString, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; use language::{ Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, @@ -19,7 +19,7 @@ use language::{ use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind, LanguageServerName}; use node_runtime::NodeRuntime; use project::{ - Project, + ProgressToken, Project, agent_server_store::AgentServerCommand, search::{SearchQuery, SearchResult}, }; @@ -710,7 +710,11 @@ async fn test_remote_cancel_language_server_work( cx.executor().run_until_parked(); project.update(cx, |project, cx| { - project.cancel_language_server_work(server_id, Some(progress_token.into()), cx) + project.cancel_language_server_work( + server_id, + Some(ProgressToken::String(SharedString::from(progress_token))), + cx, + ) }); cx.executor().run_until_parked(); @@ -721,7 +725,7 @@ async fn test_remote_cancel_language_server_work( .await; assert_eq!( cancel_notification.token, - lsp::NumberOrString::String(progress_token.into()) + lsp::NumberOrString::String(progress_token.to_owned()) ); } } diff --git a/crates/remote_server/src/unix.rs 
b/crates/remote_server/src/unix.rs index 1a7dc8c9621354a385a65567960a6215f680528c..d11fb4031e6386e66090b1cfb106dd5d0a7dac05 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -372,6 +372,7 @@ pub fn execute_run( rayon::ThreadPoolBuilder::new() .num_threads(4) + .stack_size(10 * 1024 * 1024) .thread_name(|ix| format!("RayonWorker{}", ix)) .build_global() .unwrap(); diff --git a/crates/repl/src/kernels/native_kernel.rs b/crates/repl/src/kernels/native_kernel.rs index cf88fbc582296e1d61ea729a642a7e8ec8e290df..8630768decc6e788efdd3eaaadafc0c957e86d7e 100644 --- a/crates/repl/src/kernels/native_kernel.rs +++ b/crates/repl/src/kernels/native_kernel.rs @@ -3,7 +3,7 @@ use futures::{ AsyncBufReadExt as _, SinkExt as _, channel::mpsc::{self}, io::BufReader, - stream::{SelectAll, StreamExt}, + stream::{FuturesUnordered, SelectAll, StreamExt}, }; use gpui::{App, AppContext as _, Entity, EntityId, Task, Window}; use jupyter_protocol::{ @@ -88,9 +88,6 @@ async fn peek_ports(ip: IpAddr) -> Result<[u16; 5]> { pub struct NativeRunningKernel { pub process: smol::process::Child, - _shell_task: Task>, - _control_task: Task>, - _routing_task: Task>, connection_path: PathBuf, _process_status_task: Option>, pub working_directory: PathBuf, @@ -185,27 +182,25 @@ impl NativeRunningKernel { }) .ok(); } - anyhow::Ok(()) } }) .detach(); // iopub task - cx.spawn({ + let iopub_task = cx.spawn({ let session = session.clone(); - async move |cx| { - while let Ok(message) = iopub_socket.read().await { + async move |cx| -> anyhow::Result<()> { + loop { + let message = iopub_socket.read().await?; session .update_in(cx, |session, window, cx| { session.route(&message, window, cx); }) .ok(); } - anyhow::Ok(()) } - }) - .detach(); + }); let (mut control_request_tx, mut control_request_rx) = futures::channel::mpsc::channel(100); @@ -279,6 +274,41 @@ impl NativeRunningKernel { }) .detach(); + cx.spawn({ + let session = session.clone(); + async move |cx| { + async fn 
with_name( + name: &'static str, + task: Task>, + ) -> (&'static str, Result<()>) { + (name, task.await) + } + + let mut tasks = FuturesUnordered::new(); + tasks.push(with_name("iopub task", iopub_task)); + tasks.push(with_name("shell task", shell_task)); + tasks.push(with_name("control task", control_task)); + tasks.push(with_name("routing task", routing_task)); + + while let Some((name, result)) = tasks.next().await { + if let Err(err) = result { + log::error!("kernel: handling failed for {name}: {err:?}"); + + session + .update(cx, |session, cx| { + session.kernel_errored( + format!("handling failed for {name}: {err}"), + cx, + ); + cx.notify(); + }) + .ok(); + } + } + } + }) + .detach(); + let status = process.status(); let process_status_task = cx.spawn(async move |cx| { @@ -312,9 +342,6 @@ impl NativeRunningKernel { request_tx, working_directory, _process_status_task: Some(process_status_task), - _shell_task: shell_task, - _control_task: control_task, - _routing_task: routing_task, connection_path, execution_state: ExecutionState::Idle, kernel_info: None, diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index 2cd6494d66be1b615e10e537c139e4b2e22af863..a192123865ae1632bef66fdc97d3056219c10d30 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -38,7 +38,8 @@ use gpui::{AnyElement, ClipboardItem, Entity, Render, WeakEntity}; use language::Buffer; use runtimelib::{ExecutionState, JupyterMessageContent, MimeBundle, MimeType}; use ui::{ - CommonAnimationExt, Context, IntoElement, Styled, Tooltip, Window, div, prelude::*, v_flex, + ButtonStyle, CommonAnimationExt, Context, IconButton, IconName, IntoElement, Styled, Tooltip, + Window, div, h_flex, prelude::*, v_flex, }; mod image; @@ -146,13 +147,13 @@ impl Output { IconButton::new(ElementId::Name("copy-output".into()), IconName::Copy) .style(ButtonStyle::Transparent) .tooltip(Tooltip::text("Copy Output")) - .on_click(cx.listener(move |_, _, window, cx| { + .on_click(move 
|_, window, cx| { let clipboard_content = v.clipboard_content(window, cx); if let Some(clipboard_content) = clipboard_content.as_ref() { cx.write_to_clipboard(clipboard_content.clone()); } - })), + }), ) }) .when(v.has_buffer_content(window, cx), |el| { @@ -164,10 +165,9 @@ impl Output { ) .style(ButtonStyle::Transparent) .tooltip(Tooltip::text("Open in Buffer")) - .on_click(cx.listener({ + .on_click({ let workspace = workspace.clone(); - - move |_, _, window, cx| { + move |_, window, cx| { let buffer_content = v.update(cx, |item, cx| item.buffer_content(window, cx)); @@ -193,7 +193,7 @@ impl Output { .ok(); } } - })), + }), ) }) .into_any_element(), @@ -237,7 +237,87 @@ impl Output { Self::render_output_controls(content.clone(), workspace, window, cx) } Self::ErrorOutput(err) => { - Self::render_output_controls(err.traceback.clone(), workspace, window, cx) + // Add buttons for the traceback section + Some( + h_flex() + .pl_1() + .child( + IconButton::new( + ElementId::Name("copy-full-error-traceback".into()), + IconName::Copy, + ) + .style(ButtonStyle::Transparent) + .tooltip(Tooltip::text("Copy Full Error")) + .on_click({ + let ename = err.ename.clone(); + let evalue = err.evalue.clone(); + let traceback = err.traceback.clone(); + move |_, _window, cx| { + let traceback_text = traceback.read(cx).full_text(); + let full_error = + format!("{}: {}\n{}", ename, evalue, traceback_text); + let clipboard_content = + ClipboardItem::new_string(full_error); + cx.write_to_clipboard(clipboard_content); + } + }), + ) + .child( + IconButton::new( + ElementId::Name("open-full-error-in-buffer-traceback".into()), + IconName::FileTextOutlined, + ) + .style(ButtonStyle::Transparent) + .tooltip(Tooltip::text("Open Full Error in Buffer")) + .on_click({ + let ename = err.ename.clone(); + let evalue = err.evalue.clone(); + let traceback = err.traceback.clone(); + move |_, window, cx| { + if let Some(workspace) = workspace.upgrade() { + let traceback_text = 
traceback.read(cx).full_text(); + let full_error = format!( + "{}: {}\n{}", + ename, evalue, traceback_text + ); + let buffer = cx.new(|cx| { + let mut buffer = Buffer::local(full_error, cx) + .with_language( + language::PLAIN_TEXT.clone(), + cx, + ); + buffer.set_capability( + language::Capability::ReadOnly, + cx, + ); + buffer + }); + let editor = Box::new(cx.new(|cx| { + let multibuffer = cx.new(|cx| { + let mut multi_buffer = + MultiBuffer::singleton(buffer.clone(), cx); + multi_buffer + .set_title("Full Error".to_string(), cx); + multi_buffer + }); + Editor::for_multibuffer( + multibuffer, + None, + window, + cx, + ) + })); + workspace.update(cx, |workspace, cx| { + workspace.add_item_to_active_pane( + editor, None, true, window, cx, + ); + }); + } + } + }), + ) + .into_any_element(), + ) } Self::Message(_) => None, Self::Table { content, .. } => { diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index 6addd9a9f49b5094fcbedd148d8ca7c38e1ccd1b..54e4983b9f7f22965a3f92f60c2d5fe75841c781 100644 --- a/crates/repl/src/outputs/plain.rs +++ b/crates/repl/src/outputs/plain.rs @@ -197,7 +197,7 @@ impl TerminalOutput { } } - fn full_text(&self) -> String { + pub fn full_text(&self) -> String { fn sanitize(mut line: String) -> Option { line.retain(|ch| ch != '\u{0}' && ch != '\r'); if line.trim().is_empty() { diff --git a/crates/repl/src/outputs/user_error.rs b/crates/repl/src/outputs/user_error.rs index f42be1c867e3273a4cca3b730d55edbdca38ed33..4218b417c5d1ce2763e9304092c6c2510a1aae32 100644 --- a/crates/repl/src/outputs/user_error.rs +++ b/crates/repl/src/outputs/user_error.rs @@ -4,6 +4,7 @@ use ui::{Label, h_flex, prelude::*, v_flex}; use crate::outputs::plain::TerminalOutput; /// Userspace error from the kernel +#[derive(Clone)] pub struct ErrorView { pub ename: String, pub evalue: String, @@ -24,15 +25,10 @@ impl ErrorView { .font_buffer(cx) .child( Label::new(format!("{}: ", self.ename.clone())) - // .size(LabelSize::Large) 
.color(Color::Error) .weight(FontWeight::BOLD), ) - .child( - Label::new(self.evalue.clone()) - // .size(LabelSize::Large) - .weight(FontWeight::BOLD), - ), + .child(Label::new(self.evalue.clone()).weight(FontWeight::BOLD)), ) .child( div() diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index f38d87fbdad116d8ec22db6668b20fd433c53716..4107c2e012debc13b0cc44003250f4da63e5039f 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -15,7 +15,6 @@ path = "src/rope.rs" arrayvec = "0.7.1" log.workspace = true rayon.workspace = true -regex.workspace = true sum_tree.workspace = true unicode-segmentation.workspace = true util.workspace = true diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index 6e17c35d7c770c429fa32725a38bca94a9e1dfc2..4c1e4cd68560f15274722ff1d8249205300c4e68 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -32,6 +32,16 @@ pub struct Chunk { pub text: ArrayString, } +#[inline(always)] +const fn saturating_shl_mask(offset: u32) -> Bitmap { + (1 as Bitmap).unbounded_shl(offset).wrapping_sub(1) +} + +#[inline(always)] +const fn saturating_shr_mask(offset: u32) -> Bitmap { + !Bitmap::MAX.unbounded_shr(offset) +} + impl Chunk { pub const MASK_BITS: usize = Bitmap::BITS as usize; @@ -291,34 +301,19 @@ impl<'a> ChunkSlice<'a> { /// Get number of chars in first line #[inline(always)] pub fn first_line_chars(&self) -> u32 { - if self.newlines == 0 { - self.chars.count_ones() - } else { - let mask = ((1 as Bitmap) << self.newlines.trailing_zeros()) - 1; - (self.chars & mask).count_ones() - } + (self.chars & saturating_shl_mask(self.newlines.trailing_zeros())).count_ones() } /// Get number of chars in last line #[inline(always)] pub fn last_line_chars(&self) -> u32 { - if self.newlines == 0 { - self.chars.count_ones() - } else { - let mask = !(Bitmap::MAX >> self.newlines.leading_zeros()); - (self.chars & mask).count_ones() - } + (self.chars & 
saturating_shr_mask(self.newlines.leading_zeros())).count_ones() } /// Get number of UTF-16 code units in last line #[inline(always)] pub fn last_line_len_utf16(&self) -> u32 { - if self.newlines == 0 { - self.chars_utf16.count_ones() - } else { - let mask = !(Bitmap::MAX >> self.newlines.leading_zeros()); - (self.chars_utf16 & mask).count_ones() - } + (self.chars_utf16 & saturating_shr_mask(self.newlines.leading_zeros())).count_ones() } /// Get the longest row in the chunk and its length in characters. @@ -492,8 +487,8 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { - let mask = (1 as Bitmap).unbounded_shl(offset as u32).wrapping_sub(1); - let row = (self.newlines & mask).count_ones(); + let mask = saturating_shl_mask(offset as u32); + let row = (self.newlines & mask).count_ones(); let newline_ix = Bitmap::BITS - (self.newlines & mask).leading_zeros(); let column = if newline_ix as usize == MAX_BASE { 0 diff --git a/crates/rope/src/rope.rs index f8e06d23c245643f9e8c27e4433779e067a7ce5d..394e6ef0ca589d19ffcf7cf07a92bcd15c8e4a18 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -6,13 +6,10 @@ mod unclipped; use arrayvec::ArrayVec; use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; -use regex::Regex; use std::{ - borrow::Cow, cmp, fmt, io, mem, ops::{self, AddAssign, Range}, str, - sync::{Arc, LazyLock}, }; use sum_tree::{Bias, Dimension, Dimensions, SumTree}; @@ -24,95 +21,6 @@ pub use unclipped::Unclipped; use crate::chunk::Bitmap; -static LINE_SEPARATORS_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX")); -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum LineEnding { - Unix, - Windows, -} -impl Default for LineEnding { - fn default() -> Self { - #[cfg(unix)] - return Self::Unix; - #[cfg(not(unix))] - return Self::Windows; - } -} -impl
LineEnding { - pub fn as_str(&self) -> &'static str { - match self { - LineEnding::Unix => "\n", - LineEnding::Windows => "\r\n", - } - } - - pub fn label(&self) -> &'static str { - match self { - LineEnding::Unix => "LF", - LineEnding::Windows => "CRLF", - } - } - - pub fn detect(text: &str) -> Self { - let mut max_ix = cmp::min(text.len(), 1000); - while !text.is_char_boundary(max_ix) { - max_ix -= 1; - } - - if let Some(ix) = text[..max_ix].find(['\n']) { - if ix > 0 && text.as_bytes()[ix - 1] == b'\r' { - Self::Windows - } else { - Self::Unix - } - } else { - Self::default() - } - } - - pub fn normalize(text: &mut String) { - if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") { - *text = replaced; - } - } - - pub fn normalize_arc(text: Arc) -> Arc { - if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { - replaced.into() - } else { - text - } - } - - pub fn normalize_cow(text: Cow) -> Cow { - if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { - replaced.into() - } else { - text - } - } - - /// Converts text chunks into a [`String`] using the current line ending. 
- pub fn into_string(&self, chunks: Chunks<'_>) -> String { - match self { - LineEnding::Unix => chunks.collect(), - LineEnding::Windows => { - let line_ending = self.as_str(); - let mut result = String::new(); - for chunk in chunks { - result.push_str(&chunk.replace('\n', line_ending)); - } - result - } - } - } -} #[derive(Clone, Default)] pub struct Rope { chunks: SumTree, @@ -283,9 +191,9 @@ impl Rope { (), ); - #[cfg(not(test))] + #[cfg(not(all(test, not(rust_analyzer))))] const NUM_CHUNKS: usize = 16; - #[cfg(test)] + #[cfg(all(test, not(rust_analyzer)))] const NUM_CHUNKS: usize = 4; // We accommodate for NUM_CHUNKS chunks of size MAX_BASE @@ -340,9 +248,9 @@ impl Rope { text = remainder; } - #[cfg(test)] + #[cfg(all(test, not(rust_analyzer)))] const PARALLEL_THRESHOLD: usize = 4; - #[cfg(not(test))] + #[cfg(not(all(test, not(rust_analyzer))))] const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { @@ -460,16 +368,6 @@ impl Rope { Chunks::new(self, range, true) } - /// Formats the rope's text with the specified line ending string. - /// This replaces all `\n` characters with the provided line ending. - /// - /// The rope internally stores all line breaks as `\n` (see `Display` impl). - /// Use this method to convert to different line endings for file operations, - /// LSP communication, or other scenarios requiring specific line ending formats. - pub fn to_string_with_line_ending(&self, line_ending: LineEnding) -> String { - line_ending.into_string(self.chunks()) - } pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { if offset >= self.summary().len { return self.summary().len_utf16; @@ -711,16 +609,10 @@ impl From<&String> for Rope { } } -/// Display implementation for Rope. -/// -/// Note: This always uses `\n` as the line separator, regardless of the original -/// file's line endings. The rope internally normalizes all line breaks to `\n`.
-/// If you need to preserve original line endings (e.g., for LSP communication), -/// use `to_string_with_line_ending` instead. impl fmt::Display for Rope { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for chunk in self.chunks() { - write!(f, "{chunk}")?; + write!(f, "{}", chunk)?; } Ok(()) } @@ -2370,53 +2262,6 @@ mod tests { } } - #[test] - fn test_to_string_with_line_ending() { - // Test Unix line endings (no conversion) - let rope = Rope::from("line1\nline2\nline3"); - assert_eq!( - rope.to_string_with_line_ending(LineEnding::Unix), - "line1\nline2\nline3" - ); - - // Test Windows line endings - assert_eq!( - rope.to_string_with_line_ending(LineEnding::Windows), - "line1\r\nline2\r\nline3" - ); - - // Test empty rope - let empty_rope = Rope::from(""); - assert_eq!( - empty_rope.to_string_with_line_ending(LineEnding::Windows), - "" - ); - - // Test single line (no newlines) - let single_line = Rope::from("single line"); - assert_eq!( - single_line.to_string_with_line_ending(LineEnding::Windows), - "single line" - ); - - // Test rope ending with newline - let ending_newline = Rope::from("line1\nline2\n"); - assert_eq!( - ending_newline.to_string_with_line_ending(LineEnding::Windows), - "line1\r\nline2\r\n" - ); - - // Test large rope with multiple chunks - let mut large_rope = Rope::new(); - for i in 0..100 { - large_rope.push(&format!("line{}\n", i)); - } - let result = large_rope.to_string_with_line_ending(LineEnding::Windows); - assert!(result.contains("\r\n")); - assert!(!result.contains("\n\n")); - assert_eq!(result.matches("\r\n").count(), 100); - } - fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize { while !text.is_char_boundary(offset) { match bias { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 49c1fc5b297aedcf86c66140d0d803901b18c52a..25697bb45ac5f617b586d7a4346ee8761b7a4ed3 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -468,6 
+468,12 @@ impl Focusable for BufferSearchBar { } impl ToolbarItemView for BufferSearchBar { + fn contribute_context(&self, context: &mut KeyContext, _cx: &App) { + if !self.dismissed { + context.add("buffer_search_deployed"); + } + } + fn set_active_pane_item( &mut self, item: Option<&dyn ItemHandle>, diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index c4b6bb878a4a1d960c3774fc393d138b530aa7ca..10b69fbb6fa924df5b4644fa05579e7b451b34da 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -32,16 +32,15 @@ schemars.workspace = true serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true -serde_path_to_error.workspace = true serde_repr.workspace = true serde_with.workspace = true +settings_json.workspace = true settings_macros.workspace = true smallvec.workspace = true strum.workspace = true -tree-sitter-json.workspace = true -tree-sitter.workspace = true util.workspace = true zlog.workspace = true +migrator.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index b69b498a6c5596d8bbc78799e8edbe0befc9c35e..fc86afca2a1cbcd0a26777aa2ccb1fcb29b193a5 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -17,8 +17,9 @@ use util::{ markdown::{MarkdownEscaped, MarkdownInlineCode, MarkdownString}, }; -use crate::{ - SettingsAssets, append_top_level_array_value_in_json_text, parse_json_with_comments, +use crate::SettingsAssets; +use settings_json::{ + append_top_level_array_value_in_json_text, parse_json_with_comments, replace_top_level_array_value_in_json_text, }; @@ -150,6 +151,9 @@ pub enum KeymapFileLoadResult { impl KeymapFile { pub fn parse(content: &str) -> anyhow::Result { + if content.trim().is_empty() { + return Ok(Self(Vec::new())); + } parse_json_with_comments::(content) } @@ -211,11 +215,6 @@ impl KeymapFile { } pub fn 
load(content: &str, cx: &App) -> KeymapFileLoadResult { - if content.is_empty() { - return KeymapFileLoadResult::Success { - key_bindings: Vec::new(), - }; - } let keymap_file = match Self::parse(content) { Ok(keymap_file) => keymap_file, Err(error) => { diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 5dad953b32afcd027c0b6c4ec4be36f9659ce022..cb48f47b1cc496d81a8c727c174e32df63d5a701 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -5,7 +5,6 @@ pub mod merge_from; mod serde_helper; mod settings_content; mod settings_file; -mod settings_json; mod settings_store; mod vscode_import; @@ -26,8 +25,8 @@ pub use serde_helper::*; pub use settings_file::*; pub use settings_json::*; pub use settings_store::{ - InvalidSettingsError, LocalSettingsKind, Settings, SettingsFile, SettingsKey, SettingsLocation, - SettingsStore, + InvalidSettingsError, LocalSettingsKind, MigrationStatus, ParseStatus, Settings, SettingsFile, + SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsStore, }; pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource}; diff --git a/crates/settings/src/settings_content/language.rs b/crates/settings/src/settings_content/language.rs index a5dbd682d2ca4943e6230789acad96c5d7e2a742..a0a8aff3ae82a9001eb52367ab315912b5aac609 100644 --- a/crates/settings/src/settings_content/language.rs +++ b/crates/settings/src/settings_content/language.rs @@ -142,11 +142,27 @@ pub struct CodestralSettingsContent { /// Default: 150 #[serde(default)] pub max_tokens: Option, + /// Api URL to use for completions. + /// + /// Default: "https://codestral.mistral.ai" + #[serde(default)] + pub api_url: Option, } /// The mode in which edit predictions should be displayed. 
#[derive( - Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom, + Copy, + Clone, + Debug, + Default, + Eq, + PartialEq, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, )] #[serde(rename_all = "snake_case")] pub enum EditPredictionsMode { diff --git a/crates/settings/src/settings_content/project.rs b/crates/settings/src/settings_content/project.rs index 6a77b815fa547d41e6f38541fe1d681c82b3347b..a6af4c617ae7e744926e0a7608ff3fddeb2d0cd2 100644 --- a/crates/settings/src/settings_content/project.rs +++ b/crates/settings/src/settings_content/project.rs @@ -64,6 +64,12 @@ pub struct WorktreeSettingsContent { #[serde(skip_serializing_if = "Maybe::is_unset")] pub project_name: Maybe, + /// Whether to prevent this project from being shared in public channels. + /// + /// Default: false + #[serde(default)] + pub prevent_sharing_in_public_channels: bool, + /// Completely ignore files matching globs from `file_scan_exclusions`. Overrides /// `file_scan_inclusions`. 
/// diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index e971aedd4cd87b4706a465b532846970c2772e23..32cd16ba3379a79489af8487237364886046a659 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -7,7 +7,7 @@ use futures::{ channel::{mpsc, oneshot}, future::LocalBoxFuture, }; -use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal}; +use gpui::{App, AsyncApp, BorrowAppContext, Global, SharedString, Task, UpdateGlobal}; use paths::{EDITORCONFIG_NAME, local_settings_file_relative_path, task_file_name}; use schemars::{JsonSchema, json_schema}; @@ -32,16 +32,15 @@ pub type EditorconfigProperties = ec4rs::Properties; use crate::{ ActiveSettingsProfileName, FontFamilyName, IconThemeName, LanguageSettingsContent, - LanguageToSettingsMap, SettingsJsonSchemaParams, ThemeName, VsCodeSettings, WorktreeId, - infer_json_indent_size, + LanguageToSettingsMap, ThemeName, VsCodeSettings, WorktreeId, merge_from::MergeFrom, - parse_json_with_comments, settings_content::{ ExtensionsSettingsContent, ProjectSettingsContent, SettingsContent, UserSettingsContent, }, - update_value_in_json_text, }; +use settings_json::{infer_json_indent_size, parse_json_with_comments, update_value_in_json_text}; + pub trait SettingsKey: 'static + Send + Sync { /// The name of a key within the JSON file from which this setting should /// be deserialized. 
If this is `None`, then the setting will be deserialized @@ -148,14 +147,15 @@ pub struct SettingsStore { _setting_file_updates: Task<()>, setting_file_updates_tx: mpsc::UnboundedSender LocalBoxFuture<'static, Result<()>>>>, - file_errors: BTreeMap, + file_errors: BTreeMap, } #[derive(Clone, PartialEq, Eq, Debug)] pub enum SettingsFile { + Default, + Global, User, Server, - Default, /// Represents project settings in ssh projects as well as local projects Project((WorktreeId, Arc)), } @@ -184,6 +184,8 @@ impl Ord for SettingsFile { (_, Server) => Ordering::Greater, (User, _) => Ordering::Less, (_, User) => Ordering::Greater, + (Global, _) => Ordering::Less, + (_, Global) => Ordering::Greater, } } } @@ -235,6 +237,14 @@ trait AnySettingValue: 'static + Send + Sync { fn set_local_value(&mut self, root_id: WorktreeId, path: Arc, value: Box); } +/// Parameters that are used when generating some JSON schemas at runtime. +pub struct SettingsJsonSchemaParams<'a> { + pub language_names: &'a [String], + pub font_names: &'a [String], + pub theme_names: &'a [SharedString], + pub icon_theme_names: &'a [SharedString], +} + impl SettingsStore { pub fn new(cx: &App, default_settings: &str) -> Self { let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded(); @@ -264,7 +274,7 @@ impl SettingsStore { pub fn observe_active_settings_profile_name(cx: &mut App) -> gpui::Subscription { cx.observe_global::(|cx| { Self::update_global(cx, |store, cx| { - store.recompute_values(None, cx).log_err(); + store.recompute_values(None, cx); }); }) } @@ -386,7 +396,7 @@ impl SettingsStore { ..Default::default() }) .unwrap(); - self.set_user_settings(&new_text, cx).unwrap(); + _ = self.set_user_settings(&new_text, cx); } pub async fn load_settings(fs: &Arc) -> Result { @@ -515,6 +525,7 @@ impl SettingsStore { SettingsFile::Default => Some(self.default_settings.as_ref()), SettingsFile::Server => self.server_settings.as_deref(), SettingsFile::Project(ref key) => 
self.local_settings.get(key), + SettingsFile::Global => self.global_settings.as_deref(), } } @@ -617,22 +628,58 @@ impl SettingsStore { (SettingsFile::Default, None) } - fn handle_potential_file_error( + #[inline(always)] + fn parse_and_migrate_zed_settings( &mut self, + user_settings_content: &str, file: SettingsFile, - result: Result, - ) -> Result { - if let Err(err) = result.as_ref() { - let message = err.to_string(); - self.file_errors.insert(file, message); + ) -> (Option, SettingsParseResult) { + let mut migration_status = MigrationStatus::NotNeeded; + let settings: SettingsContentType = if user_settings_content.is_empty() { + parse_json_with_comments("{}").expect("Empty settings should always be valid") } else { - self.file_errors.remove(&file); - } - return result; + let migration_res = migrator::migrate_settings(user_settings_content); + let content = match &migration_res { + Ok(Some(content)) => content, + Ok(None) => user_settings_content, + Err(_) => user_settings_content, + }; + let parse_result = parse_json_with_comments(content); + migration_status = match migration_res { + Ok(Some(_)) => MigrationStatus::Succeeded, + Ok(None) => MigrationStatus::NotNeeded, + Err(err) => MigrationStatus::Failed { + error: err.to_string(), + }, + }; + match parse_result { + Ok(settings) => settings, + Err(err) => { + let result = SettingsParseResult { + parse_status: ParseStatus::Failed { + error: err.to_string(), + }, + migration_status, + }; + self.file_errors.insert(file, result.clone()); + return (None, result); + } + } + }; + + let result = SettingsParseResult { + parse_status: ParseStatus::Success, + migration_status, + }; + self.file_errors.insert(file, result.clone()); + return (Some(settings), result); } - pub fn error_for_file(&self, file: SettingsFile) -> Option { - self.file_errors.get(&file).cloned() + pub fn error_for_file(&self, file: SettingsFile) -> Option { + self.file_errors + .get(&file) + .filter(|parse_result| 
parse_result.requires_user_action()) + .cloned() } } @@ -697,41 +744,46 @@ impl SettingsStore { cx: &mut App, ) -> Result<()> { self.default_settings = parse_json_with_comments(default_settings_content)?; - self.recompute_values(None, cx)?; + self.recompute_values(None, cx); Ok(()) } /// Sets the user settings via a JSON string. - pub fn set_user_settings(&mut self, user_settings_content: &str, cx: &mut App) -> Result<()> { - let settings: UserSettingsContent = if user_settings_content.is_empty() { - parse_json_with_comments("{}")? - } else { - self.handle_potential_file_error( - SettingsFile::User, - parse_json_with_comments(user_settings_content), - )? - }; + #[must_use] + pub fn set_user_settings( + &mut self, + user_settings_content: &str, + cx: &mut App, + ) -> SettingsParseResult { + let (settings, parse_result) = self.parse_and_migrate_zed_settings::( + user_settings_content, + SettingsFile::User, + ); - self.user_settings = Some(settings); - self.recompute_values(None, cx)?; - Ok(()) + if let Some(settings) = settings { + self.user_settings = Some(settings); + self.recompute_values(None, cx); + } + return parse_result; } /// Sets the global settings via a JSON string. + #[must_use] pub fn set_global_settings( &mut self, global_settings_content: &str, cx: &mut App, - ) -> Result<()> { - let settings: SettingsContent = if global_settings_content.is_empty() { - parse_json_with_comments("{}")? - } else { - parse_json_with_comments(global_settings_content)? 
- }; + ) -> SettingsParseResult { + let (settings, parse_result) = self.parse_and_migrate_zed_settings::( + global_settings_content, + SettingsFile::Global, + ); - self.global_settings = Some(Box::new(settings)); - self.recompute_values(None, cx)?; - Ok(()) + if let Some(settings) = settings { + self.global_settings = Some(Box::new(settings)); + self.recompute_values(None, cx); + } + return parse_result; } pub fn set_server_settings( @@ -742,16 +794,13 @@ impl SettingsStore { let settings: Option = if server_settings_content.is_empty() { None } else { - self.handle_potential_file_error( - SettingsFile::Server, - parse_json_with_comments(server_settings_content), - )? + parse_json_with_comments(server_settings_content)? }; // Rewrite the server settings into a content type self.server_settings = settings.map(|settings| Box::new(settings)); - self.recompute_values(None, cx)?; + self.recompute_values(None, cx); Ok(()) } @@ -803,31 +852,36 @@ impl SettingsStore { .remove(&(root_id, directory_path.clone())); } (LocalSettingsKind::Settings, Some(settings_contents)) => { - let new_settings = self - .handle_potential_file_error( + let (new_settings, parse_result) = self + .parse_and_migrate_zed_settings::( + settings_contents, SettingsFile::Project((root_id, directory_path.clone())), - parse_json_with_comments::(settings_contents), - ) - .map_err(|e| InvalidSettingsError::LocalSettings { + ); + match parse_result.parse_status { + ParseStatus::Success => Ok(()), + ParseStatus::Failed { error } => Err(InvalidSettingsError::LocalSettings { path: directory_path.join(local_settings_file_relative_path()), - message: e.to_string(), - })?; - match self.local_settings.entry((root_id, directory_path.clone())) { - btree_map::Entry::Vacant(v) => { - v.insert(SettingsContent { - project: new_settings, - ..Default::default() - }); - zed_settings_changed = true; - } - btree_map::Entry::Occupied(mut o) => { - if &o.get().project != &new_settings { - o.insert(SettingsContent { + message: 
error, + }), + }?; + if let Some(new_settings) = new_settings { + match self.local_settings.entry((root_id, directory_path.clone())) { + btree_map::Entry::Vacant(v) => { + v.insert(SettingsContent { project: new_settings, ..Default::default() }); zed_settings_changed = true; } + btree_map::Entry::Occupied(mut o) => { + if &o.get().project != &new_settings { + o.insert(SettingsContent { + project: new_settings, + ..Default::default() + }); + zed_settings_changed = true; + } + } } } } @@ -874,7 +928,7 @@ impl SettingsStore { }; if zed_settings_changed { - self.recompute_values(Some((root_id, &directory_path)), cx)?; + self.recompute_values(Some((root_id, &directory_path)), cx); } Ok(()) } @@ -891,7 +945,7 @@ impl SettingsStore { }, ..Default::default() })); - self.recompute_values(None, cx)?; + self.recompute_values(None, cx); Ok(()) } @@ -899,7 +953,7 @@ impl SettingsStore { pub fn clear_local_settings(&mut self, root_id: WorktreeId, cx: &mut App) -> Result<()> { self.local_settings .retain(|(worktree_id, _), _| worktree_id != &root_id); - self.recompute_values(Some((root_id, RelPath::empty())), cx)?; + self.recompute_values(Some((root_id, RelPath::empty())), cx); Ok(()) } @@ -989,12 +1043,11 @@ impl SettingsStore { .to_value() } - // todo -> this function never fails, and should not return a result fn recompute_values( &mut self, changed_local_path: Option<(WorktreeId, &RelPath)>, cx: &mut App, - ) -> std::result::Result<(), InvalidSettingsError> { + ) { // Reload the global and local values for every setting. 
let mut project_settings_stack = Vec::::new(); let mut paths_stack = Vec::>::new(); @@ -1054,7 +1107,6 @@ impl SettingsStore { setting_value.set_local_value(*root_id, directory_path.clone(), value); } } - Ok(()) } pub fn editorconfig_properties( @@ -1087,6 +1139,96 @@ impl SettingsStore { } } +/// The result of parsing settings, including any migration attempts +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SettingsParseResult { + /// The result of parsing the settings file (possibly after migration) + pub parse_status: ParseStatus, + /// The result of attempting to migrate the settings file + pub migration_status: MigrationStatus, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ParseStatus { + /// Settings were parsed successfully + Success, + /// Settings failed to parse + Failed { error: String }, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MigrationStatus { + /// No migration was needed - settings are up to date + NotNeeded, + /// Settings were automatically migrated in memory, but the file needs to be updated + Succeeded, + /// Migration was attempted but failed. Original settings were parsed instead. + Failed { error: String }, +} + +impl Default for SettingsParseResult { + fn default() -> Self { + Self { + parse_status: ParseStatus::Success, + migration_status: MigrationStatus::NotNeeded, + } + } +} + +impl SettingsParseResult { + pub fn unwrap(self) -> bool { + self.result().unwrap() + } + + pub fn expect(self, message: &str) -> bool { + self.result().expect(message) + } + + /// Formats the ParseResult as a Result type. 
This is a lossy conversion + pub fn result(self) -> Result { + let migration_result = match self.migration_status { + MigrationStatus::NotNeeded => Ok(false), + MigrationStatus::Succeeded => Ok(true), + MigrationStatus::Failed { error } => { + Err(anyhow::format_err!(error)).context("Failed to migrate settings") + } + }; + + let parse_result = match self.parse_status { + ParseStatus::Success => Ok(()), + ParseStatus::Failed { error } => { + Err(anyhow::format_err!(error)).context("Failed to parse settings") + } + }; + + match (migration_result, parse_result) { + (migration_result @ Ok(_), Ok(())) => migration_result, + (Err(migration_err), Ok(())) => Err(migration_err), + (_, Err(parse_err)) => Err(parse_err), + } + } + + /// Returns true if there were any errors migrating and parsing the settings content or if migration was required but there were no errors + pub fn requires_user_action(&self) -> bool { + matches!(self.parse_status, ParseStatus::Failed { .. }) + || matches!( + self.migration_status, + MigrationStatus::Succeeded | MigrationStatus::Failed { .. 
} + ) + } + + pub fn ok(self) -> Option { + self.result().ok() + } + + pub fn parse_error(&self) -> Option { + match &self.parse_status { + ParseStatus::Failed { error } => Some(error.clone()), + ParseStatus::Success => None, + } + } +} + #[derive(Debug, Clone, PartialEq)] pub enum InvalidSettingsError { LocalSettings { path: Arc, message: String }, diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index fd9b343ad9cf6b0fd93ac31bf2dd2e1f2f6023bf..8f9c60960ce9dddf49109d0374d611f7672077ad 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -855,6 +855,7 @@ impl VsCodeSettings { fn worktree_settings_content(&self) -> WorktreeSettingsContent { WorktreeSettingsContent { project_name: crate::Maybe::Unset, + prevent_sharing_in_public_channels: false, file_scan_exclusions: self .read_value("files.watcherExclude") .and_then(|v| v.as_array()) diff --git a/crates/settings_json/Cargo.toml b/crates/settings_json/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..2ba9887ca016b645bafa2974bbd9029373348838 --- /dev/null +++ b/crates/settings_json/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "settings_json" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/settings_json.rs" + +[features] +default = [] + +[dependencies] +anyhow.workspace = true +tree-sitter.workspace = true +tree-sitter-json.workspace = true +util.workspace = true +serde.workspace = true +serde_json.workspace = true +serde_json_lenient.workspace = true +serde_path_to_error.workspace = true + +[dev-dependencies] +unindent.workspace = true +pretty_assertions.workspace = true + +# Uncomment other workspace dependencies as needed +# assistant.workspace = true +# client.workspace = true +# project.workspace = true +# settings.workspace = true diff --git a/crates/settings_json/LICENSE-GPL 
b/crates/settings_json/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/settings_json/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/settings/src/settings_json.rs b/crates/settings_json/src/settings_json.rs similarity index 99% rename from crates/settings/src/settings_json.rs rename to crates/settings_json/src/settings_json.rs index 29ca2a1c9b8bbd64baf88d94bf2f684d8ef988b4..5198e475af82c7a69f7ff568cd58c6945a2453bb 100644 --- a/crates/settings/src/settings_json.rs +++ b/crates/settings_json/src/settings_json.rs @@ -1,19 +1,10 @@ use anyhow::Result; -use gpui::SharedString; use serde::{Serialize, de::DeserializeOwned}; use serde_json::Value; use std::{ops::Range, sync::LazyLock}; use tree_sitter::{Query, StreamingIterator as _}; use util::RangeExt; -/// Parameters that are used when generating some JSON schemas at runtime. -pub struct SettingsJsonSchemaParams<'a> { - pub language_names: &'a [String], - pub font_names: &'a [String], - pub theme_names: &'a [SharedString], - pub icon_theme_names: &'a [SharedString], -} - pub fn update_value_in_json_text<'a>( text: &mut String, key_path: &mut Vec<&'a str>, diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 394e6821c85f68e08450ba18fe2e44959e0cf865..915d34e1087823841e985bb141879bf781db64fb 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -177,18 +177,17 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }), SettingsPageItem::SectionHeader("Scoped Settings"), SettingsPageItem::SettingItem(SettingItem { - // todo(settings_ui): Implement another setting item type that just shows an edit in settings.json files: USER, title: "Preview Channel", description: "Which settings should be activated only in Preview build of Zed.", field: Box::new( SettingField { - json_path: Some("use_system_prompts"), + 
json_path: Some("preview_channel_settings"), pick: |settings_content| { - settings_content.workspace.use_system_prompts.as_ref() + Some(settings_content) }, - write: |settings_content, value| { - settings_content.workspace.use_system_prompts = value; + write: |_settings_content, _value| { + }, } .unimplemented(), @@ -201,12 +200,11 @@ pub(crate) fn settings_data(cx: &App) -> Vec { description: "Any number of settings profiles that are temporarily applied on top of your existing user settings.", field: Box::new( SettingField { - json_path: Some(""), + json_path: Some("settings_profiles"), pick: |settings_content| { - settings_content.workspace.use_system_prompts.as_ref() + Some(settings_content) }, - write: |settings_content, value| { - settings_content.workspace.use_system_prompts = value; + write: |_settings_content, _value| { }, } .unimplemented(), @@ -290,6 +288,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + settings_content.theme.theme = None; return; }; let settings_value = settings_content.theme.theme.get_or_insert_with(|| { @@ -459,6 +458,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + settings_content.theme.icon_theme = None; return; }; let settings_value = settings_content.theme.icon_theme.get_or_insert_with(|| { @@ -662,6 +662,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + settings_content.theme.buffer_line_height = None; return; }; let settings_value = settings_content.theme.buffer_line_height.get_or_insert_with(|| { @@ -1123,6 +1124,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + settings_content.workspace.autosave = None; return; }; let settings_value = settings_content.workspace.autosave.get_or_insert_with(|| { @@ -1171,6 +1173,7 @@ pub(crate) fn 
settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + settings_content.workspace.autosave = None; return; }; match settings_content @@ -2105,7 +2108,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { description: "A mapping from languages to files and file extensions that should be treated as that language.", field: Box::new( SettingField { - json_path: Some("file_types"), + json_path: Some("file_type_associations"), pick: |settings_content| { settings_content.project.all_languages.file_types.as_ref() }, @@ -2320,6 +2323,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { language_settings_data() .iter() .chain(non_editor_language_settings_data().iter()) + .chain(edit_prediction_language_settings_section().iter()) .enumerate(), None, window, @@ -4449,6 +4453,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + if let Some(terminal) = settings_content.terminal.as_mut() { + terminal.project.shell = None; + } return; }; let settings_value = settings_content @@ -4500,7 +4507,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { title: "Program", description: "The shell program to use.", field: Box::new(SettingField { - json_path: Some("terminal.shell.program"), + json_path: Some("terminal.shell"), pick: |settings_content| { match settings_content.terminal.as_ref()?.project.shell.as_ref() { Some(settings::Shell::Program(program)) => Some(program), @@ -4632,6 +4639,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, write: |settings_content, value| { let Some(value) = value else { + if let Some(terminal) = settings_content.terminal.as_mut() { + terminal.project.working_directory = None; + } return; }; let settings_value = settings_content @@ -5503,209 +5513,283 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }, SettingsPage { title: "AI", - items: vec![ - SettingsPageItem::SectionHeader("General"), - SettingsPageItem::SettingItem(SettingItem { - 
title: "Disable AI", - description: "Whether to disable all AI features in Zed.", - field: Box::new(SettingField { - json_path: Some("disable_ai"), - pick: |settings_content| settings_content.disable_ai.as_ref(), - write: |settings_content, value| { - settings_content.disable_ai = value; - }, + items: { + let mut items = vec![ + SettingsPageItem::SectionHeader("General"), + SettingsPageItem::SettingItem(SettingItem { + title: "Disable AI", + description: "Whether to disable all AI features in Zed.", + field: Box::new(SettingField { + json_path: Some("disable_ai"), + pick: |settings_content| settings_content.disable_ai.as_ref(), + write: |settings_content, value| { + settings_content.disable_ai = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SectionHeader("Agent Configuration"), - SettingsPageItem::SettingItem(SettingItem { - title: "Always Allow Tool Actions", - description: "When enabled, the agent can run potentially destructive actions without asking for your confirmation. This setting has no effect on external agents.", - field: Box::new(SettingField { - json_path: Some("agent.always_allow_tool_actions"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? - .always_allow_tool_actions - .as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .always_allow_tool_actions = value; - }, + SettingsPageItem::SectionHeader("Agent Configuration"), + SettingsPageItem::SettingItem(SettingItem { + title: "Always Allow Tool Actions", + description: "When enabled, the agent can run potentially destructive actions without asking for your confirmation. This setting has no effect on external agents.", + field: Box::new(SettingField { + json_path: Some("agent.always_allow_tool_actions"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? 
+ .always_allow_tool_actions + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .always_allow_tool_actions = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Single File Review", - description: "When enabled, agent edits will also be displayed in single-file buffers for review.", - field: Box::new(SettingField { - json_path: Some("agent.single_file_review"), - pick: |settings_content| { - settings_content.agent.as_ref()?.single_file_review.as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .single_file_review = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Single File Review", + description: "When enabled, agent edits will also be displayed in single-file buffers for review.", + field: Box::new(SettingField { + json_path: Some("agent.single_file_review"), + pick: |settings_content| { + settings_content.agent.as_ref()?.single_file_review.as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .single_file_review = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Enable Feedback", - description: "Show voting thumbs up/down icon buttons for feedback on agent edits.", - field: Box::new(SettingField { - json_path: Some("agent.enable_feedback"), - pick: |settings_content| { - settings_content.agent.as_ref()?.enable_feedback.as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .enable_feedback = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Enable Feedback", + description: "Show voting thumbs up/down icon buttons for feedback on agent edits.", + field: Box::new(SettingField { + json_path: 
Some("agent.enable_feedback"), + pick: |settings_content| { + settings_content.agent.as_ref()?.enable_feedback.as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .enable_feedback = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Notify When Agent Waiting", - description: "Where to show notifications when the agent has completed its response or needs confirmation before running a tool action.", - field: Box::new(SettingField { - json_path: Some("agent.notify_when_agent_waiting"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? - .notify_when_agent_waiting - .as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .notify_when_agent_waiting = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Notify When Agent Waiting", + description: "Where to show notifications when the agent has completed its response or needs confirmation before running a tool action.", + field: Box::new(SettingField { + json_path: Some("agent.notify_when_agent_waiting"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .notify_when_agent_waiting + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .notify_when_agent_waiting = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Play Sound When Agent Done", - description: "Whether to play a sound when the agent has either completed its response, or needs user input.", - field: Box::new(SettingField { - json_path: Some("agent.play_sound_when_agent_done"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? 
- .play_sound_when_agent_done - .as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .play_sound_when_agent_done = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Play Sound When Agent Done", + description: "Whether to play a sound when the agent has either completed its response, or needs user input.", + field: Box::new(SettingField { + json_path: Some("agent.play_sound_when_agent_done"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .play_sound_when_agent_done + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .play_sound_when_agent_done = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Expand Edit Card", - description: "Whether to have edit cards in the agent panel expanded, showing a Preview of the diff.", - field: Box::new(SettingField { - json_path: Some("agent.expand_edit_card"), - pick: |settings_content| { - settings_content.agent.as_ref()?.expand_edit_card.as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .expand_edit_card = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Expand Edit Card", + description: "Whether to have edit cards in the agent panel expanded, showing a Preview of the diff.", + field: Box::new(SettingField { + json_path: Some("agent.expand_edit_card"), + pick: |settings_content| { + settings_content.agent.as_ref()?.expand_edit_card.as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .expand_edit_card = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Expand Terminal Card", - description: "Whether to have terminal cards in the agent panel expanded, 
showing the whole command output.", - field: Box::new(SettingField { - json_path: Some("agent.expand_terminal_card"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? - .expand_terminal_card - .as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .expand_terminal_card = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Expand Terminal Card", + description: "Whether to have terminal cards in the agent panel expanded, showing the whole command output.", + field: Box::new(SettingField { + json_path: Some("agent.expand_terminal_card"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .expand_terminal_card + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .expand_terminal_card = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Use Modifier To Send", - description: "Whether to always use cmd-enter (or ctrl-enter on Linux or Windows) to send messages.", - field: Box::new(SettingField { - json_path: Some("agent.use_modifier_to_send"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? - .use_modifier_to_send - .as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .use_modifier_to_send = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Use Modifier To Send", + description: "Whether to always use cmd-enter (or ctrl-enter on Linux or Windows) to send messages.", + field: Box::new(SettingField { + json_path: Some("agent.use_modifier_to_send"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? 
+ .use_modifier_to_send + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .use_modifier_to_send = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Message Editor Min Lines", - description: "Minimum number of lines to display in the agent message editor.", - field: Box::new(SettingField { - json_path: Some("agent.message_editor_min_lines"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? - .message_editor_min_lines - .as_ref() - }, - write: |settings_content, value| { - settings_content - .agent - .get_or_insert_default() - .message_editor_min_lines = value; - }, + SettingsPageItem::SettingItem(SettingItem { + title: "Message Editor Min Lines", + description: "Minimum number of lines to display in the agent message editor.", + field: Box::new(SettingField { + json_path: Some("agent.message_editor_min_lines"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .message_editor_min_lines + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .message_editor_min_lines = value; + }, + }), + metadata: None, + files: USER, }), - metadata: None, - files: USER, - }), - ], + ]; + items.extend(edit_prediction_language_settings_section()); + items.extend( + [ + SettingsPageItem::SettingItem(SettingItem { + title: "Display Mode", + description: "When to show edit predictions previews in buffer. 
The eager mode displays them inline, while the subtle mode displays them only when holding a modifier key.", + field: Box::new(SettingField { + json_path: Some("edit_prediction_mode"), + pick: |settings_content| { + settings_content.project.all_languages.edit_predictions.as_ref()?.mode.as_ref() + }, + write: |settings_content, value| { + settings_content.project.all_languages.edit_predictions.get_or_insert_default().mode = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "In Text Threads", + description: "Whether edit predictions are enabled when editing text threads in the agent panel.", + field: Box::new(SettingField { + json_path: Some("edit_prediction_in_text_threads"), + pick: |settings_content| { + settings_content.project.all_languages.edit_predictions.as_ref()?.enabled_in_text_threads.as_ref() + }, + write: |settings_content, value| { + settings_content.project.all_languages.edit_predictions.get_or_insert_default().enabled_in_text_threads = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Copilot Provider", + description: "Set up GitHub Copilot as your edit prediction provider. You can toggle between it and Zed's default provider.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).wrap_guides"), + pick: |settings_content| { + settings_content.project.all_languages.edit_predictions.as_ref()?.copilot.as_ref() + }, + write: |settings_content, value| { + settings_content.project.all_languages.edit_predictions.get_or_insert_default().copilot = value; + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Codestral Provider", + description: "Set up Mistral's Codestral as your edit prediction provider. 
You can toggle between it and Zed's default provider.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).wrap_guides"), + pick: |settings_content| { + settings_content.project.all_languages.edit_predictions.as_ref()?.codestral.as_ref() + }, + write: |settings_content, value| { + settings_content.project.all_languages.edit_predictions.get_or_insert_default().codestral = value; + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + ] + ); + items + }, }, SettingsPage { title: "Network", @@ -6290,48 +6374,6 @@ fn language_settings_data() -> Vec { metadata: None, files: USER | PROJECT, }), - SettingsPageItem::SectionHeader("Edit Predictions"), - SettingsPageItem::SettingItem(SettingItem { - title: "Show Edit Predictions", - description: "Controls whether edit predictions are shown immediately (true) or manually by triggering `editor::showeditprediction` (false).", - field: Box::new(SettingField { - json_path: Some("languages.$(language).show_edit_predictions"), - pick: |settings_content| { - language_settings_field(settings_content, |language| { - language.show_edit_predictions.as_ref() - }) - }, - write: |settings_content, value| { - language_settings_field_mut(settings_content, value, |language, value| { - language.show_edit_predictions = value; - }) - }, - }), - metadata: None, - files: USER | PROJECT, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Edit Predictions Disabled In", - description: "Controls whether edit predictions are shown in the given language scopes.", - field: Box::new( - SettingField { - json_path: Some("languages.$(language).edit_predictions_disabled_in"), - pick: |settings_content| { - language_settings_field(settings_content, |language| { - language.edit_predictions_disabled_in.as_ref() - }) - }, - write: |settings_content, value| { - language_settings_field_mut(settings_content, value, |language, value| { - language.edit_predictions_disabled_in = value; - }) - }, - } - 
.unimplemented(), - ), - metadata: None, - files: USER | PROJECT, - }), SettingsPageItem::SectionHeader("Whitespace"), SettingsPageItem::SettingItem(SettingItem { title: "Show Whitespaces", @@ -7092,7 +7134,7 @@ fn non_editor_language_settings_data() -> Vec { description: "Default Prettier options, in the format as in package.json section for Prettier.", field: Box::new( SettingField { - json_path: Some("languages.$(language).prettier"), + json_path: Some("languages.$(language).prettier.options"), pick: |settings_content| { language_settings_field(settings_content, |language| { language.prettier.as_ref()?.options.as_ref() @@ -7112,6 +7154,53 @@ fn non_editor_language_settings_data() -> Vec { ] } +fn edit_prediction_language_settings_section() -> Vec { + vec![ + SettingsPageItem::SectionHeader("Edit Predictions"), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Edit Predictions", + description: "Controls whether edit predictions are shown immediately or manually by triggering `editor::showeditprediction` (false).", + field: Box::new(SettingField { + json_path: Some("languages.$(language).show_edit_predictions"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.show_edit_predictions.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.show_edit_predictions = value; + }) + }, + }), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Edit Predictions Disabled In", + description: "Controls whether edit predictions are shown in the given language scopes.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).edit_predictions_disabled_in"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.edit_predictions_disabled_in.as_ref() + }) + }, + write: |settings_content, value| { + 
language_settings_field_mut(settings_content, value, |language, value| { + language.edit_predictions_disabled_in = value; + }) + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + ] +} + fn show_scrollbar_or_editor( settings_content: &SettingsContent, show: fn(&SettingsContent) -> Option<&settings::ShowScrollbar>, diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 103e2ed8047ca5efce41001d9fa0e92473cf0e07..14e2eaf688f39f6d50fbdcb1102df28e3fa0975e 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -6,10 +6,10 @@ use editor::{Editor, EditorEvent}; use feature_flags::FeatureFlag; use fuzzy::StringMatchCandidate; use gpui::{ - Action, App, DEFAULT_ADDITIONAL_WINDOW_SIZE, Div, Entity, FocusHandle, Focusable, Global, - ListState, ReadGlobal as _, ScrollHandle, Stateful, Subscription, Task, TitlebarOptions, - UniformListScrollHandle, Window, WindowBounds, WindowHandle, WindowOptions, actions, div, list, - point, prelude::*, px, uniform_list, + Action, App, ClipboardItem, DEFAULT_ADDITIONAL_WINDOW_SIZE, Div, Entity, FocusHandle, + Focusable, Global, KeyContext, ListState, ReadGlobal as _, ScrollHandle, Stateful, + Subscription, Task, TitlebarOptions, UniformListScrollHandle, Window, WindowBounds, + WindowHandle, WindowOptions, actions, div, list, point, prelude::*, px, uniform_list, }; use heck::ToTitleCase as _; use project::{Project, WorktreeId}; @@ -137,7 +137,7 @@ impl SettingField { SettingField { pick: |_| Some(&UnimplementedSettingField), write: |_, _| unreachable!(), - json_path: None, + json_path: self.json_path, } } } @@ -239,6 +239,7 @@ struct SettingFieldRenderer { &SettingItem, SettingsUiFile, Option<&SettingsFieldMetadata>, + bool, &mut Window, &mut Context, ) -> Stateful
, @@ -268,6 +269,7 @@ impl SettingFieldRenderer { field: SettingField, settings_file: SettingsUiFile, metadata: Option<&SettingsFieldMetadata>, + sub_field: bool, window: &mut Window, cx: &mut Context| { render_settings_item( @@ -275,7 +277,7 @@ impl SettingFieldRenderer { item, settings_file.clone(), render_control(field, settings_file, metadata, window, cx), - window, + sub_field, cx, ) }, @@ -290,6 +292,7 @@ impl SettingFieldRenderer { SettingField, SettingsUiFile, Option<&SettingsFieldMetadata>, + bool, &mut Window, &mut Context, ) -> Stateful
@@ -301,6 +304,7 @@ impl SettingFieldRenderer { item: &SettingItem, settings_file: SettingsUiFile, metadata: Option<&SettingsFieldMetadata>, + sub_field: bool, window: &mut Window, cx: &mut Context| { let field = *item @@ -315,6 +319,7 @@ impl SettingFieldRenderer { field, settings_file, metadata, + sub_field, window, cx, ) @@ -491,6 +496,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) // please semicolon stay on next line ; } @@ -515,43 +521,10 @@ pub fn open_settings_editor( return; } - settings_window.current_file = SettingsUiFile::User; - settings_window.build_ui(window, cx); - - let mut item_info = None; - 'search: for (nav_entry_index, entry) in settings_window.navbar_entries.iter().enumerate() { - if entry.is_root { - continue; - } - let page_index = entry.page_index; - let header_index = entry - .item_index - .expect("non-root entries should have an item index"); - for item_index in header_index + 1..settings_window.pages[page_index].items.len() { - let item = &settings_window.pages[page_index].items[item_index]; - if let SettingsPageItem::SectionHeader(_) = item { - break; - } - if let SettingsPageItem::SettingItem(item) = item { - if item.field.json_path() == Some(path) { - if !item.files.contains(USER) { - log::error!("Found item {}, but it is not a user setting", path); - return; - } - item_info = Some((item_index, nav_entry_index)); - break 'search; - } - } - } - } - let Some((item_index, navbar_entry_index)) = item_info else { - log::error!("Failed to find item for {}", path); - return; - }; - - settings_window.open_navbar_entry_page(navbar_entry_index); - window.focus(&settings_window.focus_handle_for_content_element(item_index, cx)); - settings_window.scroll_to_content_item(item_index, window, cx); + settings_window.search_bar.update(cx, |editor, cx| { + editor.set_text(format!("#{path}"), window, cx); 
+ }); + settings_window.update_matches(cx); } let existing_window = cx @@ -677,13 +650,14 @@ pub struct SettingsWindow { struct SearchIndex { bm25_engine: bm25::SearchEngine, fuzzy_match_candidates: Vec, - key_lut: Vec, + key_lut: Vec, } -struct SearchItemKey { +struct SearchKeyLUTEntry { page_index: usize, header_index: usize, item_index: usize, + json_path: Option<&'static str>, } struct SubPage { @@ -742,18 +716,20 @@ impl SettingsPageItem { ) -> AnyElement { let file = settings_window.current_file.clone(); - let border_variant = cx.theme().colors().border_variant; let apply_padding = |element: Stateful
| -> Stateful
{ let element = element.pt_4(); if is_last { element.pb_10() } else { - element.pb_4().border_b_1().border_color(border_variant) + element.pb_4() } }; let mut render_setting_item_inner = - |setting_item: &SettingItem, padding: bool, cx: &mut Context| { + |setting_item: &SettingItem, + padding: bool, + sub_field: bool, + cx: &mut Context| { let renderer = cx.default_global::().clone(); let (_, found) = setting_item.field.file_set_in(file.clone(), cx); @@ -777,6 +753,7 @@ impl SettingsPageItem { setting_item, file.clone(), setting_item.metadata.as_deref(), + sub_field, window, cx, ) @@ -794,7 +771,7 @@ impl SettingsPageItem { .tab_index(0_isize) .tooltip(Tooltip::text(setting_item.field.type_name())) .into_any_element(), - window, + sub_field, cx, ), }; @@ -811,6 +788,7 @@ impl SettingsPageItem { match self { SettingsPageItem::SectionHeader(header) => v_flex() .w_full() + .px_8() .gap_1p5() .child( Label::new(SharedString::new_static(header)) @@ -821,56 +799,71 @@ impl SettingsPageItem { .child(Divider::horizontal().color(DividerColor::BorderFaded)) .into_any_element(), SettingsPageItem::SettingItem(setting_item) => { - let (field_with_padding, _) = render_setting_item_inner(setting_item, true, cx); - field_with_padding.into_any_element() + let (field_with_padding, _) = + render_setting_item_inner(setting_item, true, false, cx); + + v_flex() + .group("setting-item") + .px_8() + .child(field_with_padding) + .when(!is_last, |this| this.child(Divider::horizontal())) + .into_any_element() } - SettingsPageItem::SubPageLink(sub_page_link) => h_flex() - .id(sub_page_link.title.clone()) - .w_full() - .min_w_0() - .justify_between() - .map(apply_padding) + SettingsPageItem::SubPageLink(sub_page_link) => v_flex() + .group("setting-item") + .px_8() .child( - v_flex() + h_flex() + .id(sub_page_link.title.clone()) .w_full() - .max_w_1_2() - .child(Label::new(sub_page_link.title.clone())), - ) - .child( - Button::new( - ("sub-page".into(), sub_page_link.title.clone()), - 
"Configure", - ) - .icon(IconName::ChevronRight) - .tab_index(0_isize) - .icon_position(IconPosition::End) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .style(ButtonStyle::OutlinedGhost) - .size(ButtonSize::Medium) - .on_click({ - let sub_page_link = sub_page_link.clone(); - cx.listener(move |this, _, _, cx| { - let mut section_index = item_index; - let current_page = this.current_page(); - - while !matches!( - current_page.items[section_index], - SettingsPageItem::SectionHeader(_) - ) { - section_index -= 1; - } - - let SettingsPageItem::SectionHeader(header) = - current_page.items[section_index] - else { - unreachable!("All items always have a section header above them") - }; + .min_w_0() + .justify_between() + .map(apply_padding) + .child( + v_flex() + .w_full() + .max_w_1_2() + .child(Label::new(sub_page_link.title.clone())), + ) + .child( + Button::new( + ("sub-page".into(), sub_page_link.title.clone()), + "Configure", + ) + .icon(IconName::ChevronRight) + .tab_index(0_isize) + .icon_position(IconPosition::End) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .style(ButtonStyle::OutlinedGhost) + .size(ButtonSize::Medium) + .on_click({ + let sub_page_link = sub_page_link.clone(); + cx.listener(move |this, _, _, cx| { + let mut section_index = item_index; + let current_page = this.current_page(); + + while !matches!( + current_page.items[section_index], + SettingsPageItem::SectionHeader(_) + ) { + section_index -= 1; + } + + let SettingsPageItem::SectionHeader(header) = + current_page.items[section_index] + else { + unreachable!( + "All items always have a section header above them" + ) + }; - this.push_sub_page(sub_page_link.clone(), header, cx) - }) - }), + this.push_sub_page(sub_page_link.clone(), header, cx) + }) + }), + ), ) + .when(!is_last, |this| this.child(Divider::horizontal())) .into_any_element(), SettingsPageItem::DynamicItem(DynamicItem { discriminant: discriminant_setting_item, @@ -883,18 +876,22 @@ impl SettingsPageItem 
{ .1; let (discriminant_element, rendered_ok) = - render_setting_item_inner(discriminant_setting_item, true, cx); + render_setting_item_inner(discriminant_setting_item, true, false, cx); let has_sub_fields = rendered_ok && discriminant.map(|d| !fields[d].is_empty()).unwrap_or(false); - let discriminant_element = if has_sub_fields { - discriminant_element.pb_4().border_b_0() - } else { - discriminant_element - }; - - let mut content = v_flex().id("dynamic-item").child(discriminant_element); + let mut content = v_flex() + .id("dynamic-item") + .child( + div() + .group("setting-item") + .px_8() + .child(discriminant_element.when(has_sub_fields, |this| this.pb_4())), + ) + .when(!has_sub_fields, |this| { + this.child(h_flex().px_8().child(Divider::horizontal())) + }); if rendered_ok { let discriminant = @@ -904,12 +901,13 @@ impl SettingsPageItem { for (index, field) in sub_fields.iter().enumerate() { let is_last_sub_field = index == sub_field_count - 1; - let (raw_field, _) = render_setting_item_inner(field, false, cx); + let (raw_field, _) = render_setting_item_inner(field, false, true, cx); content = content.child( raw_field + .group("setting-sub-item") + .mx_8() .p_4() - .border_x_1() .border_t_1() .when(is_last_sub_field, |this| this.border_b_1()) .when(is_last_sub_field && is_last, |this| this.mb_8()) @@ -931,18 +929,33 @@ fn render_settings_item( setting_item: &SettingItem, file: SettingsUiFile, control: AnyElement, - _window: &mut Window, + sub_field: bool, cx: &mut Context<'_, SettingsWindow>, ) -> Stateful
{ let (found_in_file, _) = setting_item.field.file_set_in(file.clone(), cx); let file_set_in = SettingsUiFile::from_settings(found_in_file.clone()); + let clipboard_has_link = cx + .read_from_clipboard() + .and_then(|entry| entry.text()) + .map_or(false, |maybe_url| { + setting_item.field.json_path().is_some() + && maybe_url.strip_prefix("zed://settings/") == setting_item.field.json_path() + }); + + let (link_icon, link_icon_color) = if clipboard_has_link { + (IconName::Check, Color::Success) + } else { + (IconName::Link, Color::Muted) + }; + h_flex() .id(setting_item.title) .min_w_0() .justify_between() .child( v_flex() + .relative() .w_1_2() .child( h_flex() @@ -950,9 +963,13 @@ fn render_settings_item( .gap_1() .child(Label::new(SharedString::new_static(setting_item.title))) .when_some( - setting_item - .field - .reset_to_default_fn(&file, &found_in_file, cx), + if sub_field { + None + } else { + setting_item + .field + .reset_to_default_fn(&file, &found_in_file, cx) + }, |this, reset_to_default| { this.child( IconButton::new("reset-to-default-btn", IconName::Undo) @@ -990,6 +1007,41 @@ fn render_settings_item( ), ) .child(control) + .when(sub_page_stack().is_empty(), |this| { + // Intentionally using the description to make the icon button + // unique because some items share the same title (e.g., "Font Size") + let icon_button_id = + SharedString::new(format!("copy-link-btn-{}", setting_item.description)); + + this.child( + div() + .absolute() + .top(rems_from_px(18.)) + .map(|this| { + if sub_field { + this.visible_on_hover("setting-sub-item") + .left(rems_from_px(-8.5)) + } else { + this.visible_on_hover("setting-item") + .left(rems_from_px(-22.)) + } + }) + .child({ + IconButton::new(icon_button_id, link_icon) + .icon_color(link_icon_color) + .icon_size(IconSize::Small) + .shape(IconButtonShape::Square) + .tooltip(Tooltip::text("Copy Link")) + .when_some(setting_item.field.json_path(), |this, path| { + this.on_click(cx.listener(move |_, _, _, cx| { + let 
link = format!("zed://settings/{}", path); + cx.write_to_clipboard(ClipboardItem::new_string(link)); + cx.notify(); + })) + }) + }), + ) + }) } struct SettingItem { @@ -1139,6 +1191,7 @@ impl SettingsUiFile { settings::SettingsFile::Project(location) => SettingsUiFile::Project(location), settings::SettingsFile::Server => SettingsUiFile::Server("todo: server name"), settings::SettingsFile::Default => return None, + settings::SettingsFile::Global => return None, }) } @@ -1477,7 +1530,7 @@ impl SettingsWindow { fn update_matches(&mut self, cx: &mut Context) { self.search_task.take(); - let query = self.search_bar.read(cx).text(cx); + let mut query = self.search_bar.read(cx).text(cx); if query.is_empty() || self.search_index.is_none() { for page in &mut self.filter_table { page.fill(true); @@ -1489,6 +1542,14 @@ impl SettingsWindow { return; } + let is_json_link_query; + if query.starts_with("#") { + query.remove(0); + is_json_link_query = true; + } else { + is_json_link_query = false; + } + let search_index = self.search_index.as_ref().unwrap().clone(); fn update_matches_inner( @@ -1502,10 +1563,11 @@ impl SettingsWindow { } for match_index in match_indices { - let SearchItemKey { + let SearchKeyLUTEntry { page_index, header_index, item_index, + .. } = search_index.key_lut[match_index]; let page = &mut this.filter_table[page_index]; page[header_index] = true; @@ -1519,6 +1581,29 @@ impl SettingsWindow { } self.search_task = Some(cx.spawn(async move |this, cx| { + if is_json_link_query { + let mut indices = vec![]; + for (index, SearchKeyLUTEntry { json_path, .. 
}) in + search_index.key_lut.iter().enumerate() + { + let Some(json_path) = json_path else { + continue; + }; + + if let Some(post) = query.strip_prefix(json_path) + && (post.is_empty() || post.starts_with('.')) + { + indices.push(index); + } + } + if !indices.is_empty() { + this.update(cx, |this, cx| { + update_matches_inner(this, search_index.as_ref(), indices.into_iter(), cx); + }) + .ok(); + return; + } + } let bm25_task = cx.background_spawn({ let search_index = search_index.clone(); let max_results = search_index.key_lut.len(); @@ -1609,7 +1694,7 @@ impl SettingsWindow { } fn build_search_index(&mut self) { - let mut key_lut: Vec = vec![]; + let mut key_lut: Vec = vec![]; let mut documents = Vec::default(); let mut fuzzy_match_candidates = Vec::default(); @@ -1631,11 +1716,16 @@ impl SettingsWindow { let mut header_str = ""; for (item_index, item) in page.items.iter().enumerate() { let key_index = key_lut.len(); + let mut json_path = None; match item { SettingsPageItem::DynamicItem(DynamicItem { discriminant: item, .. 
}) | SettingsPageItem::SettingItem(item) => { + json_path = item + .field + .json_path() + .map(|path| path.trim_end_matches('$')); documents.push(bm25::Document { id: key_index, contents: [page.title, header_str, item.title, item.description] @@ -1669,10 +1759,11 @@ impl SettingsWindow { push_candidates(&mut fuzzy_match_candidates, key_index, page.title); push_candidates(&mut fuzzy_match_candidates, key_index, header_str); - key_lut.push(SearchItemKey { + key_lut.push(SearchKeyLUTEntry { page_index, header_index, item_index, + json_path, }); } } @@ -1730,7 +1821,10 @@ impl SettingsWindow { let prev_files = self.files.clone(); let settings_store = cx.global::(); let mut ui_files = vec![]; - let all_files = settings_store.get_all_files(); + let mut all_files = settings_store.get_all_files(); + if !all_files.contains(&settings::SettingsFile::User) { + all_files.push(settings::SettingsFile::User); + } for file in all_files { let Some(settings_ui_file) = SettingsUiFile::from_settings(file) else { continue; @@ -1897,7 +1991,6 @@ impl SettingsWindow { h_flex() .w_full() - .pb_4() .gap_1() .justify_between() .track_focus(&self.files_focus_handle) @@ -2078,8 +2171,15 @@ impl SettingsWindow { "Focus Navbar" }; + let mut key_context = KeyContext::new_with_defaults(); + key_context.add("NavigationMenu"); + key_context.add("menu"); + if self.search_bar.focus_handle(cx).is_focused(window) { + key_context.add("search"); + } + v_flex() - .key_context("NavigationMenu") + .key_context(key_context) .on_action(cx.listener(|this, _: &CollapseNavEntry, window, cx| { let Some(focused_entry) = this.focused_nav_entry(window, cx) else { return; @@ -2513,6 +2613,7 @@ impl SettingsWindow { cx.processor(move |this, index, window, cx| { if index == 0 { return div() + .px_8() .when(sub_page_stack().is_empty(), |this| { this.when_some(root_nav_label, |this, title| { this.child( @@ -2540,9 +2641,9 @@ impl SettingsWindow { v_flex() .id(("settings-page-item", actual_item_index)) + 
.track_focus(&item_focus_handle) .w_full() .min_w_0() - .track_focus(&item_focus_handle) .child(item.render( this, actual_item_index, @@ -2657,7 +2758,6 @@ impl SettingsWindow { } else { page_header = h_flex() .ml_neg_1p5() - .pb_4() .gap_1() .child( IconButton::new("back-btn", IconName::ArrowLeft) @@ -2678,40 +2778,73 @@ impl SettingsWindow { if let Some(error) = SettingsStore::global(cx).error_for_file(self.current_file.to_settings()) { - if self.shown_errors.insert(error.clone()) { - telemetry::event!("Settings Error Shown", error = &error); + fn banner( + label: &'static str, + error: String, + shown_errors: &mut HashSet, + cx: &mut Context, + ) -> impl IntoElement { + if shown_errors.insert(error.clone()) { + telemetry::event!("Settings Error Shown", label = label, error = &error); + } + Banner::new() + .severity(Severity::Warning) + .child( + v_flex() + .my_0p5() + .gap_0p5() + .child(Label::new(label)) + .child(Label::new(error).size(LabelSize::Small).color(Color::Muted)), + ) + .action_slot( + div().pr_1().pb_1().child( + Button::new("fix-in-json", "Fix in settings.json") + .tab_index(0_isize) + .style(ButtonStyle::Tinted(ui::TintColor::Warning)) + .on_click(cx.listener(|this, _, _, cx| { + this.open_current_settings_file(cx); + })), + ), + ) } + let parse_error = error.parse_error(); + let parse_failed = parse_error.is_some(); + warning_banner = v_flex() - .pb_4() - .child( - Banner::new() - .severity(Severity::Warning) - .child( - v_flex() - .my_0p5() - .gap_0p5() - .child(Label::new("Your settings file is in an invalid state.")) - .child( - Label::new(error).size(LabelSize::Small).color(Color::Muted), - ), - ) - .action_slot( - div().pr_1().child( - Button::new("fix-in-json", "Fix in settings.json") - .tab_index(0_isize) - .style(ButtonStyle::Tinted(ui::TintColor::Warning)) - .on_click(cx.listener(|this, _, _, cx| { - this.open_current_settings_file(cx); - })), - ), - ), - ) + .gap_2() + .when_some(parse_error, |this, err| { + this.child(banner( + 
"Failed to load your settings. Some values may be incorrect and changes may be lost.", + err, + &mut self.shown_errors, + cx, + )) + }) + .map(|this| match &error.migration_status { + settings::MigrationStatus::Succeeded => this.child(banner( + "Your settings are out of date, and need to be updated.", + match &self.current_file { + SettingsUiFile::User => "They can be automatically migrated to the latest version.", + SettingsUiFile::Server(_) | SettingsUiFile::Project(_) => "They must be manually migrated to the latest version." + }.to_string(), + &mut self.shown_errors, + cx, + )), + settings::MigrationStatus::Failed { error: err } if !parse_failed => this + .child(banner( + "Your settings file is out of date, automatic migration failed", + err.clone(), + &mut self.shown_errors, + cx, + )), + _ => this, + }) .into_any_element() } return v_flex() - .id("Settings-ui-page") + .id("settings-ui-page") .on_action(cx.listener(|this, _: &menu::SelectNext, window, cx| { if !sub_page_stack().is_empty() { window.focus_next(); @@ -2732,8 +2865,11 @@ impl SettingsWindow { this.list_state.scroll_to_reveal_item(next_logical_index); // We need to render the next item to ensure it's focus handle is in the element tree cx.on_next_frame(window, |_, window, cx| { - window.focus_next(); cx.notify(); + cx.on_next_frame(window, |_, window, cx| { + window.focus_next(); + cx.notify(); + }); }); cx.notify(); return; @@ -2761,8 +2897,11 @@ impl SettingsWindow { this.list_state.scroll_to_reveal_item(next_logical_index); // We need to render the next item to ensure it's focus handle is in the element tree cx.on_next_frame(window, |_, window, cx| { - window.focus_prev(); cx.notify(); + cx.on_next_frame(window, |_, window, cx| { + window.focus_prev(); + cx.notify(); + }); }); cx.notify(); return; @@ -2778,14 +2917,20 @@ impl SettingsWindow { this.vertical_scrollbar_for(self.sub_page_scroll_handle.clone(), window, cx) }) .track_focus(&self.content_focus_handle.focus_handle(cx)) - .flex_1() 
.pt_6() - .px_8() + .gap_4() + .flex_1() .bg(cx.theme().colors().editor_background) - .child(warning_banner) - .child(page_header) + .child( + v_flex() + .px_8() + .gap_2() + .child(page_header) + .child(warning_banner), + ) .child( div() + .flex_1() .size_full() .tab_group() .tab_index(CONTENT_GROUP_TAB_INDEX) diff --git a/crates/svg_preview/Cargo.toml b/crates/svg_preview/Cargo.toml index f64e60afe282da0da6780cc45097c751a8e7e8c1..e78a042180a62d31fd74da659df7afe8baa2caa7 100644 --- a/crates/svg_preview/Cargo.toml +++ b/crates/svg_preview/Cargo.toml @@ -15,6 +15,6 @@ path = "src/svg_preview.rs" editor.workspace = true file_icons.workspace = true gpui.workspace = true -multi_buffer.workspace = true +language.workspace = true ui.workspace = true workspace.workspace = true diff --git a/crates/svg_preview/src/svg_preview_view.rs b/crates/svg_preview/src/svg_preview_view.rs index 432e91648cf751744168e89823fc59bc168c6714..de27a7237d5f85b8dc18d47e09a6ac7fe22ee89f 100644 --- a/crates/svg_preview/src/svg_preview_view.rs +++ b/crates/svg_preview/src/svg_preview_view.rs @@ -1,13 +1,13 @@ -use std::path::PathBuf; +use std::mem; +use std::sync::Arc; use editor::Editor; use file_icons::FileIcons; use gpui::{ - App, Context, Entity, EventEmitter, FocusHandle, Focusable, ImageSource, IntoElement, - ParentElement, Render, Resource, RetainAllImageCache, Styled, Subscription, WeakEntity, Window, - div, img, + App, Context, Entity, EventEmitter, FocusHandle, Focusable, IntoElement, ParentElement, Render, + RenderImage, Styled, Subscription, Task, WeakEntity, Window, div, img, }; -use multi_buffer::{Event as MultiBufferEvent, MultiBuffer}; +use language::{Buffer, BufferEvent}; use ui::prelude::*; use workspace::item::Item; use workspace::{Pane, Workspace}; @@ -16,9 +16,10 @@ use crate::{OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide}; pub struct SvgPreviewView { focus_handle: FocusHandle, - svg_path: Option, - image_cache: Entity, - _buffer_subscription: Subscription, + 
buffer: Option>, + current_svg: Option, SharedString>>, + _refresh: Task<()>, + _buffer_subscription: Option, _workspace_subscription: Option, } @@ -31,6 +32,182 @@ pub enum SvgPreviewMode { } impl SvgPreviewView { + pub fn new( + mode: SvgPreviewMode, + active_editor: Entity, + workspace_handle: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + cx.new(|cx| { + let workspace_subscription = if mode == SvgPreviewMode::Follow + && let Some(workspace) = workspace_handle.upgrade() + { + Some(Self::subscribe_to_workspace(workspace, window, cx)) + } else { + None + }; + + let buffer = active_editor + .read(cx) + .buffer() + .clone() + .read_with(cx, |buffer, _cx| buffer.as_singleton()); + + let subscription = buffer + .as_ref() + .map(|buffer| Self::create_buffer_subscription(buffer, window, cx)); + + let mut this = Self { + focus_handle: cx.focus_handle(), + buffer, + current_svg: None, + _buffer_subscription: subscription, + _workspace_subscription: workspace_subscription, + _refresh: Task::ready(()), + }; + this.render_image(window, cx); + + this + }) + } + + fn subscribe_to_workspace( + workspace: Entity, + window: &Window, + cx: &mut Context, + ) -> Subscription { + cx.subscribe_in( + &workspace, + window, + move |this: &mut SvgPreviewView, workspace, event: &workspace::Event, window, cx| { + if let workspace::Event::ActiveItemChanged = event { + let workspace = workspace.read(cx); + if let Some(active_item) = workspace.active_item(cx) + && let Some(editor) = active_item.downcast::() + && Self::is_svg_file(&editor, cx) + { + let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() else { + return; + }; + if this.buffer.as_ref() != Some(&buffer) { + this._buffer_subscription = + Some(Self::create_buffer_subscription(&buffer, window, cx)); + this.buffer = Some(buffer); + this.render_image(window, cx); + cx.notify(); + } + } else { + this.set_current(None, window, cx); + } + } + }, + ) + } + + fn render_image(&mut self, window: &Window, 
cx: &mut Context) { + let Some(buffer) = self.buffer.as_ref() else { + return; + }; + const SCALE_FACTOR: f32 = 1.0; + + let renderer = cx.svg_renderer(); + let content = buffer.read(cx).snapshot(); + let background_task = cx.background_spawn(async move { + renderer.render_single_frame(content.text().as_bytes(), SCALE_FACTOR, true) + }); + + self._refresh = cx.spawn_in(window, async move |this, cx| { + let result = background_task.await; + + this.update_in(cx, |view, window, cx| { + let current = result.map_err(|e| e.to_string().into()); + view.set_current(Some(current), window, cx); + }) + .ok(); + }); + } + + fn set_current( + &mut self, + image: Option, SharedString>>, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(Ok(image)) = mem::replace(&mut self.current_svg, image) { + window.drop_image(image).ok(); + } + cx.notify(); + } + + fn find_existing_preview_item_idx( + pane: &Pane, + editor: &Entity, + cx: &App, + ) -> Option { + let buffer_id = editor.read(cx).buffer().entity_id(); + pane.items_of_type::() + .find(|view| { + view.read(cx) + .buffer + .as_ref() + .is_some_and(|buffer| buffer.entity_id() == buffer_id) + }) + .and_then(|view| pane.index_for_item(&view)) + } + + pub fn resolve_active_item_as_svg_editor( + workspace: &Workspace, + cx: &mut Context, + ) -> Option> { + workspace + .active_item(cx)? 
+ .act_as::(cx) + .filter(|editor| Self::is_svg_file(&editor, cx)) + } + + fn create_svg_view( + mode: SvgPreviewMode, + workspace: &mut Workspace, + editor: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + let workspace_handle = workspace.weak_handle(); + SvgPreviewView::new(mode, editor, workspace_handle, window, cx) + } + + fn create_buffer_subscription( + buffer: &Entity, + window: &Window, + cx: &mut Context, + ) -> Subscription { + cx.subscribe_in( + buffer, + window, + move |this, _buffer, event: &BufferEvent, window, cx| match event { + BufferEvent::Edited | BufferEvent::Saved => { + this.render_image(window, cx); + } + _ => {} + }, + ) + } + + pub fn is_svg_file(editor: &Entity, cx: &App) -> bool { + editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .and_then(|buffer| buffer.read(cx).file()) + .is_some_and(|file| { + file.path() + .extension() + .is_some_and(|ext| ext.eq_ignore_ascii_case("svg")) + }) + } + pub fn register(workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context) { workspace.register_action(move |workspace, _: &OpenPreview, window, cx| { if let Some(editor) = Self::resolve_active_item_as_svg_editor(workspace, cx) @@ -104,154 +281,6 @@ impl SvgPreviewView { } }); } - - fn find_existing_preview_item_idx( - pane: &Pane, - editor: &Entity, - cx: &App, - ) -> Option { - let editor_path = Self::get_svg_path(editor.read(cx).buffer(), cx); - pane.items_of_type::() - .find(|view| { - let view_read = view.read(cx); - view_read.svg_path.is_some() && view_read.svg_path == editor_path - }) - .and_then(|view| pane.index_for_item(&view)) - } - - pub fn resolve_active_item_as_svg_editor( - workspace: &Workspace, - cx: &mut Context, - ) -> Option> { - let editor = workspace.active_item(cx)?.act_as::(cx)?; - - if Self::is_svg_file(&editor, cx) { - Some(editor) - } else { - None - } - } - - fn create_svg_view( - mode: SvgPreviewMode, - workspace: &mut Workspace, - editor: Entity, - window: &mut Window, - cx: &mut 
Context, - ) -> Entity { - let workspace_handle = workspace.weak_handle(); - SvgPreviewView::new(mode, editor, workspace_handle, window, cx) - } - - pub fn new( - mode: SvgPreviewMode, - active_editor: Entity, - workspace_handle: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Entity { - cx.new(|cx| { - let image_cache = RetainAllImageCache::new(cx); - let buffer = active_editor.read(cx).buffer(); - let svg_path = Self::get_svg_path(buffer, cx); - let subscription = Self::create_buffer_subscription(&buffer.clone(), window, cx); - - // Subscribe to workspace active item changes to follow SVG files - let workspace_subscription = if mode == SvgPreviewMode::Follow { - workspace_handle.upgrade().map(|workspace_handle| { - cx.subscribe_in( - &workspace_handle, - window, - |this: &mut SvgPreviewView, - workspace, - event: &workspace::Event, - window, - cx| { - if let workspace::Event::ActiveItemChanged = event { - let workspace_read = workspace.read(cx); - if let Some(active_item) = workspace_read.active_item(cx) - && let Some(editor) = active_item.downcast::() - && Self::is_svg_file(&editor, cx) - { - let buffer = editor.read(cx).buffer(); - let new_path = Self::get_svg_path(&buffer, cx); - if this.svg_path != new_path { - this.svg_path = new_path; - this._buffer_subscription = - Self::create_buffer_subscription( - &buffer.clone(), - window, - cx, - ); - cx.notify(); - } - } - } - }, - ) - }) - } else { - None - }; - - Self { - focus_handle: cx.focus_handle(), - svg_path, - image_cache, - _buffer_subscription: subscription, - _workspace_subscription: workspace_subscription, - } - }) - } - - fn create_buffer_subscription( - active_buffer: &Entity, - window: &mut Window, - cx: &mut Context, - ) -> Subscription { - cx.subscribe_in( - active_buffer, - window, - |this: &mut SvgPreviewView, buffer, event: &MultiBufferEvent, window, cx| { - let potential_path_change = event == &MultiBufferEvent::FileHandleChanged; - if event == &MultiBufferEvent::Saved || 
potential_path_change { - // Remove cached image to force reload - if let Some(svg_path) = &this.svg_path { - let resource = Resource::Path(svg_path.clone().into()); - this.image_cache.update(cx, |cache, cx| { - cache.remove(&resource, window, cx); - }); - } - - if potential_path_change { - this.svg_path = Self::get_svg_path(buffer, cx); - } - cx.notify(); - } - }, - ) - } - - pub fn is_svg_file(editor: &Entity, cx: &App) -> bool { - let buffer = editor.read(cx).buffer().read(cx); - if let Some(buffer) = buffer.as_singleton() - && let Some(file) = buffer.read(cx).file() - { - return file - .path() - .extension() - .map(|ext| ext.eq_ignore_ascii_case("svg")) - .unwrap_or(false); - } - false - } - - fn get_svg_path(buffer: &Entity, cx: &App) -> Option { - let buffer = buffer.read(cx).as_singleton()?; - let file = buffer.read(cx).file()?; - let local_file = file.as_local()?; - Some(local_file.abs_path(cx)) - } } impl Render for SvgPreviewView { @@ -265,20 +294,19 @@ impl Render for SvgPreviewView { .flex() .justify_center() .items_center() - .child(if let Some(svg_path) = &self.svg_path { - img(ImageSource::from(svg_path.clone())) - .image_cache(&self.image_cache) - .max_w_full() - .max_h_full() - .with_fallback(|| { - div() + .map(|this| match self.current_svg.clone() { + Some(Ok(image)) => { + this.child(img(image).max_w_full().max_h_full().with_fallback(|| { + h_flex() .p_4() - .child("Failed to load SVG file") + .gap_2() + .child(Icon::new(IconName::Warning)) + .child("Failed to load SVG image") .into_any_element() - }) - .into_any_element() - } else { - div().p_4().child("No SVG file selected").into_any_element() + })) + } + Some(Err(e)) => this.child(div().p_4().child(e).into_any_element()), + None => this.child(div().p_4().child("No SVG file selected")), }) } } @@ -295,20 +323,19 @@ impl Item for SvgPreviewView { type Event = (); fn tab_icon(&self, _window: &Window, cx: &App) -> Option { - // Use the same icon as SVG files in the file tree - self.svg_path + 
self.buffer .as_ref() - .and_then(|svg_path| FileIcons::get_icon(svg_path, cx)) + .and_then(|buffer| buffer.read(cx).file()) + .and_then(|file| FileIcons::get_icon(file.path().as_std_path(), cx)) .map(Icon::from_path) .or_else(|| Some(Icon::new(IconName::Image))) } - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - self.svg_path + fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { + self.buffer .as_ref() - .and_then(|svg_path| svg_path.file_name()) - .map(|name| name.to_string_lossy()) - .map(|name| format!("Preview {}", name).into()) + .and_then(|svg_path| svg_path.read(cx).file()) + .map(|name| format!("Preview {}", name.file_name(cx)).into()) .unwrap_or_else(|| "SVG Preview".into()) } diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index 2923ee6dd4b53108f0566a0a298b7fffd7e836ee..8b582796b371dc7e10f1bd72f406064fe6be80d1 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -155,9 +155,9 @@ impl TabSwitcher { Self { picker: cx.new(|cx| { if is_global { - Picker::uniform_list(delegate, window, cx) + Picker::list(delegate, window, cx) } else { - Picker::nonsearchable_uniform_list(delegate, window, cx) + Picker::nonsearchable_list(delegate, window, cx) } }), init_modifiers, diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index a58f2e20cc781f5d688b9fb1ceef8a17c48e6cb8..ed02381eb83db5daececd159171a90072244a340 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -23,6 +23,7 @@ log.workspace = true parking_lot.workspace = true postage.workspace = true rand = { workspace = true, optional = true } +regex.workspace = true rope.workspace = true smallvec.workspace = true sum_tree.workspace = true diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index d9f0626016f6377228070a2f21f0721d92ec58aa..6403c66106dca88cdac85e09888012d890158a23 100644 --- a/crates/text/src/text.rs +++ 
b/crates/text/src/text.rs @@ -20,9 +20,11 @@ use operation_queue::OperationQueue; pub use patch::Patch; use postage::{oneshot, prelude::*}; +use regex::Regex; pub use rope::*; pub use selection::*; use std::{ + borrow::Cow, cmp::{self, Ordering, Reverse}, fmt::Display, future::Future, @@ -30,7 +32,7 @@ use std::{ num::NonZeroU64, ops::{self, Deref, Range, Sub}, str, - sync::Arc, + sync::{Arc, LazyLock}, time::{Duration, Instant}, }; pub use subscription::*; @@ -41,6 +43,9 @@ use undo_map::UndoMap; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; +static LINE_SEPARATORS_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX")); + pub type TransactionId = clock::Lamport; pub struct Buffer { @@ -2014,24 +2019,10 @@ impl BufferSnapshot { start..position } - /// Returns the buffer's text as a String. - /// - /// Note: This always uses `\n` as the line separator, regardless of the buffer's - /// actual line ending setting. For LSP communication or other cases where you need - /// to preserve the original line endings, use [`Self::text_with_original_line_endings`] instead. pub fn text(&self) -> String { self.visible_text.to_string() } - /// Returns the buffer's text with line same endings as in buffer's file. - /// - /// Unlike [`Self::text`] which always uses `\n`, this method formats the text using - /// the buffer's actual line ending setting (Unix `\n` or Windows `\r\n`). - pub fn text_with_original_line_endings(&self) -> String { - self.visible_text - .to_string_with_line_ending(self.line_ending) - } - pub fn line_ending(&self) -> LineEnding { self.line_ending } @@ -2135,10 +2126,6 @@ impl BufferSnapshot { self.visible_text.reversed_bytes_in_range(start..end) } - /// Returns the text in the given range. - /// - /// Note: This always uses `\n` as the line separator, regardless of the buffer's - /// actual line ending setting. 
pub fn text_for_range(&self, range: Range) -> Chunks<'_> { let start = range.start.to_offset(self); let end = range.end.to_offset(self); @@ -3265,6 +3252,77 @@ impl FromAnchor for usize { } } +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum LineEnding { + Unix, + Windows, +} + +impl Default for LineEnding { + fn default() -> Self { + #[cfg(unix)] + return Self::Unix; + + #[cfg(not(unix))] + return Self::Windows; + } +} + +impl LineEnding { + pub fn as_str(&self) -> &'static str { + match self { + LineEnding::Unix => "\n", + LineEnding::Windows => "\r\n", + } + } + + pub fn label(&self) -> &'static str { + match self { + LineEnding::Unix => "LF", + LineEnding::Windows => "CRLF", + } + } + + pub fn detect(text: &str) -> Self { + let mut max_ix = cmp::min(text.len(), 1000); + while !text.is_char_boundary(max_ix) { + max_ix -= 1; + } + + if let Some(ix) = text[..max_ix].find(['\n']) { + if ix > 0 && text.as_bytes()[ix - 1] == b'\r' { + Self::Windows + } else { + Self::Unix + } + } else { + Self::default() + } + } + + pub fn normalize(text: &mut String) { + if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") { + *text = replaced; + } + } + + pub fn normalize_arc(text: Arc) -> Arc { + if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { + replaced.into() + } else { + text + } + } + + pub fn normalize_cow(text: Cow) -> Cow { + if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { + replaced.into() + } else { + text + } + } +} + #[cfg(debug_assertions)] pub mod debug { use super::*; diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index 829dea3a55ba9fee7f2ede503139e1348dabc57f..6d5d0ce170e261deefca679953199597b2753981 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -30,6 +30,7 @@ test-support = [ anyhow.workspace = true auto_update.workspace = true call.workspace = true +channel.workspace = true chrono.workspace = true client.workspace = true 
cloud_llm_client.workspace = true diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 5dd08ee3f9e132666520433db92279df559abdb0..070952d1cec346e4ec41e26f69895b65cd74f082 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -2,18 +2,22 @@ use std::rc::Rc; use std::sync::Arc; use call::{ActiveCall, ParticipantLocation, Room}; +use channel::ChannelStore; use client::{User, proto::PeerId}; use gpui::{ AnyElement, Hsla, IntoElement, MouseButton, Path, ScreenCaptureSource, Styled, WeakEntity, canvas, point, }; use gpui::{App, Task, Window, actions}; +use project::WorktreeSettings; use rpc::proto::{self}; +use settings::{Settings as _, SettingsLocation}; use theme::ActiveTheme; use ui::{ Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Divider, DividerColor, Facepile, PopoverMenu, SplitButton, SplitButtonStyle, TintColor, Tooltip, prelude::*, }; +use util::rel_path::RelPath; use workspace::notifications::DetachAndPromptErr; use crate::TitleBar; @@ -347,6 +351,11 @@ impl TitleBar { let can_share_projects = room.can_share_projects(); let screen_sharing_supported = cx.is_screen_capture_supported(); + let channel_store = ChannelStore::global(cx); + let channel = room + .channel_id() + .and_then(|channel_id| channel_store.read(cx).channel_for_id(channel_id).cloned()); + let mut children = Vec::new(); children.push( @@ -368,6 +377,20 @@ impl TitleBar { ); if is_local && can_share_projects && !is_connecting_to_project { + let is_sharing_disabled = channel.is_some_and(|channel| match channel.visibility { + proto::ChannelVisibility::Public => project.visible_worktrees(cx).any(|worktree| { + let worktree_id = worktree.read(cx).id(); + + let settings_location = Some(SettingsLocation { + worktree_id, + path: RelPath::empty(), + }); + + WorktreeSettings::get(settings_location, cx).prevent_sharing_in_public_channels + }), + proto::ChannelVisibility::Members => false, + }); + children.push( Button::new( 
"toggle_sharing", @@ -382,6 +405,11 @@ impl TitleBar { .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .toggle_state(is_shared) .label_size(LabelSize::Small) + .when(is_sharing_disabled, |parent| { + parent.disabled(true).tooltip(Tooltip::text( + "This project may not be shared in a public channel.", + )) + }) .on_click(cx.listener(move |this, _, window, cx| { if is_shared { this.unshare_project(window, cx); diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index bf52d7be8c7e91b230eac295dff03f2679a004af..e22669995db416a3ec6884a79860e76610dd7d03 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -68,6 +68,18 @@ impl KeyBinding { pub fn for_action_in(action: &dyn Action, focus: &FocusHandle, cx: &App) -> Self { Self::new(action, Some(focus.clone()), cx) } + pub fn has_binding(&self, window: &Window) -> bool { + match &self.source { + Source::Action { + action, + focus_handle: Some(focus), + } => window + .highest_precedence_binding_for_action_in(action.as_ref(), focus) + .or_else(|| window.highest_precedence_binding_for_action(action.as_ref())) + .is_some(), + _ => false, + } + } pub fn set_vim_mode(cx: &mut App, enabled: bool) { cx.set_global(VimStyle(enabled)); diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index bfcaa93eb41f22c36d106273f6da98da38981f62..d3d33a296bbd65edb24371d8f5f1e6462e77e3fe 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -101,13 +101,21 @@ where T: ScrollableHandle, { let element_id = config.id.take().unwrap_or_else(|| caller_location.into()); + let track_color = config.track_color; - window.use_keyed_state(element_id, cx, |window, cx| { + let state = window.use_keyed_state(element_id, cx, |window, cx| { let parent_id = cx.entity_id(); ScrollbarStateWrapper( cx.new(|cx| ScrollbarState::new_from_config(config, parent_id, window, cx)), ) - }) + }); + + 
state.update(cx, |state, cx| { + state + .0 + .update(cx, |state, _cx| state.update_track_color(track_color)) + }); + state } pub trait WithScrollbar: Sized { @@ -334,7 +342,7 @@ enum ReservedSpace { #[default] None, Thumb, - Track(Hsla), + Track, } impl ReservedSpace { @@ -343,14 +351,7 @@ impl ReservedSpace { } fn needs_scroll_track(&self) -> bool { - matches!(self, ReservedSpace::Track(_)) - } - - fn track_color(&self) -> Option { - match self { - ReservedSpace::Track(color) => Some(*color), - _ => None, - } + *self == ReservedSpace::Track } } @@ -385,6 +386,7 @@ pub struct Scrollbars { tracked_entity: Option>, scrollable_handle: Handle, visibility: Point, + track_color: Option, scrollbar_width: ScrollbarWidth, } @@ -406,6 +408,7 @@ impl Scrollbars { scrollable_handle: Handle::Untracked(ScrollHandle::new), tracked_entity: None, visibility: show_along.apply_to(Default::default(), ReservedSpace::Thumb), + track_color: None, scrollbar_width: ScrollbarWidth::Normal, } } @@ -446,6 +449,7 @@ impl Scrollbars { scrollbar_width, visibility, get_visibility, + track_color, .. 
} = self; @@ -455,6 +459,7 @@ impl Scrollbars { tracked_entity: tracked_entity_id, visibility, scrollbar_width, + track_color, get_visibility, } } @@ -465,7 +470,8 @@ impl Scrollbars { } pub fn with_track_along(mut self, along: ScrollAxes, background_color: Hsla) -> Self { - self.visibility = along.apply_to(self.visibility, ReservedSpace::Track(background_color)); + self.visibility = along.apply_to(self.visibility, ReservedSpace::Track); + self.track_color = Some(background_color); self } @@ -593,6 +599,7 @@ struct ScrollbarState { show_behavior: ShowBehavior, get_visibility: fn(&App) -> ShowScrollbar, visibility: Point, + track_color: Option, show_state: VisibilityState, mouse_in_parent: bool, last_prepaint_state: Option, @@ -622,6 +629,7 @@ impl ScrollbarState { scroll_handle, width: config.scrollbar_width, visibility: config.visibility, + track_color: config.track_color, show_behavior, get_visibility: config.get_visibility, show_state: VisibilityState::from_behavior(show_behavior), @@ -794,6 +802,10 @@ impl ScrollbarState { } } + fn update_track_color(&mut self, track_color: Option) { + self.track_color = track_color; + } + fn parent_hovered(&self, window: &Window) -> bool { self.last_prepaint_state .as_ref() @@ -1103,8 +1115,10 @@ impl Element for ScrollbarElement { .not() .then(|| ScrollbarPrepaintState { thumbs: { - let thumb_ranges = self.state.read(cx).thumb_ranges().collect::>(); - let width = self.state.read(cx).width.to_pixels(); + let state = self.state.read(cx); + let thumb_ranges = state.thumb_ranges().collect::>(); + let width = state.width.to_pixels(); + let track_color = state.track_color; let additional_padding = if thumb_ranges.len() == 2 { width @@ -1157,20 +1171,22 @@ impl Element for ScrollbarElement { .apply_along(axis, |_| thumb_end - thumb_offset), ); + let needs_scroll_track = reserved_space.needs_scroll_track(); + ScrollbarLayout { thumb_bounds, track_bounds: padded_bounds, axis, cursor_hitbox: window.insert_hitbox( - if 
reserved_space.needs_scroll_track() { + if needs_scroll_track { padded_bounds } else { thumb_bounds }, HitboxBehavior::BlockMouseExceptScroll, ), - track_background: reserved_space - .track_color() + track_background: track_color + .filter(|_| needs_scroll_track) .map(|color| (padded_bounds.dilate(SCROLLBAR_PADDING), color)), reserved_space, } @@ -1279,10 +1295,15 @@ impl Element for ScrollbarElement { } if let Some((track_bounds, color)) = track_background { + let mut color = *color; + if let Some(fade) = autohide_fade { + color.fade_out(fade); + } + window.paint_quad(quad( *track_bounds, Corners::default(), - *color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), diff --git a/crates/util/src/shell_env.rs b/crates/util/src/shell_env.rs index b3c9e3bef390b945314ba79fcc34ff2669a349a6..179e8529d253eacae1d0866acf266b661eb541e0 100644 --- a/crates/util/src/shell_env.rs +++ b/crates/util/src/shell_env.rs @@ -33,7 +33,6 @@ async fn capture_unix( directory: &Path, ) -> Result> { use std::os::unix::process::CommandExt; - use std::process::Stdio; let shell_kind = ShellKind::new(shell_path, false); let zed_path = super::get_shell_safe_zed_path(shell_kind)?; @@ -56,9 +55,6 @@ async fn capture_unix( ShellKind::Xonsh => (FD_STDERR, "o>e".to_string()), _ => (FD_STDIN, format!(">&{}", FD_STDIN)), // `>&0` }; - command.stdin(Stdio::null()); - command.stdout(Stdio::piped()); - command.stderr(Stdio::piped()); match shell_kind { ShellKind::Csh | ShellKind::Tcsh => { @@ -107,7 +103,7 @@ async fn spawn_and_read_fd( child_fd: std::os::fd::RawFd, ) -> anyhow::Result<(Vec, std::process::Output)> { use command_fds::{CommandFdExt, FdMapping}; - use std::io::Read; + use std::{io::Read, process::Stdio}; let (mut reader, writer) = std::io::pipe()?; @@ -116,7 +112,11 @@ async fn spawn_and_read_fd( child_fd, }])?; - let process = smol::process::Command::from(command).spawn()?; + let process = smol::process::Command::from(command) + .stdin(Stdio::null()) + 
.stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn()?; let mut buffer = Vec::new(); reader.read_to_end(&mut buffer)?; diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index 9d6381f8e6aa9afdc8b6ce5fa81bbcf47cca21f5..4cea29508f437d6753a78155965b94259a2d7884 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -67,6 +67,7 @@ lsp = { workspace = true, features = ["test-support"] } parking_lot.workspace = true project_panel.workspace = true release_channel.workspace = true +settings_ui.workspace = true settings.workspace = true perf.workspace = true util = { workspace = true, features = ["test-support"] } diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index f80f9be38edbb7fafb0864437c8de2bda4740154..739b40124181044326144c85897cf7e1d7536d5c 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -100,6 +100,10 @@ actions!( GoToTab, /// Go to previous tab page (with count support). GoToPreviousTab, + /// Go to previous reference (with count support). + GoToPreviousReference, + /// Go to next reference (with count support). 
+ GoToNextReference, ] ); @@ -202,6 +206,36 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut Context) { vim.join_lines_impl(false, window, cx); }); + Vim::action(editor, cx, |vim, _: &GoToPreviousReference, window, cx| { + let count = Vim::take_count(cx); + vim.update_editor(cx, |_, editor, cx| { + let task = editor.go_to_reference_before_or_after_position( + editor::Direction::Prev, + count.unwrap_or(1), + window, + cx, + ); + if let Some(task) = task { + task.detach_and_log_err(cx); + }; + }); + }); + + Vim::action(editor, cx, |vim, _: &GoToNextReference, window, cx| { + let count = Vim::take_count(cx); + vim.update_editor(cx, |_, editor, cx| { + let task = editor.go_to_reference_before_or_after_position( + editor::Direction::Next, + count.unwrap_or(1), + window, + cx, + ); + if let Some(task) = task { + task.detach_and_log_err(cx); + }; + }); + }); + Vim::action(editor, cx, |vim, _: &Undo, window, cx| { let times = Vim::take_count(cx); Vim::take_forced_motion(cx); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 93b610877a163ba0f3035e8a0483f531a3246e6c..cb02a3ab0fafdeec254e8b3722bdd877fbeda0e2 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -2,19 +2,21 @@ mod neovim_backed_test_context; mod neovim_connection; mod vim_test_context; -use std::time::Duration; +use std::{sync::Arc, time::Duration}; use collections::HashMap; use command_palette::CommandPalette; use editor::{ - AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, actions::DeleteLine, - code_context_menus::CodeContextMenu, display_map::DisplayRow, + AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, + actions::{DeleteLine, WrapSelectionsInTag}, + code_context_menus::CodeContextMenu, + display_map::DisplayRow, test::editor_test_context::EditorTestContext, }; use futures::StreamExt; use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext, px}; use itertools::Itertools; -use language::Point; +use language::{Language, LanguageConfig, 
Point}; pub use neovim_backed_test_context::*; use settings::SettingsStore; use ui::Pixels; @@ -2319,3 +2321,47 @@ async fn test_clipping_on_mode_change(cx: &mut gpui::TestAppContext) { Mode::Normal, ); } + +#[gpui::test] +async fn test_wrap_selections_in_tag_line_mode(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + let js_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + wrap_characters: Some(language::WrapCharactersConfig { + start_prefix: "<".into(), + start_suffix: ">".into(), + end_prefix: "".into(), + }), + ..LanguageConfig::default() + }, + None, + )); + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(js_language), cx)); + + cx.set_state( + indoc! { + " + ˇaaaaa + bbbbb + " + }, + Mode::Normal, + ); + + cx.simulate_keystrokes("shift-v j"); + cx.dispatch_action(WrapSelectionsInTag); + + cx.assert_state( + indoc! { + " + <ˇ>aaaaa + bbbbb + " + }, + Mode::VisualLine, + ); +} diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index 8dfc0c392d98073746e894bd4569f0edbf19e469..0ea300e30cbc93d60856890d2ca986dbc024127f 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -30,6 +30,7 @@ impl VimTestContext { editor::init_settings(cx); project::Project::init_settings(cx); theme::init(theme::LoadThemes::JustBase, cx); + settings_ui::init(cx); }); } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 7481d176109907baccf6e742d0b3f3614014dcac..e0b9bfd6e06c3528bd81b81e98d5cb65abb35aa3 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -260,6 +260,8 @@ actions!( [ /// Toggles Vim mode on or off. ToggleVimMode, + /// Toggles Helix mode on or off. 
+ ToggleHelixMode, ] ); @@ -274,9 +276,23 @@ pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, _, _| { workspace.register_action(|workspace, _: &ToggleVimMode, _, cx| { let fs = workspace.app_state().fs.clone(); - let currently_enabled = Vim::enabled(cx); + let currently_enabled = VimModeSetting::get_global(cx).0; update_settings_file(fs, cx, move |setting, _| { - setting.vim_mode = Some(!currently_enabled) + setting.vim_mode = Some(!currently_enabled); + if let Some(helix_mode) = &mut setting.helix_mode { + *helix_mode = false; + } + }) + }); + + workspace.register_action(|workspace, _: &ToggleHelixMode, _, cx| { + let fs = workspace.app_state().fs.clone(); + let currently_enabled = HelixModeSetting::get_global(cx).0; + update_settings_file(fs, cx, move |setting, _| { + setting.helix_mode = Some(!currently_enabled); + if let Some(vim_mode) = &mut setting.vim_mode { + *vim_mode = false; + } }) }); diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 9b6767086adffde00a0486b6a9cae62aaa8d41df..c2c79b6a5fc3cc337f6dd7273d529fd40f04c8a1 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -3702,6 +3702,10 @@ impl Render for Pane { key_context.add("EmptyPane"); } + self.toolbar + .read(cx) + .contribute_context(&mut key_context, cx); + let should_display_tab_bar = self.should_display_tab_bar.clone(); let display_tab_bar = should_display_tab_bar(window, cx); let Some(project) = self.project.upgrade() else { diff --git a/crates/workspace/src/toolbar.rs b/crates/workspace/src/toolbar.rs index 9d6626af80fbab9b7bf8439a3c5f628263892bc6..6e26be6dc7a09dd1ed8963579ae27d8f6cc8c50c 100644 --- a/crates/workspace/src/toolbar.rs +++ b/crates/workspace/src/toolbar.rs @@ -1,7 +1,7 @@ use crate::ItemHandle; use gpui::{ - AnyView, App, Context, Entity, EntityId, EventEmitter, ParentElement as _, Render, Styled, - Window, + AnyView, App, Context, Entity, EntityId, EventEmitter, KeyContext, ParentElement as _, Render, 
+ Styled, Window, }; use ui::prelude::*; use ui::{h_flex, v_flex}; @@ -25,6 +25,8 @@ pub trait ToolbarItemView: Render + EventEmitter { _cx: &mut Context, ) { } + + fn contribute_context(&self, _context: &mut KeyContext, _cx: &App) {} } trait ToolbarItemViewHandle: Send { @@ -37,6 +39,7 @@ trait ToolbarItemViewHandle: Send { cx: &mut App, ) -> ToolbarItemLocation; fn focus_changed(&mut self, pane_focused: bool, window: &mut Window, cx: &mut App); + fn contribute_context(&self, context: &mut KeyContext, cx: &App); } #[derive(Copy, Clone, Debug, PartialEq)] @@ -236,6 +239,14 @@ impl Toolbar { pub fn hidden(&self) -> bool { self.hidden } + + pub fn contribute_context(&self, context: &mut KeyContext, cx: &App) { + for (item, location) in &self.items { + if *location != ToolbarItemLocation::Hidden { + item.contribute_context(context, cx); + } + } + } } impl ToolbarItemViewHandle for Entity { @@ -264,4 +275,8 @@ impl ToolbarItemViewHandle for Entity { cx.notify(); }); } + + fn contribute_context(&self, context: &mut KeyContext, cx: &App) { + self.read(cx).contribute_context(context, cx) + } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 6933a6bcda8baffee618c219c3b05263f11738f5..a548a04aa7be55d44a0d30af5dbb49eeba54ade5 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -203,6 +203,8 @@ actions!( CloseActiveDock, /// Closes all docks. CloseAllDocks, + /// Toggles all docks. + ToggleAllDocks, /// Closes the current window. CloseWindow, /// Opens the feedback dialog. 
@@ -1176,6 +1178,7 @@ pub struct Workspace { _items_serializer: Task>, session_id: Option, scheduled_tasks: Vec>, + last_open_dock_positions: Vec, } impl EventEmitter for Workspace {} @@ -1518,6 +1521,7 @@ impl Workspace { session_id: Some(session_id), scheduled_tasks: Vec::new(), + last_open_dock_positions: Vec::new(), } } @@ -2987,12 +2991,17 @@ impl Workspace { window: &mut Window, cx: &mut Context, ) { - let dock = self.dock_at_position(dock_side); let mut focus_center = false; let mut reveal_dock = false; + + let other_is_zoomed = self.zoomed.is_some() && self.zoomed_position != Some(dock_side); + let was_visible = self.is_dock_at_position_open(dock_side, cx) && !other_is_zoomed; + if was_visible { + self.save_open_dock_positions(cx); + } + + let dock = self.dock_at_position(dock_side); dock.update(cx, |dock, cx| { - let other_is_zoomed = self.zoomed.is_some() && self.zoomed_position != Some(dock_side); - let was_visible = dock.is_open() && !other_is_zoomed; dock.set_open(!was_visible, window, cx); if dock.active_panel().is_none() { @@ -3041,7 +3050,8 @@ impl Workspace { } fn close_active_dock(&mut self, window: &mut Window, cx: &mut Context) -> bool { - if let Some(dock) = self.active_dock(window, cx) { + if let Some(dock) = self.active_dock(window, cx).cloned() { + self.save_open_dock_positions(cx); dock.update(cx, |dock, cx| { dock.set_open(false, window, cx); }); @@ -3051,6 +3061,7 @@ impl Workspace { } pub fn close_all_docks(&mut self, window: &mut Window, cx: &mut Context) { + self.save_open_dock_positions(cx); for dock in self.all_docks() { dock.update(cx, |dock, cx| { dock.set_open(false, window, cx); @@ -3062,6 +3073,67 @@ impl Workspace { self.serialize_workspace(window, cx); } + fn get_open_dock_positions(&self, cx: &Context) -> Vec { + self.all_docks() + .into_iter() + .filter_map(|dock| { + let dock_ref = dock.read(cx); + if dock_ref.is_open() { + Some(dock_ref.position()) + } else { + None + } + }) + .collect() + } + + /// Saves the positions of 
currently open docks. + /// + /// Updates `last_open_dock_positions` with positions of all currently open + /// docks, to later be restored by the 'Toggle All Docks' action. + fn save_open_dock_positions(&mut self, cx: &mut Context) { + let open_dock_positions = self.get_open_dock_positions(cx); + if !open_dock_positions.is_empty() { + self.last_open_dock_positions = open_dock_positions; + } + } + + /// Toggles all docks between open and closed states. + /// + /// If any docks are open, closes all and remembers their positions. If all + /// docks are closed, restores the last remembered dock configuration. + fn toggle_all_docks( + &mut self, + _: &ToggleAllDocks, + window: &mut Window, + cx: &mut Context, + ) { + let open_dock_positions = self.get_open_dock_positions(cx); + + if !open_dock_positions.is_empty() { + self.close_all_docks(window, cx); + } else if !self.last_open_dock_positions.is_empty() { + self.restore_last_open_docks(window, cx); + } + } + + /// Reopens docks from the most recently remembered configuration. + /// + /// Opens all docks whose positions are stored in `last_open_dock_positions` + /// and clears the stored positions. + fn restore_last_open_docks(&mut self, window: &mut Window, cx: &mut Context) { + let positions_to_open = std::mem::take(&mut self.last_open_dock_positions); + + for position in positions_to_open { + let dock = self.dock_at_position(position); + dock.update(cx, |dock, cx| dock.set_open(true, window, cx)); + } + + cx.focus_self(window); + cx.notify(); + self.serialize_workspace(window, cx); + } + /// Transfer focus to the panel of the given type. 
pub fn focus_panel( &mut self, @@ -5761,6 +5833,7 @@ impl Workspace { workspace.close_all_docks(window, cx); }), ) + .on_action(cx.listener(Self::toggle_all_docks)) .on_action(cx.listener( |workspace: &mut Workspace, _: &ClearAllNotifications, _, cx| { workspace.clear_all_notifications(cx); @@ -7323,6 +7396,7 @@ pub struct OpenOptions { pub visible: Option, pub focus: Option, pub open_new_workspace: Option, + pub prefer_focused_window: bool, pub replace_window: Option>, pub env: Option>, } @@ -7379,7 +7453,7 @@ pub fn open_paths( })?; if open_options.open_new_workspace.is_none() - && existing.is_none() + && (existing.is_none() || open_options.prefer_focused_window) && all_metadatas.iter().all(|file| !file.is_dir) { cx.update(|cx| { @@ -9205,6 +9279,238 @@ mod tests { }); } + #[gpui::test] + async fn test_toggle_all_docks(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + workspace.update_in(cx, |workspace, window, cx| { + // Open two docks + let left_dock = workspace.dock_at_position(DockPosition::Left); + let right_dock = workspace.dock_at_position(DockPosition::Right); + + left_dock.update(cx, |dock, cx| dock.set_open(true, window, cx)); + right_dock.update(cx, |dock, cx| dock.set_open(true, window, cx)); + + assert!(left_dock.read(cx).is_open()); + assert!(right_dock.read(cx).is_open()); + }); + + workspace.update_in(cx, |workspace, window, cx| { + // Toggle all docks - should close both + workspace.toggle_all_docks(&ToggleAllDocks, window, cx); + + let left_dock = workspace.dock_at_position(DockPosition::Left); + let right_dock = workspace.dock_at_position(DockPosition::Right); + assert!(!left_dock.read(cx).is_open()); + assert!(!right_dock.read(cx).is_open()); + }); + + workspace.update_in(cx, |workspace, window, cx| { + // Toggle again - should reopen both + 
workspace.toggle_all_docks(&ToggleAllDocks, window, cx); + + let left_dock = workspace.dock_at_position(DockPosition::Left); + let right_dock = workspace.dock_at_position(DockPosition::Right); + assert!(left_dock.read(cx).is_open()); + assert!(right_dock.read(cx).is_open()); + }); + } + + #[gpui::test] + async fn test_toggle_all_with_manual_close(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + workspace.update_in(cx, |workspace, window, cx| { + // Open two docks + let left_dock = workspace.dock_at_position(DockPosition::Left); + let right_dock = workspace.dock_at_position(DockPosition::Right); + + left_dock.update(cx, |dock, cx| dock.set_open(true, window, cx)); + right_dock.update(cx, |dock, cx| dock.set_open(true, window, cx)); + + assert!(left_dock.read(cx).is_open()); + assert!(right_dock.read(cx).is_open()); + }); + + workspace.update_in(cx, |workspace, window, cx| { + // Close them manually + workspace.toggle_dock(DockPosition::Left, window, cx); + workspace.toggle_dock(DockPosition::Right, window, cx); + + let left_dock = workspace.dock_at_position(DockPosition::Left); + let right_dock = workspace.dock_at_position(DockPosition::Right); + assert!(!left_dock.read(cx).is_open()); + assert!(!right_dock.read(cx).is_open()); + }); + + workspace.update_in(cx, |workspace, window, cx| { + // Toggle all docks - only last closed (right dock) should reopen + workspace.toggle_all_docks(&ToggleAllDocks, window, cx); + + let left_dock = workspace.dock_at_position(DockPosition::Left); + let right_dock = workspace.dock_at_position(DockPosition::Right); + assert!(!left_dock.read(cx).is_open()); + assert!(right_dock.read(cx).is_open()); + }); + } + + #[gpui::test] + async fn test_toggle_all_docks_after_dock_move(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = 
FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + // Open two docks (left and right) with one panel each + let (left_panel, right_panel) = workspace.update_in(cx, |workspace, window, cx| { + let left_panel = cx.new(|cx| TestPanel::new(DockPosition::Left, cx)); + workspace.add_panel(left_panel.clone(), window, cx); + + let right_panel = cx.new(|cx| TestPanel::new(DockPosition::Right, cx)); + workspace.add_panel(right_panel.clone(), window, cx); + + workspace.toggle_dock(DockPosition::Left, window, cx); + workspace.toggle_dock(DockPosition::Right, window, cx); + + // Verify initial state + assert!( + workspace.left_dock().read(cx).is_open(), + "Left dock should be open" + ); + assert_eq!( + workspace + .left_dock() + .read(cx) + .visible_panel() + .unwrap() + .panel_id(), + left_panel.panel_id(), + "Left panel should be visible in left dock" + ); + assert!( + workspace.right_dock().read(cx).is_open(), + "Right dock should be open" + ); + assert_eq!( + workspace + .right_dock() + .read(cx) + .visible_panel() + .unwrap() + .panel_id(), + right_panel.panel_id(), + "Right panel should be visible in right dock" + ); + assert!( + !workspace.bottom_dock().read(cx).is_open(), + "Bottom dock should be closed" + ); + + (left_panel, right_panel) + }); + + // Focus the left panel and move it to the next position (bottom dock) + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_panel_focus::(window, cx); // Focus left panel + assert!( + left_panel.read(cx).focus_handle(cx).is_focused(window), + "Left panel should be focused" + ); + }); + + cx.dispatch_action(MoveFocusedPanelToNextPosition); + + // Verify the left panel has moved to the bottom dock, and the bottom dock is now open + workspace.update(cx, |workspace, cx| { + assert!( + !workspace.left_dock().read(cx).is_open(), + "Left dock should be closed" + ); + assert!( + 
workspace.bottom_dock().read(cx).is_open(), + "Bottom dock should now be open" + ); + assert_eq!( + left_panel.read(cx).position, + DockPosition::Bottom, + "Left panel should now be in the bottom dock" + ); + assert_eq!( + workspace + .bottom_dock() + .read(cx) + .visible_panel() + .unwrap() + .panel_id(), + left_panel.panel_id(), + "Left panel should be the visible panel in the bottom dock" + ); + }); + + // Toggle all docks off + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_all_docks(&ToggleAllDocks, window, cx); + assert!( + !workspace.left_dock().read(cx).is_open(), + "Left dock should be closed" + ); + assert!( + !workspace.right_dock().read(cx).is_open(), + "Right dock should be closed" + ); + assert!( + !workspace.bottom_dock().read(cx).is_open(), + "Bottom dock should be closed" + ); + }); + + // Toggle all docks back on and verify positions are restored + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_all_docks(&ToggleAllDocks, window, cx); + assert!( + !workspace.left_dock().read(cx).is_open(), + "Left dock should remain closed" + ); + assert!( + workspace.right_dock().read(cx).is_open(), + "Right dock should remain open" + ); + assert!( + workspace.bottom_dock().read(cx).is_open(), + "Bottom dock should remain open" + ); + assert_eq!( + left_panel.read(cx).position, + DockPosition::Bottom, + "Left panel should remain in the bottom dock" + ); + assert_eq!( + right_panel.read(cx).position, + DockPosition::Right, + "Right panel should remain in the right dock" + ); + assert_eq!( + workspace + .bottom_dock() + .read(cx) + .visible_panel() + .unwrap() + .panel_id(), + left_panel.panel_id(), + "Left panel should be the visible panel in the right dock" + ); + }); + } + #[gpui::test] async fn test_join_pane_into_next(cx: &mut gpui::TestAppContext) { init_test(cx); diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 
9eddef8eaf43cecca949ea6f595c75795698ab38..e536256e51401e5cb4d9cbece0e5a52b3ff22b3c 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -11,6 +11,8 @@ use util::{ #[derive(Clone, PartialEq, Eq)] pub struct WorktreeSettings { pub project_name: Option, + /// Whether to prevent this project from being shared in public channels. + pub prevent_sharing_in_public_channels: bool, pub file_scan_inclusions: PathMatcher, pub file_scan_exclusions: PathMatcher, pub private_files: PathMatcher, @@ -51,6 +53,7 @@ impl Settings for WorktreeSettings { Self { project_name: worktree.project_name.into_inner(), + prevent_sharing_in_public_channels: worktree.prevent_sharing_in_public_channels, file_scan_exclusions: path_matchers(file_scan_exclusions, "file_scan_exclusions") .log_err() .unwrap_or_default(), diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index c84fa8261fe2efdc4c8c831fcd239514c2d16526..9f6196c1482bcff2db9b7812dfb75b1471fec273 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.211.0" +version = "0.212.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] diff --git a/crates/zed/resources/windows/zed.iss b/crates/zed/resources/windows/zed.iss index b726bb1c2117b1d53f560aaff83acb370c2f2cd4..9df6d3b228272e305b2b06345c389b34756e326a 100644 --- a/crates/zed/resources/windows/zed.iss +++ b/crates/zed/resources/windows/zed.iss @@ -31,7 +31,10 @@ WizardStyle=modern CloseApplications=force +#if GetEnv("CI") != "" SignTool=Defaultsign +#endif + DefaultDirName={autopf}\{#AppName} PrivilegesRequired=lowest @@ -46,6 +49,10 @@ Name: "simplifiedChinese"; MessagesFile: "{#ResourcesDir}\messages\Default.zh-cn ; Delete logs Type: filesandordirs; Name: "{app}\tools" Type: filesandordirs; Name: "{app}\updates" +; Delete newer files which may not have been added by the initial installation +Type: filesandordirs; Name: "{app}\x64" +Type: filesandordirs; Name: "{app}\arm64" + [Tasks] Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked @@ -62,8 +69,15 @@ Source: "{#ResourcesDir}\Zed.exe"; DestDir: "{code:GetInstallDir}"; Flags: ignor Source: "{#ResourcesDir}\bin\*"; DestDir: "{code:GetInstallDir}\bin"; Flags: ignoreversion Source: "{#ResourcesDir}\tools\*"; DestDir: "{app}\tools"; Flags: ignoreversion Source: "{#ResourcesDir}\appx\*"; DestDir: "{app}\appx"; BeforeInstall: RemoveAppxPackage; AfterInstall: AddAppxPackage; Flags: ignoreversion; Check: IsWindows11OrLater +#ifexist ResourcesDir + "\amd_ags_x64.dll" Source: "{#ResourcesDir}\amd_ags_x64.dll"; DestDir: "{app}"; Flags: ignoreversion -Source: "{#ResourcesDir}\OpenConsole.exe"; DestDir: "{code:GetInstallDir}"; Flags: ignoreversion +#endif +#ifexist ResourcesDir + "\x64\OpenConsole.exe" +Source: "{#ResourcesDir}\x64\OpenConsole.exe"; DestDir: "{code:GetInstallDir}\x64"; Flags: ignoreversion +#endif +#ifexist ResourcesDir + "\arm64\OpenConsole.exe" +Source: 
"{#ResourcesDir}\arm64\OpenConsole.exe"; DestDir: "{code:GetInstallDir}\arm64"; Flags: ignoreversion +#endif Source: "{#ResourcesDir}\conpty.dll"; DestDir: "{code:GetInstallDir}"; Flags: ignoreversion [Icons] diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 93feb4a71d18164501955b46187a14d6757d861e..b873a58d3b61338b25c5908c2f87b62acb95d6f6 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -853,10 +853,13 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut // languages.$(language).tab_size // [ languages $(language) tab_size] workspace::with_active_or_new_workspace(cx, |_workspace, window, cx| { - window.dispatch_action( - Box::new(zed_actions::OpenSettingsAt { path: setting_path }), - cx, - ); + match setting_path { + None => window.dispatch_action(Box::new(zed_actions::OpenSettings), cx), + Some(setting_path) => window.dispatch_action( + Box::new(zed_actions::OpenSettingsAt { path: setting_path }), + cx, + ), + } }); } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index cc2906adfc0f1d8e3a78423e69b93e5ee5909da0..f71299997930040c848dd6f5c2819185cf8fee81 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -39,7 +39,7 @@ use language_onboarding::BasedPyrightBanner; use language_tools::lsp_button::{self, LspButton}; use language_tools::lsp_log_view::LspLogToolbarItemView; use migrate::{MigrationBanner, MigrationEvent, MigrationNotification, MigrationType}; -use migrator::{migrate_keymap, migrate_settings}; +use migrator::migrate_keymap; use onboarding::DOCS_URL; use onboarding::multibuffer_hint::MultibufferHint; pub use open_listener::*; @@ -1298,18 +1298,24 @@ pub fn handle_settings_file_changes( store: &mut SettingsStore, cx: &mut App| -> bool { + let result = if is_user { + store.set_user_settings(&content, cx) + } else { + store.set_global_settings(&content, cx) + }; + let id = NotificationId::Named("failed-to-migrate-settings".into()); // Apply migrations to both user and global 
settings - let (processed_content, content_migrated) = match migrate_settings(&content) { - Ok(result) => { + let content_migrated = match result.migration_status { + settings::MigrationStatus::Succeeded => { dismiss_app_notification(&id, cx); - if let Some(migrated_content) = result { - (migrated_content, true) - } else { - (content, false) - } + true + } + settings::MigrationStatus::NotNeeded => { + dismiss_app_notification(&id, cx); + false } - Err(err) => { + settings::MigrationStatus::Failed { error: err } => { show_app_notification(id, cx, move |cx| { cx.new(|cx| { MessageNotification::new( @@ -1328,22 +1334,22 @@ pub fn handle_settings_file_changes( }) }); // notify user here - (content, false) + false } }; - let result = if is_user { - store.set_user_settings(&processed_content, cx) - } else { - store.set_global_settings(&processed_content, cx) - }; - - if let Err(err) = &result { + if let settings::ParseStatus::Failed { error: err } = &result.parse_status { let settings_type = if is_user { "user" } else { "global" }; log::error!("Failed to load {} settings: {err}", settings_type); } - settings_changed(result.err(), cx); + settings_changed( + match result.parse_status { + settings::ParseStatus::Failed { error } => Some(anyhow::format_err!(error)), + settings::ParseStatus::Success => None, + }, + cx, + ); content_migrated }; @@ -4556,74 +4562,6 @@ mod tests { }); } - /// Actions that don't build from empty input won't work from command palette invocation. 
- #[gpui::test] - async fn test_actions_build_with_empty_input(cx: &mut gpui::TestAppContext) { - init_keymap_test(cx); - cx.update(|cx| { - let all_actions = cx.all_action_names(); - let mut failing_names = Vec::new(); - let mut errors = Vec::new(); - for action in all_actions { - match action.to_string().as_str() { - "vim::FindCommand" - | "vim::Literal" - | "vim::ResizePane" - | "vim::PushObject" - | "vim::PushFindForward" - | "vim::PushFindBackward" - | "vim::PushSneak" - | "vim::PushSneakBackward" - | "vim::PushChangeSurrounds" - | "vim::PushJump" - | "vim::PushDigraph" - | "vim::PushLiteral" - | "vim::PushHelixNext" - | "vim::PushHelixPrevious" - | "vim::Number" - | "vim::SelectRegister" - | "git::StageAndNext" - | "git::UnstageAndNext" - | "terminal::SendText" - | "terminal::SendKeystroke" - | "app_menu::OpenApplicationMenu" - | "picker::ConfirmInput" - | "editor::HandleInput" - | "editor::FoldAtLevel" - | "pane::ActivateItem" - | "workspace::ActivatePane" - | "workspace::MoveItemToPane" - | "workspace::MoveItemToPaneInDirection" - | "workspace::NewFileSplit" - | "workspace::OpenTerminal" - | "workspace::SendKeystrokes" - | "agent::NewNativeAgentThreadFromSummary" - | "action::Sequence" - | "zed::OpenBrowser" - | "zed::OpenZedUrl" - | "settings_editor::FocusFile" => {} - _ => { - let result = cx.build_action(action, None); - match &result { - Ok(_) => {} - Err(err) => { - failing_names.push(action); - errors.push(format!("{action} failed to build: {err:?}")); - } - } - } - } - } - if !errors.is_empty() { - panic!( - "Failed to build actions using {{}} as input: {:?}. Errors:\n{}", - failing_names, - errors.join("\n") - ); - } - }); - } - /// Checks that action namespaces are the expected set. The purpose of this is to prevent typos /// and let you know when introducing a new namespace. 
#[gpui::test] @@ -5080,4 +5018,63 @@ mod tests { "BUG FOUND: Project settings were overwritten when opening via command - original custom content was lost" ); } + + #[gpui::test] + async fn test_prefer_focused_window(cx: &mut gpui::TestAppContext) { + let app_state = init_test(cx); + let paths = [PathBuf::from(path!("/dir/document.txt"))]; + + app_state + .fs + .as_fake() + .insert_tree( + path!("/dir"), + json!({ + "document.txt": "Some of the documentation's content." + }), + ) + .await; + + let project_a = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; + let window_a = + cx.add_window(|window, cx| Workspace::test_new(project_a.clone(), window, cx)); + + let project_b = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; + let window_b = + cx.add_window(|window, cx| Workspace::test_new(project_b.clone(), window, cx)); + + let project_c = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; + let window_c = + cx.add_window(|window, cx| Workspace::test_new(project_c.clone(), window, cx)); + + for window in [window_a, window_b, window_c] { + let _ = cx.update_window(*window, |_, window, _| { + window.activate_window(); + }); + + cx.update(|cx| { + let open_options = OpenOptions { + prefer_focused_window: true, + ..Default::default() + }; + + workspace::open_paths(&paths, app_state.clone(), open_options, cx) + }) + .await + .unwrap(); + + cx.update_window(*window, |_, window, _| assert!(window.is_window_active())) + .unwrap(); + + let _ = window.read_with(cx, |workspace, cx| { + let pane = workspace.active_pane().read(cx); + let project_path = pane.active_item().unwrap().project_path(cx).unwrap(); + + assert_eq!( + project_path.path.as_ref().as_std_path().to_str().unwrap(), + path!("document.txt") + ) + }); + } + } } diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index ac22f972368f61fa518ac74a5ac23e593433c75b..af68cbbbe9c5178db80f1fc9adc0a922e634c82a 100644 --- 
a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -28,7 +28,7 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock), MenuItem::action("Toggle Right Dock", workspace::ToggleRightDock), MenuItem::action("Toggle Bottom Dock", workspace::ToggleBottomDock), - MenuItem::action("Close All Docks", workspace::CloseAllDocks), + MenuItem::action("Toggle All Docks", workspace::ToggleAllDocks), MenuItem::submenu(Menu { name: "Editor Layout".into(), items: vec![ diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 618849b3474e60f8a3737facf7c502f6e5f1cf52..3abb76715d67e3d288cf812fc6a4bff58ac3ddfe 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -43,11 +43,20 @@ pub struct OpenRequest { #[derive(Debug)] pub enum OpenRequestKind { CliConnection((mpsc::Receiver, IpcSender)), - Extension { extension_id: String }, + Extension { + extension_id: String, + }, AgentPanel, - DockMenuAction { index: usize }, - BuiltinJsonSchema { schema_path: String }, - Setting { setting_path: String }, + DockMenuAction { + index: usize, + }, + BuiltinJsonSchema { + schema_path: String, + }, + Setting { + // None just opens settings without navigating to a specific path + setting_path: Option, + }, } impl OpenRequest { @@ -94,9 +103,11 @@ impl OpenRequest { this.kind = Some(OpenRequestKind::BuiltinJsonSchema { schema_path: schema_path.to_string(), }); + } else if url == "zed://settings" || url == "zed://settings/" { + this.kind = Some(OpenRequestKind::Setting { setting_path: None }); } else if let Some(setting_path) = url.strip_prefix("zed://settings/") { this.kind = Some(OpenRequestKind::Setting { - setting_path: setting_path.to_string(), + setting_path: Some(setting_path.to_string()), }); } else if url.starts_with("ssh://") { this.parse_ssh_file_path(&url, cx)? 
@@ -531,6 +542,7 @@ async fn open_local_workspace( workspace::OpenOptions { open_new_workspace: effective_open_new_workspace, replace_window, + prefer_focused_window: wait, env: env.cloned(), ..Default::default() }, diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index a25074d46f356bbea5de986055b93557e73a8383..273e99588b90d16f6c0b7c4f2982cd995d4ca2f1 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -22,7 +22,7 @@ use ui::{ ButtonStyle, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, IconButton, IconName, IconSize, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*, }; -use vim_mode_setting::VimModeSetting; +use vim_mode_setting::{HelixModeSetting, VimModeSetting}; use workspace::item::ItemBufferKind; use workspace::{ ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, item::ItemHandle, @@ -307,6 +307,7 @@ impl Render for QuickActionBar { let editor = editor.downgrade(); let editor_settings_dropdown = { let vim_mode_enabled = VimModeSetting::get_global(cx).0; + let helix_mode_enabled = HelixModeSetting::get_global(cx).0; PopoverMenu::new("editor-settings") .trigger_with_tooltip( @@ -583,10 +584,25 @@ impl Render for QuickActionBar { move |window, cx| { let new_value = !vim_mode_enabled; VimModeSetting::override_global(VimModeSetting(new_value), cx); + HelixModeSetting::override_global(HelixModeSetting(false), cx); window.refresh(); } }, ); + menu = menu.toggleable_entry( + "Helix Mode", + helix_mode_enabled, + IconPosition::Start, + None, + { + move |window, cx| { + let new_value = !helix_mode_enabled; + HelixModeSetting::override_global(HelixModeSetting(new_value), cx); + VimModeSetting::override_global(VimModeSetting(false), cx); + window.refresh(); + } + } + ); menu } diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 3506e492b77d1eca6a1dde84bf5ea0a2be107540..5cb2903fa653fc765bfb3471aa51b232e4bfadec 
100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -27,6 +27,13 @@ pub struct OpenZedUrl { pub url: String, } +/// Opens the keymap to either add a keybinding or change an existing one +#[derive(PartialEq, Clone, Default, Action, JsonSchema, Serialize, Deserialize)] +#[action(namespace = zed, no_json, no_register)] +pub struct ChangeKeybinding { + pub action: String, +} + actions!( zed, [ @@ -109,12 +116,11 @@ pub struct IncreaseBufferFontSize { } /// Increases the font size in the editor buffer. -#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] +#[derive(PartialEq, Clone, Debug, Deserialize, JsonSchema, Action)] #[action(namespace = zed)] #[serde(deny_unknown_fields)] pub struct OpenSettingsAt { /// A path to a specific setting (e.g. `theme.mode`) - #[serde(default)] pub path: String, } @@ -232,7 +238,7 @@ pub mod command_palette { command_palette, [ /// Toggles the command palette. - Toggle + Toggle, ] ); } diff --git a/crates/zeta2/Cargo.toml b/crates/zeta2/Cargo.toml index 7ca140fa353b6404e451fdb79cccfed982b64e27..13bb4e9106de9f5f201ba59106304a6aab4208d1 100644 --- a/crates/zeta2/Cargo.toml +++ b/crates/zeta2/Cargo.toml @@ -18,6 +18,7 @@ chrono.workspace = true client.workspace = true cloud_llm_client.workspace = true cloud_zeta2_prompt.workspace = true +collections.workspace = true edit_prediction.workspace = true edit_prediction_context.workspace = true feature_flags.workspace = true @@ -29,6 +30,7 @@ language_model.workspace = true log.workspace = true project.workspace = true release_channel.workspace = true +schemars.workspace = true serde.workspace = true serde_json.workspace = true thiserror.workspace = true @@ -43,6 +45,7 @@ cloud_llm_client = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } lsp.workspace = true indoc.workspace = true +language = { workspace = true, features = ["test-support"] } language_model = { workspace = true, 
features = ["test-support"] } pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } diff --git a/crates/zeta2/src/merge_excerpts.rs b/crates/zeta2/src/merge_excerpts.rs new file mode 100644 index 0000000000000000000000000000000000000000..4cb7ab6cf4d3b63e641087f0c22cf0f900f56adc --- /dev/null +++ b/crates/zeta2/src/merge_excerpts.rs @@ -0,0 +1,192 @@ +use cloud_llm_client::predict_edits_v3::{self, Excerpt}; +use edit_prediction_context::Line; +use language::{BufferSnapshot, Point}; +use std::ops::Range; + +pub fn merge_excerpts( + buffer: &BufferSnapshot, + sorted_line_ranges: impl IntoIterator>, +) -> Vec { + let mut output = Vec::new(); + let mut merged_ranges = Vec::>::new(); + + for line_range in sorted_line_ranges { + if let Some(last_line_range) = merged_ranges.last_mut() + && line_range.start <= last_line_range.end + { + last_line_range.end = last_line_range.end.max(line_range.end); + continue; + } + merged_ranges.push(line_range); + } + + let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None); + let mut outline_items = outline_items.into_iter().peekable(); + + for range in merged_ranges { + let point_range = Point::new(range.start.0, 0)..Point::new(range.end.0, 0); + + while let Some(outline_item) = outline_items.peek() { + if outline_item.range.start >= point_range.start { + break; + } + if outline_item.range.end > point_range.start { + let mut point_range = outline_item.source_range_for_text.clone(); + point_range.start.column = 0; + point_range.end.column = buffer.line_len(point_range.end.row); + + output.push(Excerpt { + start_line: Line(point_range.start.row), + text: buffer + .text_for_range(point_range.clone()) + .collect::() + .into(), + }) + } + outline_items.next(); + } + + output.push(Excerpt { + start_line: Line(point_range.start.row), + text: buffer + .text_for_range(point_range.clone()) + .collect::() + .into(), + }) + } + + output +} + +pub fn write_merged_excerpts( 
+ buffer: &BufferSnapshot, + sorted_line_ranges: impl IntoIterator>, + sorted_insertions: &[(predict_edits_v3::Point, &str)], + output: &mut String, +) { + cloud_zeta2_prompt::write_excerpts( + merge_excerpts(buffer, sorted_line_ranges).iter(), + sorted_insertions, + Line(buffer.max_point().row), + true, + output, + ); +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::*; + use gpui::{TestAppContext, prelude::*}; + use indoc::indoc; + use language::{Buffer, Language, LanguageConfig, LanguageMatcher, OffsetRangeExt}; + use pretty_assertions::assert_eq; + use util::test::marked_text_ranges; + + #[gpui::test] + fn test_rust(cx: &mut TestAppContext) { + let table = [ + ( + indoc! {r#" + struct User { + first_name: String, + « last_name: String, + ageˇ: u32, + » email: String, + create_at: Instant, + } + + impl User { + pub fn first_name(&self) -> String { + self.first_name.clone() + } + + pub fn full_name(&self) -> String { + « format!("{} {}", self.first_name, self.last_name) + » } + } + "#}, + indoc! {r#" + 1|struct User { + … + 3| last_name: String, + 4| age<|cursor|>: u32, + … + 9|impl User { + … + 14| pub fn full_name(&self) -> String { + 15| format!("{} {}", self.first_name, self.last_name) + … + "#}, + ), + ( + indoc! {r#" + struct User { + first_name: String, + « last_name: String, + age: u32, + } + »"# + }, + indoc! 
{r#" + 1|struct User { + … + 3| last_name: String, + 4| age: u32, + 5|} + "#}, + ), + ]; + + for (input, expected_output) in table { + let input_without_ranges = input.replace(['«', '»'], ""); + let input_without_caret = input.replace('ˇ', ""); + let cursor_offset = input_without_ranges.find('ˇ'); + let (input, ranges) = marked_text_ranges(&input_without_caret, false); + let buffer = + cx.new(|cx| Buffer::local(input, cx).with_language(Arc::new(rust_lang()), cx)); + buffer.read_with(cx, |buffer, _cx| { + let insertions = cursor_offset + .map(|offset| { + let point = buffer.offset_to_point(offset); + vec![( + predict_edits_v3::Point { + line: Line(point.row), + column: point.column, + }, + "<|cursor|>", + )] + }) + .unwrap_or_default(); + let ranges: Vec> = ranges + .into_iter() + .map(|range| { + let point_range = range.to_point(&buffer); + Line(point_range.start.row)..Line(point_range.end.row) + }) + .collect(); + + let mut output = String::new(); + write_merged_excerpts(&buffer.snapshot(), ranges, &insertions, &mut output); + assert_eq!(output, expected_output); + }); + } + } + + fn rust_lang() -> Language { + Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(language::tree_sitter_rust::LANGUAGE.into()), + ) + .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) + .unwrap() + } +} diff --git a/crates/zeta2/src/provider.rs b/crates/zeta2/src/provider.rs index 3c0dd75cc23a6a7b18a0fba19d0eab0a4833ba9c..a19e7f9a1da5e1808c48e3ce0469d8b390698760 100644 --- a/crates/zeta2/src/provider.rs +++ b/crates/zeta2/src/provider.rs @@ -116,6 +116,10 @@ impl EditPredictionProvider for ZetaEditPredictionProvider { return; } + self.zeta.update(cx, |zeta, cx| { + zeta.refresh_context_if_needed(&self.project, &buffer, cursor_position, cx); + }); + let pending_prediction_id = self.next_pending_prediction_id; 
self.next_pending_prediction_id += 1; let last_request_timestamp = self.last_request_timestamp; diff --git a/crates/zeta2/src/related_excerpts.rs b/crates/zeta2/src/related_excerpts.rs new file mode 100644 index 0000000000000000000000000000000000000000..7434dbed9e48bb2dcf98131177dc65b2f3930094 --- /dev/null +++ b/crates/zeta2/src/related_excerpts.rs @@ -0,0 +1,623 @@ +use std::{cmp::Reverse, fmt::Write, ops::Range, path::PathBuf, sync::Arc, time::Instant}; + +use crate::{ + ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo, + merge_excerpts::write_merged_excerpts, +}; +use anyhow::{Result, anyhow}; +use collections::HashMap; +use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions, Line}; +use futures::{StreamExt, channel::mpsc, stream::BoxStream}; +use gpui::{App, AsyncApp, Entity, Task}; +use indoc::indoc; +use language::{Anchor, Bias, Buffer, OffsetRangeExt, Point, TextBufferSnapshot, ToPoint as _}; +use language_model::{ + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, + LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, + LanguageModelRequestTool, LanguageModelToolResult, MessageContent, Role, +}; +use project::{ + Project, WorktreeSettings, + search::{SearchQuery, SearchResult}, +}; +use schemars::JsonSchema; +use serde::Deserialize; +use util::paths::{PathMatcher, PathStyle}; +use workspace::item::Settings as _; + +const SEARCH_PROMPT: &str = indoc! {r#" + ## Task + + You are part of an edit prediction system in a code editor. Your role is to identify relevant code locations + that will serve as context for predicting the next required edit. 
+ + **Your task:** + - Analyze the user's recent edits and current cursor context + - Use the `search` tool to find code that may be relevant for predicting the next edit + - Focus on finding: + - Code patterns that might need similar changes based on the recent edits + - Functions, variables, types, and constants referenced in the current cursor context + - Related implementations, usages, or dependencies that may require consistent updates + + **Important constraints:** + - This conversation has exactly 2 turns + - You must make ALL search queries in your first response via the `search` tool + - All queries will be executed in parallel and results returned together + - In the second turn, you will select the most relevant results via the `select` tool. + + ## User Edits + + {edits} + + ## Current cursor context + + `````filename={current_file_path} + {cursor_excerpt} + ````` + + -- + Use the `search` tool now +"#}; + +const SEARCH_TOOL_NAME: &str = "search"; + +/// Search for relevant code +/// +/// For the best results, run multiple queries at once with a single invocation of this tool. +#[derive(Clone, Deserialize, JsonSchema)] +pub struct SearchToolInput { + /// An array of queries to run for gathering context relevant to the next prediction + #[schemars(length(max = 5))] + pub queries: Box<[SearchToolQuery]>, +} + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +pub struct SearchToolQuery { + /// A glob pattern to match file paths in the codebase + pub glob: String, + /// A regular expression to match content within the files matched by the glob pattern + pub regex: String, + /// Whether the regex is case-sensitive. Defaults to false (case-insensitive). + #[serde(default)] + pub case_sensitive: bool, +} + +const RESULTS_MESSAGE: &str = indoc! {" + Here are the results of your queries combined and grouped by file: + +"}; + +const SELECT_TOOL_NAME: &str = "select"; + +const SELECT_PROMPT: &str = indoc! 
{" + Use the `select` tool now to pick the most relevant line ranges according to the user state provided in the first message. + Make sure to include enough lines of context so that the edit prediction model can suggest accurate edits. + Include up to 200 lines in total. +"}; + +/// Select line ranges from search results +#[derive(Deserialize, JsonSchema)] +struct SelectToolInput { + /// The line ranges to select from search results. + ranges: Vec, +} + +/// A specific line range to select from a file +#[derive(Debug, Deserialize, JsonSchema)] +struct SelectLineRange { + /// The file path containing the lines to select + /// Exactly as it appears in the search result codeblocks. + path: PathBuf, + /// The starting line number (1-based) + #[schemars(range(min = 1))] + start_line: u32, + /// The ending line number (1-based, inclusive) + #[schemars(range(min = 1))] + end_line: u32, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct LlmContextOptions { + pub excerpt: EditPredictionExcerptOptions, +} + +pub fn find_related_excerpts<'a>( + buffer: Entity, + cursor_position: Anchor, + project: &Entity, + events: impl Iterator, + options: &LlmContextOptions, + debug_tx: Option>, + cx: &App, +) -> Task, Vec>>>> { + let language_model_registry = LanguageModelRegistry::global(cx); + let Some(model) = language_model_registry + .read(cx) + .available_models(cx) + .find(|model| { + model.provider_id() == language_model::ANTHROPIC_PROVIDER_ID + && model.id() == LanguageModelId("claude-haiku-4-5-latest".into()) + }) + else { + return Task::ready(Err(anyhow!("could not find claude model"))); + }; + + let mut edits_string = String::new(); + + for event in events { + if let Some(event) = event.to_request_event(cx) { + writeln!(&mut edits_string, "{event}").ok(); + } + } + + if edits_string.is_empty() { + edits_string.push_str("(No user edits yet)"); + } + + // TODO [zeta2] include breadcrumbs? 
+ let snapshot = buffer.read(cx).snapshot(); + let cursor_point = cursor_position.to_point(&snapshot); + let Some(cursor_excerpt) = + EditPredictionExcerpt::select_from_buffer(cursor_point, &snapshot, &options.excerpt, None) + else { + return Task::ready(Ok(HashMap::default())); + }; + + let current_file_path = snapshot + .file() + .map(|f| f.full_path(cx).display().to_string()) + .unwrap_or_else(|| "untitled".to_string()); + + let prompt = SEARCH_PROMPT + .replace("{edits}", &edits_string) + .replace("{current_file_path}", ¤t_file_path) + .replace("{cursor_excerpt}", &cursor_excerpt.text(&snapshot).body); + + let path_style = project.read(cx).path_style(cx); + + let exclude_matcher = { + let global_settings = WorktreeSettings::get_global(cx); + let exclude_patterns = global_settings + .file_scan_exclusions + .sources() + .iter() + .chain(global_settings.private_files.sources().iter()); + + match PathMatcher::new(exclude_patterns, path_style) { + Ok(matcher) => matcher, + Err(err) => { + return Task::ready(Err(anyhow!(err))); + } + } + }; + + let project = project.clone(); + cx.spawn(async move |cx| { + let initial_prompt_message = LanguageModelRequestMessage { + role: Role::User, + content: vec![prompt.into()], + cache: false, + }; + + let mut search_stream = request_tool_call::( + vec![initial_prompt_message.clone()], + SEARCH_TOOL_NAME, + &model, + cx, + ) + .await?; + + let mut select_request_messages = Vec::with_capacity(5); // initial prompt, LLM response/thinking, tool use, tool result, select prompt + select_request_messages.push(initial_prompt_message); + let mut search_calls = Vec::new(); + + while let Some(event) = search_stream.next().await { + match event? 
{ + LanguageModelCompletionEvent::ToolUse(tool_use) => { + if !tool_use.is_input_complete { + continue; + } + + if tool_use.name.as_ref() == SEARCH_TOOL_NAME { + search_calls.push((select_request_messages.len(), tool_use)); + } else { + log::warn!( + "context gathering model tried to use unknown tool: {}", + tool_use.name + ); + } + } + LanguageModelCompletionEvent::Text(txt) => { + if let Some(LanguageModelRequestMessage { + role: Role::Assistant, + content, + .. + }) = select_request_messages.last_mut() + { + if let Some(MessageContent::Text(existing_text)) = content.last_mut() { + existing_text.push_str(&txt); + } else { + content.push(MessageContent::Text(txt)); + } + } else { + select_request_messages.push(LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::Text(txt)], + cache: false, + }); + } + } + LanguageModelCompletionEvent::Thinking { text, signature } => { + if let Some(LanguageModelRequestMessage { + role: Role::Assistant, + content, + .. + }) = select_request_messages.last_mut() + { + if let Some(MessageContent::Thinking { + text: existing_text, + signature: existing_signature, + }) = content.last_mut() + { + existing_text.push_str(&text); + *existing_signature = signature; + } else { + content.push(MessageContent::Thinking { text, signature }); + } + } else { + select_request_messages.push(LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::Thinking { text, signature }], + cache: false, + }); + } + } + LanguageModelCompletionEvent::RedactedThinking { data } => { + if let Some(LanguageModelRequestMessage { + role: Role::Assistant, + content, + .. 
+ }) = select_request_messages.last_mut() + { + if let Some(MessageContent::RedactedThinking(existing_data)) = + content.last_mut() + { + existing_data.push_str(&data); + } else { + content.push(MessageContent::RedactedThinking(data)); + } + } else { + select_request_messages.push(LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::RedactedThinking(data)], + cache: false, + }); + } + } + ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } => { + log::error!("{ev:?}"); + } + ev => { + log::trace!("context search event: {ev:?}") + } + } + } + + struct ResultBuffer { + buffer: Entity, + snapshot: TextBufferSnapshot, + } + + let search_queries = search_calls + .iter() + .map(|(_, tool_use)| { + Ok(serde_json::from_value::( + tool_use.input.clone(), + )?) + }) + .collect::>>()?; + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::SearchQueriesGenerated( + ZetaSearchQueryDebugInfo { + project: project.clone(), + timestamp: Instant::now(), + queries: search_queries + .iter() + .flat_map(|call| call.queries.iter().cloned()) + .collect(), + }, + )) + .ok(); + } + + let mut result_buffers_by_path = HashMap::default(); + + for ((index, tool_use), call) in search_calls.into_iter().zip(search_queries).rev() { + let mut excerpts_by_buffer = HashMap::default(); + + for query in call.queries { + // TODO [zeta2] parallelize? 
+ + run_query( + query, + &mut excerpts_by_buffer, + path_style, + exclude_matcher.clone(), + &project, + cx, + ) + .await?; + } + + if excerpts_by_buffer.is_empty() { + continue; + } + + let mut merged_result = RESULTS_MESSAGE.to_string(); + + for (buffer_entity, mut excerpts_for_buffer) in excerpts_by_buffer { + excerpts_for_buffer.sort_unstable_by_key(|range| (range.start, Reverse(range.end))); + + buffer_entity + .clone() + .read_with(cx, |buffer, cx| { + let Some(file) = buffer.file() else { + return; + }; + + let path = file.full_path(cx); + + writeln!(&mut merged_result, "`````filename={}", path.display()).unwrap(); + + let snapshot = buffer.snapshot(); + + write_merged_excerpts( + &snapshot, + excerpts_for_buffer, + &[], + &mut merged_result, + ); + + merged_result.push_str("`````\n\n"); + + result_buffers_by_path.insert( + path, + ResultBuffer { + buffer: buffer_entity, + snapshot: snapshot.text, + }, + ); + }) + .ok(); + } + + let tool_result = LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: SEARCH_TOOL_NAME.into(), + is_error: false, + content: merged_result.into(), + output: None, + }; + + // Almost always appends at the end, but in theory, the model could return some text after the tool call + // or perform parallel tool calls, so we splice at the message index for correctness. 
+ select_request_messages.splice( + index..index, + [ + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::ToolUse(tool_use)], + cache: false, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(tool_result)], + cache: false, + }, + ], + ); + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted( + ZetaContextRetrievalDebugInfo { + project: project.clone(), + timestamp: Instant::now(), + }, + )) + .ok(); + } + } + + if result_buffers_by_path.is_empty() { + log::trace!("context gathering queries produced no results"); + return anyhow::Ok(HashMap::default()); + } + + select_request_messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![SELECT_PROMPT.into()], + cache: false, + }); + + let mut select_stream = request_tool_call::( + select_request_messages, + SELECT_TOOL_NAME, + &model, + cx, + ) + .await?; + let mut selected_ranges = Vec::new(); + + while let Some(event) = select_stream.next().await { + match event? { + LanguageModelCompletionEvent::ToolUse(tool_use) => { + if !tool_use.is_input_complete { + continue; + } + + if tool_use.name.as_ref() == SELECT_TOOL_NAME { + let call = + serde_json::from_value::(tool_use.input.clone())?; + selected_ranges.extend(call.ranges); + } else { + log::warn!( + "context gathering model tried to use unknown tool: {}", + tool_use.name + ); + } + } + ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. 
} => { + log::error!("{ev:?}"); + } + ev => { + log::trace!("context select event: {ev:?}") + } + } + } + + if selected_ranges.is_empty() { + log::trace!("context gathering selected no ranges") + } + + let mut related_excerpts_by_buffer: HashMap<_, Vec<_>> = HashMap::default(); + + for selected_range in selected_ranges { + if let Some(ResultBuffer { buffer, snapshot }) = + result_buffers_by_path.get(&selected_range.path) + { + let start_point = Point::new(selected_range.start_line.saturating_sub(1), 0); + let end_point = + snapshot.clip_point(Point::new(selected_range.end_line, 0), Bias::Left); + let range = snapshot.anchor_after(start_point)..snapshot.anchor_before(end_point); + + related_excerpts_by_buffer + .entry(buffer.clone()) + .or_default() + .push(range); + } else { + log::warn!( + "selected path that wasn't included in search results: {}", + selected_range.path.display() + ); + } + } + + for (buffer, ranges) in &mut related_excerpts_by_buffer { + buffer.read_with(cx, |buffer, _cx| { + ranges.sort_unstable_by(|a, b| { + a.start + .cmp(&b.start, buffer) + .then(b.end.cmp(&a.end, buffer)) + }); + })?; + } + + anyhow::Ok(related_excerpts_by_buffer) + }) +} + +async fn request_tool_call( + messages: Vec, + tool_name: &'static str, + model: &Arc, + cx: &mut AsyncApp, +) -> Result>> +{ + let schema = schemars::schema_for!(T); + + let request = LanguageModelRequest { + messages, + tools: vec![LanguageModelRequestTool { + name: tool_name.into(), + description: schema + .get("description") + .and_then(|description| description.as_str()) + .unwrap() + .to_string(), + input_schema: serde_json::to_value(schema).unwrap(), + }], + ..Default::default() + }; + + Ok(model.stream_completion(request, cx).await?) 
+} + +const MIN_EXCERPT_LEN: usize = 16; +const MAX_EXCERPT_LEN: usize = 768; +const MAX_RESULT_BYTES_PER_QUERY: usize = MAX_EXCERPT_LEN * 5; + +async fn run_query( + args: SearchToolQuery, + excerpts_by_buffer: &mut HashMap, Vec>>, + path_style: PathStyle, + exclude_matcher: PathMatcher, + project: &Entity, + cx: &mut AsyncApp, +) -> Result<()> { + let include_matcher = PathMatcher::new(vec![args.glob], path_style)?; + + let query = SearchQuery::regex( + &args.regex, + false, + args.case_sensitive, + false, + true, + include_matcher, + exclude_matcher, + true, + None, + )?; + + let results = project.update(cx, |project, cx| project.search(query, cx))?; + futures::pin_mut!(results); + + let mut total_bytes = 0; + + while let Some(SearchResult::Buffer { buffer, ranges }) = results.next().await { + if ranges.is_empty() { + continue; + } + + let excerpts_for_buffer = excerpts_by_buffer + .entry(buffer.clone()) + .or_insert_with(|| Vec::with_capacity(ranges.len())); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + + for range in ranges { + let offset_range = range.to_offset(&snapshot); + let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot); + + if total_bytes + MIN_EXCERPT_LEN >= MAX_RESULT_BYTES_PER_QUERY { + break; + } + + let excerpt = EditPredictionExcerpt::select_from_buffer( + query_point, + &snapshot, + &EditPredictionExcerptOptions { + max_bytes: MAX_EXCERPT_LEN.min(MAX_RESULT_BYTES_PER_QUERY - total_bytes), + min_bytes: MIN_EXCERPT_LEN, + target_before_cursor_over_total_bytes: 0.5, + }, + None, + ); + + if let Some(excerpt) = excerpt { + total_bytes += excerpt.range.len(); + if !excerpt.line_range.is_empty() { + excerpts_for_buffer.push(excerpt.line_range); + } + } + } + + if excerpts_for_buffer.is_empty() { + excerpts_by_buffer.remove(&buffer); + } + } + + anyhow::Ok(()) +} diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index 
42eb565502e6568491e820dfb5c0921e4d56039b..81fc4172592c59ca47527c545ee8d57398ae1247 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -6,10 +6,12 @@ use cloud_llm_client::{ AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME, ZED_VERSION_HEADER_NAME, }; -use cloud_zeta2_prompt::{DEFAULT_MAX_PROMPT_BYTES, PlannedPrompt}; +use cloud_zeta2_prompt::{DEFAULT_MAX_PROMPT_BYTES, build_prompt}; +use collections::HashMap; use edit_prediction_context::{ DeclarationId, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions, - EditPredictionExcerptOptions, EditPredictionScoreOptions, SyntaxIndex, SyntaxIndexState, + EditPredictionExcerpt, EditPredictionExcerptOptions, EditPredictionScoreOptions, Line, + SyntaxIndex, SyntaxIndexState, }; use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; use futures::AsyncReadExt as _; @@ -19,25 +21,32 @@ use gpui::{ App, Entity, EntityId, Global, SemanticVersion, SharedString, Subscription, Task, WeakEntity, http_client, prelude::*, }; -use language::BufferSnapshot; -use language::{Buffer, DiagnosticSet, LanguageServerId, ToOffset as _, ToPoint}; +use language::{Anchor, Buffer, DiagnosticSet, LanguageServerId, ToOffset as _, ToPoint}; +use language::{BufferSnapshot, OffsetRangeExt}; use language_model::{LlmApiToken, RefreshLlmTokenListener}; use project::Project; use release_channel::AppVersion; use serde::de::DeserializeOwned; -use std::collections::{HashMap, VecDeque, hash_map}; +use std::collections::{VecDeque, hash_map}; +use std::ops::Range; use std::path::Path; use std::str::FromStr as _; use std::sync::Arc; use std::time::{Duration, Instant}; use thiserror::Error; +use util::ResultExt as _; use util::rel_path::RelPathBuf; use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; +mod merge_excerpts; mod prediction; mod provider; +mod related_excerpts; +use crate::merge_excerpts::merge_excerpts; use 
crate::prediction::EditPrediction; +use crate::related_excerpts::find_related_excerpts; +pub use crate::related_excerpts::{LlmContextOptions, SearchToolQuery}; pub use provider::ZetaEditPredictionProvider; const BUFFER_CHANGE_GROUPING_INTERVAL: Duration = Duration::from_secs(1); @@ -45,19 +54,28 @@ const BUFFER_CHANGE_GROUPING_INTERVAL: Duration = Duration::from_secs(1); /// Maximum number of events to track. const MAX_EVENT_COUNT: usize = 16; -pub const DEFAULT_CONTEXT_OPTIONS: EditPredictionContextOptions = EditPredictionContextOptions { - use_imports: true, - max_retrieved_declarations: 0, - excerpt: EditPredictionExcerptOptions { - max_bytes: 512, - min_bytes: 128, - target_before_cursor_over_total_bytes: 0.5, - }, - score: EditPredictionScoreOptions { - omit_excerpt_overlaps: true, - }, +pub const DEFAULT_EXCERPT_OPTIONS: EditPredictionExcerptOptions = EditPredictionExcerptOptions { + max_bytes: 512, + min_bytes: 128, + target_before_cursor_over_total_bytes: 0.5, +}; + +pub const DEFAULT_CONTEXT_OPTIONS: ContextMode = ContextMode::Llm(DEFAULT_LLM_CONTEXT_OPTIONS); + +pub const DEFAULT_LLM_CONTEXT_OPTIONS: LlmContextOptions = LlmContextOptions { + excerpt: DEFAULT_EXCERPT_OPTIONS, }; +pub const DEFAULT_SYNTAX_CONTEXT_OPTIONS: EditPredictionContextOptions = + EditPredictionContextOptions { + use_imports: true, + max_retrieved_declarations: 0, + excerpt: DEFAULT_EXCERPT_OPTIONS, + score: EditPredictionScoreOptions { + omit_excerpt_overlaps: true, + }, + }; + pub const DEFAULT_OPTIONS: ZetaOptions = ZetaOptions { context: DEFAULT_CONTEXT_OPTIONS, max_prompt_bytes: DEFAULT_MAX_PROMPT_BYTES, @@ -89,19 +107,47 @@ pub struct Zeta { projects: HashMap, options: ZetaOptions, update_required: bool, - debug_tx: Option>, + debug_tx: Option>, } #[derive(Debug, Clone, PartialEq)] pub struct ZetaOptions { - pub context: EditPredictionContextOptions, + pub context: ContextMode, pub max_prompt_bytes: usize, pub max_diagnostic_bytes: usize, pub prompt_format: 
predict_edits_v3::PromptFormat, pub file_indexing_parallelism: usize, } -pub struct PredictionDebugInfo { +#[derive(Debug, Clone, PartialEq)] +pub enum ContextMode { + Llm(LlmContextOptions), + Syntax(EditPredictionContextOptions), +} + +impl ContextMode { + pub fn excerpt(&self) -> &EditPredictionExcerptOptions { + match self { + ContextMode::Llm(options) => &options.excerpt, + ContextMode::Syntax(options) => &options.excerpt, + } + } +} + +pub enum ZetaDebugInfo { + ContextRetrievalStarted(ZetaContextRetrievalDebugInfo), + SearchQueriesGenerated(ZetaSearchQueryDebugInfo), + SearchQueriesExecuted(ZetaContextRetrievalDebugInfo), + ContextRetrievalFinished(ZetaContextRetrievalDebugInfo), + EditPredicted(ZetaEditPredictionDebugInfo), +} + +pub struct ZetaContextRetrievalDebugInfo { + pub project: Entity, + pub timestamp: Instant, +} + +pub struct ZetaEditPredictionDebugInfo { pub request: predict_edits_v3::PredictEditsRequest, pub retrieval_time: TimeDelta, pub buffer: WeakEntity, @@ -110,6 +156,12 @@ pub struct PredictionDebugInfo { pub response_rx: oneshot::Receiver>, } +pub struct ZetaSearchQueryDebugInfo { + pub project: Entity, + pub timestamp: Instant, + pub queries: Vec, +} + pub type RequestDebugInfo = predict_edits_v3::DebugInfo; struct ZetaProject { @@ -117,6 +169,10 @@ struct ZetaProject { events: VecDeque, registered_buffers: HashMap, current_prediction: Option, + context: Option, Vec>>>, + refresh_context_task: Option>>, + refresh_context_debounce_task: Option>>, + refresh_context_timestamp: Option, } #[derive(Debug, Clone)] @@ -183,6 +239,44 @@ pub enum Event { }, } +impl Event { + pub fn to_request_event(&self, cx: &App) -> Option { + match self { + Event::BufferChange { + old_snapshot, + new_snapshot, + .. 
+ } => { + let path = new_snapshot.file().map(|f| f.full_path(cx)); + + let old_path = old_snapshot.file().and_then(|f| { + let old_path = f.full_path(cx); + if Some(&old_path) != path.as_ref() { + Some(old_path) + } else { + None + } + }); + + // TODO [zeta2] move to bg? + let diff = language::unified_diff(&old_snapshot.text(), &new_snapshot.text()); + + if path == old_path && diff.is_empty() { + None + } else { + Some(predict_edits_v3::Event::BufferChange { + old_path, + path, + diff, + //todo: Actually detect if this edit was predicted or not + predicted: false, + }) + } + } + } + } +} + impl Zeta { pub fn try_global(cx: &App) -> Option> { cx.try_global::().map(|global| global.0.clone()) @@ -206,7 +300,7 @@ impl Zeta { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); Self { - projects: HashMap::new(), + projects: HashMap::default(), client, user_store, options: DEFAULT_OPTIONS, @@ -228,7 +322,7 @@ impl Zeta { } } - pub fn debug_info(&mut self) -> mpsc::UnboundedReceiver { + pub fn debug_info(&mut self) -> mpsc::UnboundedReceiver { let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded(); self.debug_tx = Some(debug_watch_tx); debug_watch_rx @@ -248,6 +342,33 @@ impl Zeta { } } + pub fn history_for_project(&self, project: &Entity) -> impl Iterator { + self.projects + .get(&project.entity_id()) + .map(|project| project.events.iter()) + .into_iter() + .flatten() + } + + pub fn context_for_project( + &self, + project: &Entity, + ) -> impl Iterator, &[Range])> { + self.projects + .get(&project.entity_id()) + .and_then(|project| { + Some( + project + .context + .as_ref()? 
+ .iter() + .map(|(buffer, ranges)| (buffer.clone(), ranges.as_slice())), + ) + }) + .into_iter() + .flatten() + } + pub fn usage(&self, cx: &App) -> Option { self.user_store.read(cx).edit_prediction_usage() } @@ -278,8 +399,12 @@ impl Zeta { SyntaxIndex::new(project, self.options.file_indexing_parallelism, cx) }), events: VecDeque::new(), - registered_buffers: HashMap::new(), + registered_buffers: HashMap::default(), current_prediction: None, + context: None, + refresh_context_task: None, + refresh_context_debounce_task: None, + refresh_context_timestamp: None, }) } @@ -507,7 +632,10 @@ impl Zeta { }); let options = self.options.clone(); let snapshot = buffer.read(cx).snapshot(); - let Some(excerpt_path) = snapshot.file().map(|path| path.full_path(cx).into()) else { + let Some(excerpt_path) = snapshot + .file() + .map(|path| -> Arc { path.full_path(cx).into() }) + else { return Task::ready(Err(anyhow!("No file path for excerpt"))); }; let client = self.client.clone(); @@ -525,40 +653,7 @@ impl Zeta { state .events .iter() - .filter_map(|event| match event { - Event::BufferChange { - old_snapshot, - new_snapshot, - .. - } => { - let path = new_snapshot.file().map(|f| f.full_path(cx)); - - let old_path = old_snapshot.file().and_then(|f| { - let old_path = f.full_path(cx); - if Some(&old_path) != path.as_ref() { - Some(old_path) - } else { - None - } - }); - - // TODO [zeta2] move to bg? 
- let diff = - language::unified_diff(&old_snapshot.text(), &new_snapshot.text()); - - if path == old_path && diff.is_empty() { - None - } else { - Some(predict_edits_v3::Event::BufferChange { - old_path, - path, - diff, - //todo: Actually detect if this edit was predicted or not - predicted: false, - }) - } - } - }) + .filter_map(|event| event.to_request_event(cx)) .collect::>() }) .unwrap_or_default(); @@ -573,6 +668,20 @@ impl Zeta { // TODO data collection let can_collect_data = cx.is_staff(); + let mut included_files = project_state + .and_then(|project_state| project_state.context.as_ref()) + .unwrap_or(&HashMap::default()) + .iter() + .filter_map(|(buffer, ranges)| { + let buffer = buffer.read(cx); + Some(( + buffer.snapshot(), + buffer.file()?.full_path(cx).into(), + ranges.clone(), + )) + }) + .collect::>(); + let request_task = cx.background_spawn({ let snapshot = snapshot.clone(); let buffer = buffer.clone(); @@ -588,18 +697,6 @@ impl Zeta { let before_retrieval = chrono::Utc::now(); - let Some(context) = EditPredictionContext::gather_context( - cursor_point, - &snapshot, - parent_abs_path.as_deref(), - &options.context, - index_state.as_deref(), - ) else { - return Ok((None, None)); - }; - - let retrieval_time = chrono::Utc::now() - before_retrieval; - let (diagnostic_groups, diagnostic_groups_truncated) = Self::gather_nearby_diagnostics( cursor_offset, @@ -608,37 +705,133 @@ impl Zeta { options.max_diagnostic_bytes, ); - let request = make_cloud_request( - excerpt_path, - context, - events, - can_collect_data, - diagnostic_groups, - diagnostic_groups_truncated, - None, - debug_tx.is_some(), - &worktree_snapshots, - index_state.as_deref(), - Some(options.max_prompt_bytes), - options.prompt_format, - ); + let request = match options.context { + ContextMode::Llm(context_options) => { + let Some(excerpt) = EditPredictionExcerpt::select_from_buffer( + cursor_point, + &snapshot, + &context_options.excerpt, + index_state.as_deref(), + ) else { + return 
Ok((None, None)); + }; + + let excerpt_anchor_range = snapshot.anchor_after(excerpt.range.start) + ..snapshot.anchor_before(excerpt.range.end); + + if let Some(buffer_ix) = included_files + .iter() + .position(|(buffer, _, _)| buffer.remote_id() == snapshot.remote_id()) + { + let (buffer, _, ranges) = &mut included_files[buffer_ix]; + let range_ix = ranges + .binary_search_by(|probe| { + probe + .start + .cmp(&excerpt_anchor_range.start, buffer) + .then(excerpt_anchor_range.end.cmp(&probe.end, buffer)) + }) + .unwrap_or_else(|ix| ix); + + ranges.insert(range_ix, excerpt_anchor_range); + let last_ix = included_files.len() - 1; + included_files.swap(buffer_ix, last_ix); + } else { + included_files.push(( + snapshot, + excerpt_path.clone(), + vec![excerpt_anchor_range], + )); + } + + let included_files = included_files + .into_iter() + .map(|(buffer, path, ranges)| { + let excerpts = merge_excerpts( + &buffer, + ranges.iter().map(|range| { + let point_range = range.to_point(&buffer); + Line(point_range.start.row)..Line(point_range.end.row) + }), + ); + predict_edits_v3::IncludedFile { + path, + max_row: Line(buffer.max_point().row), + excerpts, + } + }) + .collect::>(); + + predict_edits_v3::PredictEditsRequest { + excerpt_path, + excerpt: String::new(), + excerpt_line_range: Line(0)..Line(0), + excerpt_range: 0..0, + cursor_point: predict_edits_v3::Point { + line: predict_edits_v3::Line(cursor_point.row), + column: cursor_point.column, + }, + included_files, + referenced_declarations: vec![], + events, + can_collect_data, + diagnostic_groups, + diagnostic_groups_truncated, + debug_info: debug_tx.is_some(), + prompt_max_bytes: Some(options.max_prompt_bytes), + prompt_format: options.prompt_format, + // TODO [zeta2] + signatures: vec![], + excerpt_parent: None, + git_info: None, + } + } + ContextMode::Syntax(context_options) => { + let Some(context) = EditPredictionContext::gather_context( + cursor_point, + &snapshot, + parent_abs_path.as_deref(), + &context_options, + 
index_state.as_deref(), + ) else { + return Ok((None, None)); + }; + + make_syntax_context_cloud_request( + excerpt_path, + context, + events, + can_collect_data, + diagnostic_groups, + diagnostic_groups_truncated, + None, + debug_tx.is_some(), + &worktree_snapshots, + index_state.as_deref(), + Some(options.max_prompt_bytes), + options.prompt_format, + ) + } + }; + + let retrieval_time = chrono::Utc::now() - before_retrieval; let debug_response_tx = if let Some(debug_tx) = &debug_tx { let (response_tx, response_rx) = oneshot::channel(); - let local_prompt = PlannedPrompt::populate(&request) - .and_then(|p| p.to_prompt_string().map(|p| p.0)) + let local_prompt = build_prompt(&request) + .map(|(prompt, _)| prompt) .map_err(|err| err.to_string()); debug_tx - .unbounded_send(PredictionDebugInfo { + .unbounded_send(ZetaDebugInfo::EditPredicted(ZetaEditPredictionDebugInfo { request: request.clone(), retrieval_time, buffer: buffer.downgrade(), local_prompt, position, response_rx, - }) + })) .ok(); Some(response_tx) } else { @@ -827,6 +1020,127 @@ impl Zeta { } } + pub const CONTEXT_RETRIEVAL_IDLE_DURATION: Duration = Duration::from_secs(10); + pub const CONTEXT_RETRIEVAL_DEBOUNCE_DURATION: Duration = Duration::from_secs(3); + + // Refresh the related excerpts when the user just beguns editing after + // an idle period, and after they pause editing. + fn refresh_context_if_needed( + &mut self, + project: &Entity, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &mut Context, + ) { + if !matches!(&self.options().context, ContextMode::Llm { .. 
}) { + return; + } + + let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) else { + return; + }; + + let now = Instant::now(); + let was_idle = zeta_project + .refresh_context_timestamp + .map_or(true, |timestamp| { + now - timestamp > Self::CONTEXT_RETRIEVAL_IDLE_DURATION + }); + zeta_project.refresh_context_timestamp = Some(now); + zeta_project.refresh_context_debounce_task = Some(cx.spawn({ + let buffer = buffer.clone(); + let project = project.clone(); + async move |this, cx| { + if was_idle { + log::debug!("refetching edit prediction context after idle"); + } else { + cx.background_executor() + .timer(Self::CONTEXT_RETRIEVAL_DEBOUNCE_DURATION) + .await; + log::debug!("refetching edit prediction context after pause"); + } + this.update(cx, |this, cx| { + this.refresh_context(project, buffer, cursor_position, cx); + }) + .ok() + } + })); + } + + // Refresh the related excerpts asynchronously. Ensure the task runs to completion, + // and avoid spawning more than one concurrent task. 
+ fn refresh_context( + &mut self, + project: Entity, + buffer: Entity, + cursor_position: language::Anchor, + cx: &mut Context, + ) { + let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) else { + return; + }; + + let debug_tx = self.debug_tx.clone(); + + zeta_project + .refresh_context_task + .get_or_insert(cx.spawn(async move |this, cx| { + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted( + ZetaContextRetrievalDebugInfo { + project: project.clone(), + timestamp: Instant::now(), + }, + )) + .ok(); + } + + let related_excerpts = this + .update(cx, |this, cx| { + let Some(zeta_project) = this.projects.get(&project.entity_id()) else { + return Task::ready(anyhow::Ok(HashMap::default())); + }; + + let ContextMode::Llm(options) = &this.options().context else { + return Task::ready(anyhow::Ok(HashMap::default())); + }; + + find_related_excerpts( + buffer.clone(), + cursor_position, + &project, + zeta_project.events.iter(), + options, + debug_tx, + cx, + ) + }) + .ok()? 
+ .await + .log_err() + .unwrap_or_default(); + this.update(cx, |this, _cx| { + let Some(zeta_project) = this.projects.get_mut(&project.entity_id()) else { + return; + }; + zeta_project.context = Some(related_excerpts); + zeta_project.refresh_context_task.take(); + if let Some(debug_tx) = &this.debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::ContextRetrievalFinished( + ZetaContextRetrievalDebugInfo { + project, + timestamp: Instant::now(), + }, + )) + .ok(); + } + }) + .ok() + })); + } + fn gather_nearby_diagnostics( cursor_offset: usize, diagnostic_sets: &[(LanguageServerId, DiagnosticSet)], @@ -918,12 +1232,20 @@ impl Zeta { cursor_point, &snapshot, parent_abs_path.as_deref(), - &options.context, + match &options.context { + ContextMode::Llm(_) => { + // TODO + panic!("Llm mode not supported in zeta cli yet"); + } + ContextMode::Syntax(edit_prediction_context_options) => { + edit_prediction_context_options + } + }, index_state.as_deref(), ) .context("Failed to select excerpt") .map(|context| { - make_cloud_request( + make_syntax_context_cloud_request( excerpt_path.into(), context, // TODO pass everything @@ -963,7 +1285,7 @@ pub struct ZedUpdateRequiredError { minimum_version: SemanticVersion, } -fn make_cloud_request( +fn make_syntax_context_cloud_request( excerpt_path: Arc, context: EditPredictionContext, events: Vec, @@ -1044,6 +1366,7 @@ fn make_cloud_request( column: context.cursor_point.column, }, referenced_declarations, + included_files: vec![], signatures, excerpt_parent, events, diff --git a/crates/zeta2_tools/Cargo.toml b/crates/zeta2_tools/Cargo.toml index edd1b1eb242c6c02001bec53120425f9a05e5d1d..0877ee6f4661e7dcdbbae5241702951746b74725 100644 --- a/crates/zeta2_tools/Cargo.toml +++ b/crates/zeta2_tools/Cargo.toml @@ -12,6 +12,7 @@ workspace = true path = "src/zeta2_tools.rs" [dependencies] +anyhow.workspace = true chrono.workspace = true client.workspace = true cloud_llm_client.workspace = true diff --git 
a/crates/zeta2_tools/src/zeta2_context_view.rs b/crates/zeta2_tools/src/zeta2_context_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..421328df2c3f39d61352290c0ca5fd34ff39bb78 --- /dev/null +++ b/crates/zeta2_tools/src/zeta2_context_view.rs @@ -0,0 +1,412 @@ +use std::{ + any::TypeId, + collections::VecDeque, + ops::Add, + sync::Arc, + time::{Duration, Instant}, +}; + +use anyhow::Result; +use client::{Client, UserStore}; +use editor::{Editor, PathKey}; +use futures::StreamExt as _; +use gpui::{ + Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle, + Focusable, ParentElement as _, SharedString, Styled as _, Task, TextAlign, Window, actions, + pulsating_between, +}; +use multi_buffer::MultiBuffer; +use project::Project; +use text::OffsetRangeExt; +use ui::{ + ButtonCommon, Clickable, Color, Disableable, FluentBuilder as _, Icon, IconButton, IconName, + IconSize, InteractiveElement, IntoElement, ListItem, StyledTypography, div, h_flex, v_flex, +}; +use workspace::{Item, ItemHandle as _}; +use zeta2::{ + SearchToolQuery, Zeta, ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo, +}; + +pub struct Zeta2ContextView { + empty_focus_handle: FocusHandle, + project: Entity, + zeta: Entity, + runs: VecDeque, + current_ix: usize, + _update_task: Task>, +} + +#[derive(Debug)] +pub struct RetrievalRun { + editor: Entity, + search_queries: Vec, + started_at: Instant, + search_results_generated_at: Option, + search_results_executed_at: Option, + finished_at: Option, +} + +actions!( + dev, + [ + /// Go to the previous context retrieval run + Zeta2ContextGoBack, + /// Go to the next context retrieval run + Zeta2ContextGoForward + ] +); + +impl Zeta2ContextView { + pub fn new( + project: Entity, + client: &Arc, + user_store: &Entity, + window: &mut gpui::Window, + cx: &mut Context, + ) -> Self { + let zeta = Zeta::global(client, user_store, cx); + + let mut debug_rx = zeta.update(cx, |zeta, _| 
zeta.debug_info()); + let _update_task = cx.spawn_in(window, async move |this, cx| { + while let Some(event) = debug_rx.next().await { + this.update_in(cx, |this, window, cx| { + this.handle_zeta_event(event, window, cx) + })?; + } + Ok(()) + }); + + Self { + empty_focus_handle: cx.focus_handle(), + project, + runs: VecDeque::new(), + current_ix: 0, + zeta, + _update_task, + } + } + + fn handle_zeta_event( + &mut self, + event: ZetaDebugInfo, + window: &mut gpui::Window, + cx: &mut Context, + ) { + match event { + ZetaDebugInfo::ContextRetrievalStarted(info) => { + if info.project == self.project { + self.handle_context_retrieval_started(info, window, cx); + } + } + ZetaDebugInfo::SearchQueriesGenerated(info) => { + if info.project == self.project { + self.handle_search_queries_generated(info, window, cx); + } + } + ZetaDebugInfo::SearchQueriesExecuted(info) => { + if info.project == self.project { + self.handle_search_queries_executed(info, window, cx); + } + } + ZetaDebugInfo::ContextRetrievalFinished(info) => { + if info.project == self.project { + self.handle_context_retrieval_finished(info, window, cx); + } + } + ZetaDebugInfo::EditPredicted(_) => {} + } + } + + fn handle_context_retrieval_started( + &mut self, + info: ZetaContextRetrievalDebugInfo, + window: &mut Window, + cx: &mut Context, + ) { + if self + .runs + .back() + .is_some_and(|run| run.search_results_executed_at.is_none()) + { + self.runs.pop_back(); + } + + let multibuffer = cx.new(|_| MultiBuffer::new(language::Capability::ReadOnly)); + let editor = cx + .new(|cx| Editor::for_multibuffer(multibuffer, Some(self.project.clone()), window, cx)); + + if self.runs.len() == 32 { + self.runs.pop_front(); + } + + self.runs.push_back(RetrievalRun { + editor, + search_queries: Vec::new(), + started_at: info.timestamp, + search_results_generated_at: None, + search_results_executed_at: None, + finished_at: None, + }); + + cx.notify(); + } + + fn handle_context_retrieval_finished( + &mut self, + info: 
ZetaContextRetrievalDebugInfo, + window: &mut Window, + cx: &mut Context, + ) { + let Some(run) = self.runs.back_mut() else { + return; + }; + + run.finished_at = Some(info.timestamp); + + let multibuffer = run.editor.read(cx).buffer().clone(); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.clear(cx); + + let context = self.zeta.read(cx).context_for_project(&self.project); + let mut paths = Vec::new(); + for (buffer, ranges) in context { + let path = PathKey::for_buffer(&buffer, cx); + let snapshot = buffer.read(cx).snapshot(); + let ranges = ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>(); + paths.push((path, buffer, ranges)); + } + + for (path, buffer, ranges) in paths { + multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); + } + }); + + run.editor.update(cx, |editor, cx| { + editor.move_to_beginning(&Default::default(), window, cx); + }); + + cx.notify(); + } + + fn handle_search_queries_generated( + &mut self, + info: ZetaSearchQueryDebugInfo, + _window: &mut Window, + cx: &mut Context, + ) { + let Some(run) = self.runs.back_mut() else { + return; + }; + + run.search_results_generated_at = Some(info.timestamp); + run.search_queries = info.queries; + cx.notify(); + } + + fn handle_search_queries_executed( + &mut self, + info: ZetaContextRetrievalDebugInfo, + _window: &mut Window, + cx: &mut Context, + ) { + if self.current_ix + 2 == self.runs.len() { + // Switch to latest when the queries are executed + self.current_ix += 1; + } + + let Some(run) = self.runs.back_mut() else { + return; + }; + + run.search_results_executed_at = Some(info.timestamp); + cx.notify(); + } + + fn handle_go_back( + &mut self, + _: &Zeta2ContextGoBack, + window: &mut Window, + cx: &mut Context, + ) { + self.current_ix = self.current_ix.saturating_sub(1); + cx.focus_self(window); + cx.notify(); + } + + fn handle_go_forward( + &mut self, + _: &Zeta2ContextGoForward, + window: &mut Window, + cx: &mut Context, + ) { + self.current_ix = self 
+ .current_ix + .add(1) + .min(self.runs.len().saturating_sub(1)); + cx.focus_self(window); + cx.notify(); + } + + fn render_informational_footer(&self, cx: &mut Context<'_, Zeta2ContextView>) -> ui::Div { + let is_latest = self.runs.len() == self.current_ix + 1; + let run = &self.runs[self.current_ix]; + + h_flex() + .w_full() + .font_buffer(cx) + .text_xs() + .border_t_1() + .child( + v_flex() + .h_full() + .flex_1() + .children(run.search_queries.iter().enumerate().map(|(ix, query)| { + ListItem::new(ix) + .start_slot( + Icon::new(IconName::MagnifyingGlass) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(query.regex.clone()) + })), + ) + .child( + v_flex() + .h_full() + .pr_2() + .text_align(TextAlign::Right) + .child( + h_flex() + .justify_end() + .child( + IconButton::new("go-back", IconName::ChevronLeft) + .disabled(self.current_ix == 0 || self.runs.len() < 2) + .tooltip(ui::Tooltip::for_action_title( + "Go to previous run", + &Zeta2ContextGoBack, + )) + .on_click(cx.listener(|this, _, window, cx| { + this.handle_go_back(&Zeta2ContextGoBack, window, cx); + })), + ) + .child( + div() + .child(format!("{}/{}", self.current_ix + 1, self.runs.len())) + .map(|this| { + if self.runs.back().is_some_and(|back| { + back.search_results_executed_at.is_none() + }) { + this.with_animation( + "pulsating-count", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.opacity(delta), + ) + .into_any_element() + } else { + this.into_any_element() + } + }), + ) + .child( + IconButton::new("go-forward", IconName::ChevronRight) + .disabled(self.current_ix + 1 == self.runs.len()) + .tooltip(ui::Tooltip::for_action_title( + "Go to next run", + &Zeta2ContextGoBack, + )) + .on_click(cx.listener(|this, _, window, cx| { + this.handle_go_forward(&Zeta2ContextGoForward, window, cx); + })), + ), + ) + .map(|mut div| { + let t0 = run.started_at; + let Some(t1) = run.search_results_generated_at else { + return 
div.child("Planning search..."); + }; + div = div.child(format!("Planned search: {:>5} ms", (t1 - t0).as_millis())); + + let Some(t2) = run.search_results_executed_at else { + return div.child("Running search..."); + }; + div = div.child(format!("Ran search: {:>5} ms", (t2 - t1).as_millis())); + + let Some(t3) = run.finished_at else { + if is_latest { + return div.child("Filtering results..."); + } else { + return div.child("Canceled"); + } + }; + div.child(format!("Filtered results: {:>5} ms", (t3 - t2).as_millis())) + }), + ) + } +} + +impl Focusable for Zeta2ContextView { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.runs + .get(self.current_ix) + .map(|run| run.editor.read(cx).focus_handle(cx)) + .unwrap_or_else(|| self.empty_focus_handle.clone()) + } +} + +impl EventEmitter<()> for Zeta2ContextView {} + +impl Item for Zeta2ContextView { + type Event = (); + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + "Edit Prediction Context".into() + } + + fn buffer_kind(&self, _cx: &App) -> workspace::item::ItemBufferKind { + workspace::item::ItemBufferKind::Multibuffer + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a Entity, + _: &'a App, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.runs.get(self.current_ix)?.editor.to_any()) + } else { + None + } + } +} + +impl gpui::Render for Zeta2ContextView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { + v_flex() + .key_context("Zeta2Context") + .on_action(cx.listener(Self::handle_go_back)) + .on_action(cx.listener(Self::handle_go_forward)) + .size_full() + .map(|this| { + if self.runs.is_empty() { + this.child( + v_flex() + .size_full() + .justify_center() + .items_center() + .child("No retrieval runs yet"), + ) + } else { + this.child(self.runs[self.current_ix].editor.clone()) + .child(self.render_informational_footer(cx)) + } + }) + 
} +} diff --git a/crates/zeta2_tools/src/zeta2_tools.rs b/crates/zeta2_tools/src/zeta2_tools.rs index 2319df2a49d04c7e73180830ecf9778380bbf025..0b4a59844d7b4a02c2f41ff7654c7df0c4292f7a 100644 --- a/crates/zeta2_tools/src/zeta2_tools.rs +++ b/crates/zeta2_tools/src/zeta2_tools.rs @@ -1,3 +1,5 @@ +mod zeta2_context_view; + use std::{cmp::Reverse, path::PathBuf, str::FromStr, sync::Arc, time::Duration}; use chrono::TimeDelta; @@ -20,14 +22,20 @@ use ui::{ButtonLike, ContextMenu, ContextMenuEntry, DropdownMenu, KeyBinding, pr use ui_input::InputField; use util::{ResultExt, paths::PathStyle, rel_path::RelPath}; use workspace::{Item, SplitDirection, Workspace}; -use zeta2::{PredictionDebugInfo, Zeta, Zeta2FeatureFlag, ZetaOptions}; +use zeta2::{ + ContextMode, DEFAULT_SYNTAX_CONTEXT_OPTIONS, LlmContextOptions, Zeta, Zeta2FeatureFlag, + ZetaDebugInfo, ZetaEditPredictionDebugInfo, ZetaOptions, +}; use edit_prediction_context::{EditPredictionContextOptions, EditPredictionExcerptOptions}; +use zeta2_context_view::Zeta2ContextView; actions!( dev, [ - /// Opens the language server protocol logs viewer. + /// Opens the edit prediction context view. + OpenZeta2ContextView, + /// Opens the edit prediction inspector. OpenZeta2Inspector, /// Rate prediction as positive. 
Zeta2RatePredictionPositive, @@ -57,6 +65,27 @@ pub fn init(cx: &mut App) { }); }) .detach(); + + cx.observe_new(move |workspace: &mut Workspace, _, _cx| { + workspace.register_action(move |workspace, _: &OpenZeta2ContextView, window, cx| { + let project = workspace.project(); + workspace.split_item( + SplitDirection::Right, + Box::new(cx.new(|cx| { + Zeta2ContextView::new( + project.clone(), + workspace.client(), + workspace.user_store(), + window, + cx, + ) + })), + window, + cx, + ); + }); + }) + .detach(); } // TODO show included diagnostics, and events @@ -69,7 +98,7 @@ pub struct Zeta2Inspector { min_excerpt_bytes_input: Entity, cursor_context_ratio_input: Entity, max_prompt_bytes_input: Entity, - max_retrieved_declarations: Entity, + context_mode: ContextModeState, active_view: ActiveView, zeta: Entity, _active_editor_subscription: Option, @@ -77,6 +106,13 @@ pub struct Zeta2Inspector { _receive_task: Task<()>, } +pub enum ContextModeState { + Llm, + Syntax { + max_retrieved_declarations: Entity, + }, +} + #[derive(PartialEq)] enum ActiveView { Context, @@ -143,36 +179,34 @@ impl Zeta2Inspector { min_excerpt_bytes_input: Self::number_input("Min Excerpt Bytes", window, cx), cursor_context_ratio_input: Self::number_input("Cursor Context Ratio", window, cx), max_prompt_bytes_input: Self::number_input("Max Prompt Bytes", window, cx), - max_retrieved_declarations: Self::number_input("Max Retrieved Definitions", window, cx), + context_mode: ContextModeState::Llm, zeta: zeta.clone(), _active_editor_subscription: None, _update_state_task: Task::ready(()), _receive_task: receive_task, }; - this.set_input_options(&zeta.read(cx).options().clone(), window, cx); + this.set_options_state(&zeta.read(cx).options().clone(), window, cx); this } - fn set_input_options( + fn set_options_state( &mut self, options: &ZetaOptions, window: &mut Window, cx: &mut Context, ) { + let excerpt_options = options.context.excerpt(); self.max_excerpt_bytes_input.update(cx, |input, cx| { - 
input.set_text(options.context.excerpt.max_bytes.to_string(), window, cx); + input.set_text(excerpt_options.max_bytes.to_string(), window, cx); }); self.min_excerpt_bytes_input.update(cx, |input, cx| { - input.set_text(options.context.excerpt.min_bytes.to_string(), window, cx); + input.set_text(excerpt_options.min_bytes.to_string(), window, cx); }); self.cursor_context_ratio_input.update(cx, |input, cx| { input.set_text( format!( "{:.2}", - options - .context - .excerpt - .target_before_cursor_over_total_bytes + excerpt_options.target_before_cursor_over_total_bytes ), window, cx, @@ -181,20 +215,28 @@ impl Zeta2Inspector { self.max_prompt_bytes_input.update(cx, |input, cx| { input.set_text(options.max_prompt_bytes.to_string(), window, cx); }); - self.max_retrieved_declarations.update(cx, |input, cx| { - input.set_text( - options.context.max_retrieved_declarations.to_string(), - window, - cx, - ); - }); + + match &options.context { + ContextMode::Llm(_) => { + self.context_mode = ContextModeState::Llm; + } + ContextMode::Syntax(_) => { + self.context_mode = ContextModeState::Syntax { + max_retrieved_declarations: Self::number_input( + "Max Retrieved Definitions", + window, + cx, + ), + }; + } + } cx.notify(); } - fn set_options(&mut self, options: ZetaOptions, cx: &mut Context) { + fn set_zeta_options(&mut self, options: ZetaOptions, cx: &mut Context) { self.zeta.update(cx, |this, _cx| this.set_options(options)); - const THROTTLE_TIME: Duration = Duration::from_millis(100); + const DEBOUNCE_TIME: Duration = Duration::from_millis(100); if let Some(prediction) = self.last_prediction.as_mut() { if let Some(buffer) = prediction.buffer.upgrade() { @@ -202,7 +244,7 @@ impl Zeta2Inspector { let zeta = self.zeta.clone(); let project = self.project.clone(); prediction._task = Some(cx.spawn(async move |_this, cx| { - cx.background_executor().timer(THROTTLE_TIME).await; + cx.background_executor().timer(DEBOUNCE_TIME).await; if let Some(task) = zeta .update(cx, |zeta, cx| { 
zeta.refresh_prediction(&project, &buffer, position, cx) @@ -255,25 +297,40 @@ impl Zeta2Inspector { let zeta_options = this.zeta.read(cx).options().clone(); - let context_options = EditPredictionContextOptions { - excerpt: EditPredictionExcerptOptions { - max_bytes: number_input_value(&this.max_excerpt_bytes_input, cx), - min_bytes: number_input_value(&this.min_excerpt_bytes_input, cx), - target_before_cursor_over_total_bytes: number_input_value( - &this.cursor_context_ratio_input, - cx, - ), - }, - max_retrieved_declarations: number_input_value( - &this.max_retrieved_declarations, + let excerpt_options = EditPredictionExcerptOptions { + max_bytes: number_input_value(&this.max_excerpt_bytes_input, cx), + min_bytes: number_input_value(&this.min_excerpt_bytes_input, cx), + target_before_cursor_over_total_bytes: number_input_value( + &this.cursor_context_ratio_input, cx, ), - ..zeta_options.context }; - this.set_options( + let context = match zeta_options.context { + ContextMode::Llm(_context_options) => ContextMode::Llm(LlmContextOptions { + excerpt: excerpt_options, + }), + ContextMode::Syntax(context_options) => { + let max_retrieved_declarations = match &this.context_mode { + ContextModeState::Llm => { + zeta2::DEFAULT_SYNTAX_CONTEXT_OPTIONS.max_retrieved_declarations + } + ContextModeState::Syntax { + max_retrieved_declarations, + } => number_input_value(max_retrieved_declarations, cx), + }; + + ContextMode::Syntax(EditPredictionContextOptions { + excerpt: excerpt_options, + max_retrieved_declarations, + ..context_options + }) + } + }; + + this.set_zeta_options( ZetaOptions { - context: context_options, + context, max_prompt_bytes: number_input_value(&this.max_prompt_bytes_input, cx), max_diagnostic_bytes: zeta_options.max_diagnostic_bytes, prompt_format: zeta_options.prompt_format, @@ -289,7 +346,7 @@ impl Zeta2Inspector { fn update_last_prediction( &mut self, - prediction: zeta2::PredictionDebugInfo, + prediction: zeta2::ZetaDebugInfo, window: &mut Window, cx: 
&mut Context, ) { @@ -309,6 +366,9 @@ impl Zeta2Inspector { let language_registry = self.project.read(cx).languages().clone(); async move |this, cx| { let mut languages = HashMap::default(); + let ZetaDebugInfo::EditPredicted(prediction) = prediction else { + return; + }; for ext in prediction .request .referenced_declarations @@ -419,7 +479,7 @@ impl Zeta2Inspector { editor }); - let PredictionDebugInfo { + let ZetaEditPredictionDebugInfo { response_rx, position, buffer, @@ -709,7 +769,7 @@ impl Zeta2Inspector { .style(ButtonStyle::Outlined) .size(ButtonSize::Large) .on_click(cx.listener(|this, _, window, cx| { - this.set_input_options(&zeta2::DEFAULT_OPTIONS, window, cx); + this.set_options_state(&zeta2::DEFAULT_OPTIONS, window, cx); })), ), ) @@ -722,19 +782,113 @@ impl Zeta2Inspector { .items_end() .child(self.max_excerpt_bytes_input.clone()) .child(self.min_excerpt_bytes_input.clone()) - .child(self.cursor_context_ratio_input.clone()), + .child(self.cursor_context_ratio_input.clone()) + .child(self.render_context_mode_dropdown(window, cx)), ) .child( h_flex() .gap_2() .items_end() - .child(self.max_retrieved_declarations.clone()) + .children(match &self.context_mode { + ContextModeState::Llm => None, + ContextModeState::Syntax { + max_retrieved_declarations, + } => Some(max_retrieved_declarations.clone()), + }) .child(self.max_prompt_bytes_input.clone()) .child(self.render_prompt_format_dropdown(window, cx)), ), ) } + fn render_context_mode_dropdown(&self, window: &mut Window, cx: &mut Context) -> Div { + let this = cx.weak_entity(); + + v_flex() + .gap_1p5() + .child( + Label::new("Context Mode") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + DropdownMenu::new( + "ep-ctx-mode", + match &self.context_mode { + ContextModeState::Llm => "LLM-based", + ContextModeState::Syntax { .. 
} => "Syntax", + }, + ContextMenu::build(window, cx, move |menu, _window, _cx| { + menu.item( + ContextMenuEntry::new("LLM-based") + .toggleable( + IconPosition::End, + matches!(self.context_mode, ContextModeState::Llm), + ) + .handler({ + let this = this.clone(); + move |window, cx| { + this.update(cx, |this, cx| { + let current_options = + this.zeta.read(cx).options().clone(); + match current_options.context.clone() { + ContextMode::Llm(_) => {} + ContextMode::Syntax(context_options) => { + let options = ZetaOptions { + context: ContextMode::Llm( + LlmContextOptions { + excerpt: context_options.excerpt, + }, + ), + ..current_options + }; + this.set_options_state(&options, window, cx); + this.set_zeta_options(options, cx); + } + } + }) + .ok(); + } + }), + ) + .item( + ContextMenuEntry::new("Syntax") + .toggleable( + IconPosition::End, + matches!(self.context_mode, ContextModeState::Syntax { .. }), + ) + .handler({ + move |window, cx| { + this.update(cx, |this, cx| { + let current_options = + this.zeta.read(cx).options().clone(); + match current_options.context.clone() { + ContextMode::Llm(context_options) => { + let options = ZetaOptions { + context: ContextMode::Syntax( + EditPredictionContextOptions { + excerpt: context_options.excerpt, + ..DEFAULT_SYNTAX_CONTEXT_OPTIONS + }, + ), + ..current_options + }; + this.set_options_state(&options, window, cx); + this.set_zeta_options(options, cx); + } + ContextMode::Syntax(_) => {} + } + }) + .ok(); + } + }), + ) + }), + ) + .style(ui::DropdownStyle::Outlined), + ) + } + fn render_prompt_format_dropdown(&self, window: &mut Window, cx: &mut Context) -> Div { let active_format = self.zeta.read(cx).options().prompt_format; let this = cx.weak_entity(); @@ -765,7 +919,7 @@ impl Zeta2Inspector { prompt_format, ..current_options }; - this.set_options(options, cx); + this.set_zeta_options(options, cx); }) .ok(); } diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index 
149b13719f2075143d81c164e8d91bbdaca17384..eea80898870d68a8ad361de43d4556438ed25444 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -20,6 +20,7 @@ use reqwest_client::ReqwestClient; use serde_json::json; use std::{collections::HashSet, path::PathBuf, process::exit, str::FromStr, sync::Arc}; use zeta::{PerformPredictEditsParams, Zeta}; +use zeta2::ContextMode; use crate::headless::ZetaCliAppState; use crate::source_location::SourceLocation; @@ -263,8 +264,8 @@ async fn get_context( })? .await?; - let planned_prompt = cloud_zeta2_prompt::PlannedPrompt::populate(&request)?; - let (prompt_string, section_labels) = planned_prompt.to_prompt_string()?; + let (prompt_string, section_labels) = + cloud_zeta2_prompt::build_prompt(&request)?; match zeta2_args.output_format { OutputFormat::Prompt => anyhow::Ok(prompt_string), @@ -301,7 +302,7 @@ async fn get_context( impl Zeta2Args { fn to_options(&self, omit_excerpt_overlaps: bool) -> zeta2::ZetaOptions { zeta2::ZetaOptions { - context: EditPredictionContextOptions { + context: ContextMode::Syntax(EditPredictionContextOptions { max_retrieved_declarations: self.max_retrieved_definitions, use_imports: !self.disable_imports_gathering, excerpt: EditPredictionExcerptOptions { @@ -313,7 +314,7 @@ impl Zeta2Args { score: EditPredictionScoreOptions { omit_excerpt_overlaps, }, - }, + }), max_diagnostic_bytes: self.max_diagnostic_bytes, max_prompt_bytes: self.max_prompt_bytes, prompt_format: self.prompt_format.clone().into(), diff --git a/crates/zeta_cli/src/retrieval_stats.rs b/crates/zeta_cli/src/retrieval_stats.rs index bf1f78200ec5dd9262b6ae8937695b690155e8e2..f2634b1323d92b7136c591627226161b2905a955 100644 --- a/crates/zeta_cli/src/retrieval_stats.rs +++ b/crates/zeta_cli/src/retrieval_stats.rs @@ -3,8 +3,8 @@ use ::util::{RangeExt, ResultExt as _}; use anyhow::{Context as _, Result}; use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents; use edit_prediction_context::{ - Declaration, 
DeclarationStyle, EditPredictionContext, Identifier, Imports, Reference, - ReferenceRegion, SyntaxIndex, SyntaxIndexState, references_in_range, + Declaration, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions, Identifier, + Imports, Reference, ReferenceRegion, SyntaxIndex, SyntaxIndexState, references_in_range, }; use futures::StreamExt as _; use futures::channel::mpsc; @@ -32,6 +32,7 @@ use std::{ time::Duration, }; use util::paths::PathStyle; +use zeta2::ContextMode; use crate::headless::ZetaCliAppState; use crate::source_location::SourceLocation; @@ -46,6 +47,10 @@ pub async fn retrieval_stats( options: zeta2::ZetaOptions, cx: &mut AsyncApp, ) -> Result { + let ContextMode::Syntax(context_options) = options.context.clone() else { + anyhow::bail!("retrieval stats only works in ContextMode::Syntax"); + }; + let options = Arc::new(options); let worktree_path = worktree.canonicalize()?; @@ -264,10 +269,10 @@ pub async fn retrieval_stats( .map(|project_file| { let index_state = index_state.clone(); let lsp_definitions = lsp_definitions.clone(); - let options = options.clone(); let output_tx = output_tx.clone(); let done_count = done_count.clone(); let file_snapshots = file_snapshots.clone(); + let context_options = context_options.clone(); cx.background_spawn(async move { let snapshot = project_file.snapshot; @@ -279,7 +284,7 @@ pub async fn retrieval_stats( &snapshot, ); - let imports = if options.context.use_imports { + let imports = if context_options.use_imports { Imports::gather(&snapshot, Some(&project_file.parent_abs_path)) } else { Imports::default() @@ -311,7 +316,7 @@ pub async fn retrieval_stats( &snapshot, &index_state, &file_snapshots, - &options, + &context_options, ) .await?; @@ -958,7 +963,7 @@ async fn retrieve_definitions( snapshot: &BufferSnapshot, index: &Arc, file_snapshots: &Arc>, - options: &Arc, + context_options: &EditPredictionContextOptions, ) -> Result { let mut single_reference_map = HashMap::default(); 
single_reference_map.insert(reference.identifier.clone(), vec![reference.clone()]); @@ -966,7 +971,7 @@ async fn retrieve_definitions( query_point, snapshot, imports, - &options.context, + &context_options, Some(&index), |_, _, _| single_reference_map, ); diff --git a/crates/zlog/src/filter.rs b/crates/zlog/src/filter.rs index 9a2de13cb3d33a1a6f4d17f7eddd4754cae40ea3..e2ca04be60f4fe7eba7cdb2fc9eb983092d2331a 100644 --- a/crates/zlog/src/filter.rs +++ b/crates/zlog/src/filter.rs @@ -41,6 +41,9 @@ const DEFAULT_FILTERS: &[(&str, log::LevelFilter)] = &[ ("blade_graphics", log::LevelFilter::Warn), #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))] ("naga::back::spv::writer", log::LevelFilter::Warn), + // usvg prints a lot of warnings on rendering an SVG with partial errors, which + // can happen a lot with the SVG preview + ("usvg::parser::style", log::LevelFilter::Error), ]; pub fn init_env_filter(filter: env_config::EnvFilter) { diff --git a/crates/zlog/src/zlog.rs b/crates/zlog/src/zlog.rs index 8254866b6f97f6479f55bd570d1bc63ab26b10d4..b65a2f34bc468c1fccd334487a0b386da238ce52 100644 --- a/crates/zlog/src/zlog.rs +++ b/crates/zlog/src/zlog.rs @@ -10,22 +10,28 @@ pub use sink::{flush, init_output_file, init_output_stderr, init_output_stdout}; pub const SCOPE_DEPTH_MAX: usize = 4; pub fn init() { - if let Err(err) = try_init() { + if let Err(err) = try_init(None) { log::error!("{err}"); eprintln!("{err}"); } } -pub fn try_init() -> anyhow::Result<()> { +pub fn try_init(filter: Option) -> anyhow::Result<()> { log::set_logger(&ZLOG)?; log::set_max_level(log::LevelFilter::max()); - process_env(); + process_env(filter); filter::refresh_from_settings(&std::collections::HashMap::default()); Ok(()) } pub fn init_test() { - if get_env_config().is_some() && try_init().is_ok() { + if get_env_config().is_some() && try_init(None).is_ok() { + init_output_stdout(); + } +} + +pub fn init_test_with(filter: &str) { + if 
try_init(Some(filter.to_owned())).is_ok() { init_output_stdout(); } } @@ -36,8 +42,8 @@ fn get_env_config() -> Option { .ok() } -pub fn process_env() { - let Some(env_config) = get_env_config() else { +pub fn process_env(filter: Option) { + let Some(env_config) = get_env_config().or(filter) else { return; }; match env_config::parse(&env_config) { diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 0b9fc289c540e43e9bef89b2c561c97a5c1928ef..9e5bbb1413fec9b021d73dce0f002c1e039c5da9 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -1,15 +1,14 @@ # Summary -# General +# Getting Started - [Getting Started](./getting-started.md) -- [System Requirements](./system-requirements.md) -- [Accounts](./accounts.md) -- [Linux](./linux.md) -- [Windows](./windows.md) +- [Installation](./installation.md) + - [Update](./update.md) + - [Uninstall](./uninstall.md) +- [Authenticate](./authentication.md) - [Telemetry](./telemetry.md) - [Troubleshooting](./troubleshooting.md) -- [Additional Learning Materials](./additional-learning-materials.md) # Configuration @@ -31,6 +30,8 @@ # Using Zed - [Multibuffers](./multibuffers.md) +- [Command Palette](./command-palette.md) +- [Command-line Interface](./command-line-interface.md) - [Outline Panel](./outline-panel.md) - [Code Completions](./completions.md) - [Channels](./channels.md) @@ -43,6 +44,11 @@ - [Environment Variables](./environment.md) - [REPL](./repl.md) +# Platform Support + +- [Windows](./windows.md) +- [Linux](./linux.md) + # AI - [Overview](./ai/overview.md) diff --git a/docs/src/additional-learning-materials.md b/docs/src/additional-learning-materials.md deleted file mode 100644 index 9ff7b3bc5c02e207d0bbf44443d03c0523729833..0000000000000000000000000000000000000000 --- a/docs/src/additional-learning-materials.md +++ /dev/null @@ -1,4 +0,0 @@ -# Additional Learning Materials - -- [Text Manipulation Kung Fu for the Aspiring Black Belt](https://zed.dev/blog/text-manipulation) -- [Hidden Gems: Team Edition Part 
1](https://zed.dev/blog/hidden-gems-team-edition-part-1) diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index 445b85337046d75b78ec4101051fc55c7dcf3752..0b9f4fe8fac40b1881b9087d4a079db055bc2539 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -78,11 +78,14 @@ Edit diffs also appear in individual buffers. If your active tab had edits made ## Adding Context {#adding-context} -Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant files, directories, and other context, manually adding context is still encouraged as a way to speed up and improve the AI's response quality. +Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant context, manually adding whatever would be useful to fulfill your prompt is still very encouraged as a way to not only improve the AI's response quality but also to speed its response time up. -To add any file, directory, symbol, previous threads, rules files, or even web pages as context, type `@` to mention them in the editor. +In Zed's Agent Panel, all pieces of context are added as mentions in the panel's message editor. +You can type `@` to mention files, directories, symbols, previous threads, and rules files. -Pasting images as context is also supported by the Agent Panel. +Additionally, you can also select text in a buffer and add it as context by using the {#kb agent::AddSelectionToThread} keybinding, running the {#action agent::AddSelectionToThread} action, or choosing the "Selection" item in the `@` menu. + +Copying images and pasting them in the panel's message editor is also supported. 
### Token Usage {#token-usage} diff --git a/docs/src/ai/configuration.md b/docs/src/ai/configuration.md index e2cd9ad0201933a7ba4f1239615cf44ccdb7f3f6..8877689e4641ca9c03b2a0668f8df759199d5d71 100644 --- a/docs/src/ai/configuration.md +++ b/docs/src/ai/configuration.md @@ -3,7 +3,7 @@ When using AI in Zed, you can configure multiple dimensions: 1. Which LLM providers you can use - - Zed's hosted models, which require [authentication](../accounts.md) and [subscription](./subscription.md) + - Zed's hosted models, which require [authentication](../authentication.md) and [subscription](./subscription.md) - [Using your own API keys](./llm-providers.md), which do not - Using [external agents like Claude Code](./external-agents.md), which do not 2. [Model parameters and usage](./agent-settings.md#model-settings) diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 3c653284b015f33c9457338c6932289e95c6babd..07f68dadce41ab89f210873aadd10d1ca65fcef3 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -3,7 +3,7 @@ Edit Prediction is Zed's mechanism for predicting the code you want to write through AI. Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions that can be quickly accepted by pressing `tab`. -The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), which [requires being signed into Zed](../accounts.md#what-features-require-signing-in). +The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), which [requires being signed into Zed](../authentication.md#what-features-require-signing-in). Alternatively, you can use other providers like [GitHub Copilot](#github-copilot) (or [Enterprise](#github-copilot-enterprise)) or [Supermaven](#supermaven). 
diff --git a/docs/src/ai/plans-and-usage.md b/docs/src/ai/plans-and-usage.md index cbca689f9a20ffa99df9816a4bb1fef933d56fee..fc59a894aacd524a10e31b65ababd4f8d79e3b8e 100644 --- a/docs/src/ai/plans-and-usage.md +++ b/docs/src/ai/plans-and-usage.md @@ -4,7 +4,7 @@ For costs and more information on pricing, visit [Zed’s pricing page](https://zed.dev/pricing). -Please note that if you’re interested in just using Zed as the world’s fastest editor, with no AI or subscription features, you can always do so for free, without [authentication](../accounts.md). +Please note that if you’re interested in just using Zed as the world’s fastest editor, with no AI or subscription features, you can always do so for free, without [authentication](../authentication.md). ## Usage {#usage} diff --git a/docs/src/ai/privacy-and-security.md b/docs/src/ai/privacy-and-security.md index 23166df1d7e2e188e68d7ecafa98ac92f8e704a4..735d1518642348b8b1787e99e3770ab93fb13766 100644 --- a/docs/src/ai/privacy-and-security.md +++ b/docs/src/ai/privacy-and-security.md @@ -16,7 +16,7 @@ It is entirely possible to use Zed, including Zed's AI capabilities, without sha - [AI Improvement](./ai-improvement.md): Zed's opt-in-only approach to data collection for AI improvement, whether our Agentic offering or Edit Predictions. -- [Accounts](../accounts.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you. +- [Accounts](../authentication.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you. ## Legal Links diff --git a/docs/src/accounts.md b/docs/src/authentication.md similarity index 83% rename from docs/src/accounts.md rename to docs/src/authentication.md index af4c4c172f76ba1d491ddb4031714f60f848c3b6..6d05567e3198ed5180b65dc0fb5f470baa679f9e 100644 --- a/docs/src/accounts.md +++ b/docs/src/authentication.md @@ -1,11 +1,11 @@ -# Accounts +# Authenticate with Zed -Signing in to Zed is not a requirement. 
You can use most features you'd expect in a code editor without ever doing so. We'll outline the few features that do require signing in, and how to do so, here. +Signing in to Zed is not required. You can use most features you'd expect in a code editor without ever doing so. We'll outline the few features that do require signing in, and how to do so, here. ## What Features Require Signing In? 1. All real-time [collaboration features](./collaboration.md). -2. [LLM-powered features](./ai/overview.md), if you are using Zed as the provider of your LLM models. Alternatively, you can [bring and configure your own API keys](./ai/llm-providers.md#use-your-own-keys) if you'd prefer, and avoid having to sign in. +2. [LLM-powered features](./ai/overview.md), if you are using Zed as the provider of your LLM models. To use AI without signing in, you can [bring and configure your own API keys](./ai/llm-providers.md#use-your-own-keys). ## Signing In diff --git a/docs/src/command-line-interface.md b/docs/src/command-line-interface.md new file mode 100644 index 0000000000000000000000000000000000000000..1a7831811dd119357c7f076be3fe9efa35bce021 --- /dev/null +++ b/docs/src/command-line-interface.md @@ -0,0 +1,18 @@ +# Command-line Interface + +Zed has a CLI, on Linux this should come with the distribution's Zed package (binary name can vary from distribution to distribution, `zed` will be used later for brevity). +For macOS, the CLI comes in the same package with the editor binary, and could be installed into the system with the `cli: install` Zed command which will create a symlink to the `/usr/local/bin/zed`. +It can also be built from source out of the `cli` crate in this repository. + +Use `zed --help` to see the full list of capabilities. 
+General highlights: + +- Opening another empty Zed window: `zed` + +- Opening a file or directory in Zed: `zed /path/to/entry` (use `-n` to open in the new window) + +- Reading from stdin: `ps axf | zed -` + +- Starting Zed with logs in the terminal: `zed --foreground` + +- Uninstalling Zed and all its related files: `zed --uninstall` diff --git a/docs/src/command-palette.md b/docs/src/command-palette.md new file mode 100644 index 0000000000000000000000000000000000000000..b573fc6a5f8b6e664b5a3c6f94cf115e67dd5c78 --- /dev/null +++ b/docs/src/command-palette.md @@ -0,0 +1,9 @@ +# Command Palette + +The Command Palette is the main way to access pretty much any functionality that's available in Zed. Its keybinding is the first one you should make yourself familiar with. To open it, hit: {#kb command_palette::Toggle}. + +![The opened Command Palette](https://zed.dev/img/features/command-palette.jpg) + +Try it! Open the Command Palette and type in `new file`. You should see the list of commands being filtered down to `workspace: new file`. Hit return and you end up with a new buffer. + +Any time you see instructions that include commands of the form `zed: ...` or `editor: ...` and so on that means you need to execute them in the Command Palette. diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index efc4538c0e5286a053a89916c90548796ba619d0..054b6b1b5c812bed95dc7db6e63522b11b86c09c 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -4,7 +4,14 @@ Zed is designed to be configured: we want to fit your workflow and preferences e In addition to the settings described here, you may also want to change your [theme](./themes.md), configure your [key bindings](./key-bindings.md), set up [tasks](./tasks.md) or install [extensions](https://github.com/zed-industries/extensions). 
-## Settings files +## Settings Editor + +You can browse through many of the supported settings via the Settings Editor, which can be opened with the {#kb zed::OpenSettings} keybinding, or through the `zed: open settings` action in the command palette. Through it, you can customize your local, user settings as well as project settings. + +> Note that not all settings that Zed supports are available through the Settings Editor yet. +> Some more intricate ones, such as language formatters, can only be changed through the JSON settings file {#kb zed::OpenSettingsFile}. + +## User Settings File -Your settings file can be opened with {#kb zed::OpenSettings}. By default it is located at `~/.config/zed/settings.json`, though if you have XDG_CONFIG_HOME in your environment on Linux it will be at `$XDG_CONFIG_HOME/zed/settings.json` instead. +Your settings JSON file can be opened with {#kb zed::OpenSettingsFile}. +By default it is located at `~/.config/zed/settings.json`, though if you have `XDG_CONFIG_HOME` in your environment on Linux it will be at `$XDG_CONFIG_HOME/zed/settings.json` instead. + +Whatever you have added to your user settings file gets merged with any local configuration inside your projects. + +### Default Settings -This configuration is merged with any local configuration inside your projects. You can open the project settings by running {#action zed::OpenProjectSettings} from the command palette. This will create a `.zed` directory containing`.zed/settings.json`. +In the Settings Editor, the values you see set are the default ones. +You can also verify them in JSON by running {#action zed::OpenDefaultSettings} from the command palette. -Although most projects will only need one settings file at the root, you can add more local settings files for subdirectories as needed. Not all settings can be set in local files, just those that impact the behavior of the editor and language tooling. For example you can set `tab_size`, `formatter` etc. 
but not `theme`, `vim_mode` and similar. +Extensions that provide language servers may also provide default settings for those language servers. + +## Project Settings File + +Similarly to user files, you can open your project settings file by running {#action zed::OpenProjectSettings} from the command palette. +This will create a `.zed` directory containing`.zed/settings.json`. + +Although most projects will only need one settings file at the root, you can add more local settings files for subdirectories as needed. +Not all settings can be set in local files, just those that impact the behavior of the editor and language tooling. +For example you can set `tab_size`, `formatter` etc. but not `theme`, `vim_mode` and similar. The syntax for configuration files is a super-set of JSON that allows `//` comments. -## Default settings +## Per-release Channel Overrides -You can find the default settings for your current Zed by running {#action zed::OpenDefaultSettings} from the command palette. +Zed reads the same `settings.json` across all release channels (Stable, Preview or Nightly). +However, you can scope overrides to a specific channel by adding top-level `stable`, `preview`, `nightly` or `dev` objects. +They are merged into the base configuration with settings from these keys taking precedence upon launching the specified build. For example: -Extensions that provide language servers may also provide default settings for those language servers. +```json [settings] +{ + "theme": "sunset", + "vim_mode": false, + "nightly": { + "theme": "cave-light", + "vim_mode": true + }, + "preview": { + "theme": "zed-dark" + } +} +``` + +With this configuration, Stable keeps all base preferences, Preview switches to `zed-dark`, and Nightly enables Vim mode with a different theme. + +Changing settings in the Settings Editorwill always apply the change across all channels. # Settings +Find below an extensive run-through of many supported settings by Zed. 
+ ## Active Pane Modifiers - Description: Styling settings applied to the active pane. diff --git a/docs/src/development/releases.md b/docs/src/development/releases.md index 9e2cdccfdc01c528a75c85cad5d6ac0fe9ed64e2..6cb3deb31680f8c038195c93ebf12fe6699354e2 100644 --- a/docs/src/development/releases.md +++ b/docs/src/development/releases.md @@ -44,7 +44,7 @@ This is mostly a formality on Wednesday's minor update releases, but can be bene 1. Check the stable release assets. - Ensure the stable release job has finished without error. - - Ensure the draft has the proper number of assets—releases currently have 11 assets each. + - Ensure the draft has the proper number of assets—releases currently have 12 assets each (as of v0.211). - Download the artifacts for the stable release draft and test that you can run them locally. 1. Publish the stable draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). @@ -70,7 +70,7 @@ This is mostly a formality on Wednesday's minor update releases, but can be bene - Take the script's output and build release notes by organizing each release note line into a category. - Use a prior release for the initial outline. - - Make sure to append the `Credit` line, if present, to the end of the release note line. + - Make sure to append the `Credit` line, if present, to the end of each release note line. 1. Once the preview release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the preview release notes into it and **save**. @@ -79,7 +79,7 @@ This is mostly a formality on Wednesday's minor update releases, but can be bene 1. Check the preview release assets. - Ensure the preview release job has finished without error. - - Ensure the draft has the proper number of assets—releases currently have 11 assets each. + - Ensure the draft has the proper number of assets—releases currently have 12 assets each (as of v0.211). 
- Download the artifacts for the preview release draft and test that you can run them locally. 1. Publish the preview draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index 6bd34ad5cc387bc4fb208334f206aabc047d37b8..77bf9cef30ef2b0d701ae9d4d46f407b15196e93 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -1,92 +1,19 @@ # Getting Started -Welcome to Zed! We are excited to have you. Here is a jumping-off point to getting started. +Welcome to Zed! We are excited to have you. Zed is a powerful multiplayer code editor designed to stay out of your way and help you build what's next. -## Download Zed +## Key Features -### macOS +- [Smooth Editing](./configuring-zed.md): Built in Rust, Zed is responsive and intuitive, with a minimalistic aesthetic and pixel-level editing controls. +- [Agentic Editing](./ai/overview.md): Use Zed's hosted models to collaborate with agents directly in an IDE. You can also plug into a third-party agent or bring your own keys. +- [Debugger](./debugger.md): Debug your code in seconds, not hours, with minimal setup required. +- [Remote Development](./remote-development.md): Offload the heavy lifting to the cloud, so you can focus on writing code. +- [Extensions](./extensions.md): Leverage Zed's extensions to customize how you work. -Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. +## Join the Zed Community -You can also install Zed stable via Homebrew: +Zed is proudly open source, and we get better with every contribution. Join us on GitHub or in Discord to contribute code, report bugs, or suggest features. 
-```sh -brew install --cask zed -``` - -As well as Zed preview: - -```sh -brew install --cask zed@preview -``` - -### Windows - -Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. - -### Linux - -For most Linux users, the easiest way to install Zed is through our installation script: - -```sh -curl -f https://zed.dev/install.sh | sh -``` - -If you'd like to help us test our new features, you can also install our preview build: - -```sh -curl -f https://zed.dev/install.sh | ZED_CHANNEL=preview sh -``` - -This script supports `x86_64` and `AArch64`, as well as common Linux distributions: Ubuntu, Arch, Debian, RedHat, CentOS, Fedora, and more. - -If Zed is installed using this installation script, it can be uninstalled at any time by running the shell command `zed --uninstall`. The shell will then prompt you whether you'd like to keep your preferences or delete them. After making a choice, you should see a message that Zed was successfully uninstalled. - -If this script is insufficient for your use case, you run into problems running Zed, or there are errors in uninstalling Zed, please see our [Linux-specific documentation](./linux.md). - -## Command Palette - -The Command Palette is the main way to access pretty much any functionality that's available in Zed. Its keybinding is the first one you should make yourself familiar with. To open it, hit: {#kb command_palette::Toggle}. - -![The opened Command Palette](https://zed.dev/img/features/command-palette.jpg) - -Try it! Open the Command Palette and type in `new file`. You should see the list of commands being filtered down to `workspace: new file`. Hit return and you end up with a new buffer. 
- -Any time you see instructions that include commands of the form `zed: ...` or `editor: ...` and so on that means you need to execute them in the Command Palette. - -## CLI - -Zed has a CLI, on Linux this should come with the distribution's Zed package (binary name can vary from distribution to distribution, `zed` will be used later for brevity). -For macOS, the CLI comes in the same package with the editor binary, and could be installed into the system with the `cli: install` Zed command which will create a symlink to the `/usr/local/bin/zed`. -It can also be built from source out of the `cli` crate in this repository. - -Use `zed --help` to see the full list of capabilities. -General highlights: - -- Opening another empty Zed window: `zed` - -- Opening a file or directory in Zed: `zed /path/to/entry` (use `-n` to open in the new window) - -- Reading from stdin: `ps axf | zed -` - -- Starting Zed with logs in the terminal: `zed --foreground` - -- Uninstalling Zed and all its related files: `zed --uninstall` - -## Configure Zed - -To open your custom settings to set things like fonts, formatting settings, per-language settings, and more, use the {#kb zed::OpenSettings} keybinding. - -To see all available settings, open the Command Palette with {#kb command_palette::Toggle} and search for `zed: open default settings`. -You can also check them all out in the [Configuring Zed](./configuring-zed.md) documentation. - -## Configure AI in Zed - -Zed smoothly integrates LLMs in multiple ways across the editor. -Visit [the AI overview page](./ai/overview.md) to learn how to quickly get started with LLMs on Zed. - -## Set up your key bindings - -To edit your custom keymap and add or remap bindings, you can either use {#kb zed::OpenKeymap} to spawn the Zed Keymap Editor ({#action zed::OpenKeymap}) or you can directly open your Zed Keymap json (`~/.config/zed/keymap.json`) with {#action zed::OpenKeymap}. 
- -To access the default key binding set, open the Command Palette with {#kb command_palette::Toggle} and search for "zed: open default keymap". See [Key Bindings](./key-bindings.md) for more info. +- [Join Discord](https://discord.com/invite/zedindustries) +- [GitHub Discussions](https://github.com/zed-industries/zed/discussions) +- [Zed Reddit](https://www.reddit.com/r/ZedEditor) diff --git a/docs/src/system-requirements.md b/docs/src/installation.md similarity index 56% rename from docs/src/system-requirements.md rename to docs/src/installation.md index eaf9c027be5cef93c2dea0149e7e55dcdd8eb154..7f8baf0d49bb5ae7873c09424c6f464af044f241 100644 --- a/docs/src/system-requirements.md +++ b/docs/src/installation.md @@ -1,6 +1,48 @@ -# System Requirements +# Installing Zed -## Apple +## Download Zed + +### macOS + +Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. + +You can also install Zed stable via Homebrew: + +```sh +brew install --cask zed +``` + +As well as Zed preview: + +```sh +brew install --cask zed@preview +``` + +### Windows + +Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. 
+ +### Linux + +For most Linux users, the easiest way to install Zed is through our installation script: + +```sh +curl -f https://zed.dev/install.sh | sh +``` + +If you'd like to help us test our new features, you can also install our preview build: + +```sh +curl -f https://zed.dev/install.sh | ZED_CHANNEL=preview sh +``` + +This script supports `x86_64` and `AArch64`, as well as common Linux distributions: Ubuntu, Arch, Debian, RedHat, CentOS, Fedora, and more. + +If Zed is installed using this installation script, it can be uninstalled at any time by running the shell command `zed --uninstall`. The shell will then prompt you whether you'd like to keep your preferences or delete them. After making a choice, you should see a message that Zed was successfully uninstalled. + +If this script is insufficient for your use case, you run into problems running Zed, or there are errors in uninstalling Zed, please see our [Linux-specific documentation](./linux.md). + +## System Requirements ### macOS @@ -17,7 +59,7 @@ Zed supports the follow macOS releases: The macOS releases labelled "Partially Supported" (Big Sur and Catalina) do not support screen sharing via Zed Collaboration. These features use the [LiveKit SDK](https://livekit.io) which relies upon [ScreenCaptureKit.framework](https://developer.apple.com/documentation/screencapturekit/) only available on macOS 12 (Monterey) and newer. -### Mac Hardware +#### Mac Hardware Zed supports machines with Intel (x86_64) or Apple (aarch64) processors that meet the above macOS requirements: @@ -30,7 +72,7 @@ Zed supports machines with Intel (x86_64) or Apple (aarch64) processors that mee - iMac Pro (all models) - Mac Studio (all models) -## Linux +### Linux Zed supports 64bit Intel/AMD (x86_64) and 64Bit ARM (aarch64) processors. 
@@ -40,7 +82,7 @@ Zed requires a Vulkan 1.3 driver, and the following desktop portals: - `org.freedesktop.portal.OpenURI` - `org.freedesktop.portal.Secret`, or `org.freedesktop.Secrets` -## Windows +### Windows Zed supports the follow Windows releases: | Version | Microsoft Status | Zed Status | @@ -48,7 +90,7 @@ Zed supports the follow Windows releases: | Windows 11 (all releases) | Supported | Supported | | Windows 10 (64-bit) | Supported | Supported | -### Windows Hardware +#### Windows Hardware Zed supports machines with Intel or AMD 64-bit (x86_64) processors that meet the above Windows requirements: @@ -57,10 +99,10 @@ Zed supports machines with Intel or AMD 64-bit (x86_64) processors that meet the - Graphics: A GPU that supports DirectX 11 (most PCs from 2012+). - Driver: Current NVIDIA/AMD/Intel driver (not the Microsoft Basic Display Adapter). -## FreeBSD +### FreeBSD Not yet available as an official download. Can be built [from source](./development/freebsd.md). -## Web +### Web Not supported at this time. See our [Platform Support issue](https://github.com/zed-industries/zed/issues/5391). diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 1ce148bcf118bce61e113074ad321f27344aec04..f0f1e472c75e7e6bd1489c22d20ffa380190258e 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -2,9 +2,9 @@ Zed has a very customizable key binding system—you can tweak everything to work exactly how your fingers expect! -## Predefined keymaps +## Predefined Keymaps -If you're used to a specific editor's defaults, you can set a `base_keymap` in your [settings file](./configuring-zed.md). +If you're used to a specific editor's defaults, you can change your `base_keymap` through the settings window ({#kb zed::OpenSettings}) or directly through your `settings.json` file ({#kb zed::OpenSettingsFile}). 
We currently support: - VS Code (default) @@ -21,18 +21,31 @@ This setting can also be changed via the command palette through the `zed: toggl You can also enable `vim_mode` or `helix_mode`, which add modal bindings. For more information, see the documentation for [Vim mode](./vim.md) and [Helix mode](./helix.md). -## User keymaps +## Keymap Editor -Where Zed looks for your keymap: +You can access the keymap editor through the {#kb zed::OpenKeymap} action or by running {#action zed::OpenKeymap} action from the command palette. You can easily add or change a keybind for an action with the `Change Keybinding` or `Add Keybinding` button on the command palette's bottom left corner. + +In there, you can see all of the existing actions in Zed as well as the associated keybindings set to them by default. + +You can also customize them right from there, either by clicking on the pencil icon that appears when you hover over a particular action, by double-clicking on the action row, or by pressing the `enter` key. + +Anything that you end up doing on the keymap editor also gets reflected in the `keymap.json` file. + +## User Keymaps + +The keymap file is stored in the following locations for each platform: - macOS/Linux: `~/.config/zed/keymap.json` - Windows: `~\AppData\Roaming\Zed/keymap.json` -You can open the keymap with the {#action zed::OpenKeymapFile} action from the command palette, or edit it in Zed's Keymap Editor, accessible via the {#action zed::OpenKeymap} action or the {#kb zed::OpenKeymap} keybinding. +You can open the keymap with the {#action zed::OpenKeymapFile} action from the command palette. -The `keymap.json` file contains a JSON array of objects with `"bindings"`. If no `"context"` is set, the bindings are always active. If it is set, the binding is only active when the [context matches](#contexts). +This file contains a JSON array of objects with `"bindings"`. +If no `"context"` is set, the bindings are always active. 
+If it is set, the binding is only active when the [context matches](#contexts). -Within each binding section, a [key sequence](#keybinding-syntax) is mapped to [an action](#actions). If conflicts are detected, they are resolved as [described below](#precedence). +Within each binding section, a [key sequence](#keybinding-syntax) is mapped to [an action](#actions). +If conflicts are detected, they are resolved as [described below](#precedence). If you are using a non-QWERTY, Latin-character keyboard, you may want to set `use_key_equivalents` to `true`. See [Non-QWERTY keyboards](#non-qwerty-keyboards) for more information. @@ -55,15 +68,16 @@ For example: ] ``` -You can see all of Zed's default bindings in the default keymaps for: +You can see all of Zed's default bindings for each platform in the default keymaps files: - [macOS](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-macos.json) - [Windows](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-windows.json) - [Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json). -If you want to debug problems with custom keymaps, you can use `dev: Open Key Context View` from the command palette. Please file [an issue](https://github.com/zed-industries/zed) if you run into something you think should work but isn't. +If you want to debug problems with custom keymaps, you can use `dev: Open Key Context View` from the command palette. +Please file [an issue](https://github.com/zed-industries/zed) if you run into something you think should work but isn't. -### Keybinding syntax +### Keybinding Syntax Zed has the ability to match against not just a single keypress, but a sequence of keys typed in order. Each key in the `"bindings"` map is a sequence of keypresses separated with a space. 
@@ -125,13 +139,13 @@ Context expressions can contain the following syntax: For example: - `"context": "Editor"` - matches any editor (including inline inputs) -- `"context": "Editor && mode=full"` - matches the main editors used for editing code +- `"context": "Editor && mode == full"` - matches the main editors used for editing code - `"context": "!Editor && !Terminal"` - matches anywhere except where an Editor or Terminal is focused -- `"context": "os=macos > Editor"` - matches any editor on macOS. +- `"context": "os == macos > Editor"` - matches any editor on macOS. It's worth noting that attributes are only available on the node they are defined on. This means that if you want to (for example) only enable a keybinding when the debugger is stopped in vim normal mode, you need to do `debugger_stopped > vim_mode == normal`. -> Note: Before Zed v0.197.x, the `!` operator only looked at one node at a time, and `>` meant "parent" not "ancestor". This meant that `!Editor` would match the context `Workspace > Pane > Editor`, because (confusingly) the Pane matches `!Editor`, and that `os=macos > Editor` did not match the context `Workspace > Pane > Editor` because of the intermediate `Pane` node. +> Note: Before Zed v0.197.x, the `!` operator only looked at one node at a time, and `>` meant "parent" not "ancestor". This meant that `!Editor` would match the context `Workspace > Pane > Editor`, because (confusingly) the Pane matches `!Editor`, and that `os == macos > Editor` did not match the context `Workspace > Pane > Editor` because of the intermediate `Pane` node. If you're using Vim mode, we have information on how [vim modes influence the context](./vim.md#contexts). Helix mode is built on top of Vim mode and uses the same contexts. 
diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index 45f440267ec01880437fc16e788c6b1b715efd82..f8ba2f18e7e8b18479ecca00f0f7771751cb7d09 100644 --- a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -92,6 +92,7 @@ the formatter: { "languages": { "JavaScript": { + "formatter": [], "code_actions_on_format": { "source.fixAll.eslint": true } diff --git a/docs/src/quick-start.md b/docs/src/quick-start.md new file mode 100644 index 0000000000000000000000000000000000000000..05cf8c1fd04c2ca4599aab34df0963e76ca8a87e --- /dev/null +++ b/docs/src/quick-start.md @@ -0,0 +1 @@ +# Quick Start diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index b4d7033a3b1d2201fbf35afa096551a2e5232272..057a3d2e0814e083a3ecbbeafd987762cd825388 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -29,7 +29,7 @@ The remote machine must be able to run Zed's server. The following platforms sho - macOS Catalina or later (Intel or Apple Silicon) - Linux (x86_64 or arm64, we do not yet support 32-bit platforms) -- Windows is not yet supported. +- Windows is not yet supported as a remote server, but Windows can be used as a local machine to connect to remote servers. ## Configuration @@ -87,6 +87,28 @@ If you use the command line to open a connection to a host by doing `zed ssh://1 Additionally it's worth noting that while you can pass a password on the command line `zed ssh://user:password@host/~`, we do not support writing a password to your settings file. If you're connecting repeatedly to the same host, you should configure key-based authentication. +## Remote Development on Windows (SSH) + +Zed on Windows supports SSH remoting and will prompt for credentials when needed. + +If you encounter authentication issues, confirm that your SSH key agent is running (e.g., ssh-agent or your Git client's agent) and that ssh.exe is on PATH. 
+ +### Troubleshooting SSH on Windows + +When prompted for credentials, use the graphical askpass dialog. If it doesn't appear, check for credential manager conflicts and that GUI prompts aren't blocked by your terminal. + +## WSL Support + +Zed supports opening folders inside of WSL natively on Windows. + +### Opening a local folder in WSL + +To open a local folder inside a WSL container, use the `projects: open in wsl` action and select the folder you want to open. You will be presented with a list of available WSL distributions to open the folder in. + +### Opening a folder already in WSL + +To open a folder that's already located inside of a WSL container, use the `projects: open wsl` action and select the WSL distribution. The distribution will be added to the `Remote Projects` window where you will be able to open the folder. + ## Port forwarding If you'd like to be able to connect to ports on your remote server from your local machine, you can configure port forwarding in your settings file. This is particularly useful for developing websites so you can load the site in your browser while working. diff --git a/docs/src/uninstall.md b/docs/src/uninstall.md new file mode 100644 index 0000000000000000000000000000000000000000..f2d7da93e78b71c607e79b0bdd5d017f88d55f4d --- /dev/null +++ b/docs/src/uninstall.md @@ -0,0 +1,113 @@ +# Uninstall + +This guide covers how to uninstall Zed on different operating systems. + +## macOS + +### Standard Installation + +If you installed Zed by downloading it from the website: + +1. Quit Zed if it's running +2. Open Finder and go to your Applications folder +3. Drag Zed to the Trash (or right-click and select "Move to Trash") +4. 
Empty the Trash + +### Homebrew Installation + +If you installed Zed using Homebrew, use the following command: + +```sh +brew uninstall --cask zed +``` + +Or for the preview version: + +```sh +brew uninstall --cask zed@preview +``` + +### Removing User Data (Optional) + +To completely remove all Zed configuration files and data: + +1. Open Finder +2. Press `Cmd + Shift + G` to open "Go to Folder" +3. Delete the following directories if they exist: + - `~/Library/Application Support/Zed` + - `~/Library/Saved Application State/dev.zed.Zed.savedState` + - `~/Library/Logs/Zed` + - `~/Library/Caches/dev.zed.Zed` + +## Linux + +### Standard Uninstall + +If Zed was installed using the default installation script, run: + +```sh +zed --uninstall +``` + +You'll be prompted whether to keep or delete your preferences. After making a choice, you should see a message that Zed was successfully uninstalled. + +If the `zed` command is not found in your PATH, try: + +```sh +$HOME/.local/bin/zed --uninstall +``` + +or: + +```sh +$HOME/.local/zed.app/bin/zed --uninstall +``` + +### Package Manager + +If you installed Zed using a package manager (such as Flatpak, Snap, or a distribution-specific package manager), consult that package manager's documentation for uninstallation instructions. + +### Manual Removal + +If the uninstall command fails or Zed was installed to a custom location, you can manually remove: + +- Installation directory: `~/.local/zed.app` (or your custom installation path) +- Binary symlink: `~/.local/bin/zed` +- Configuration and data: `~/.config/zed` + +## Windows + +### Standard Installation + +1. Quit Zed if it's running +2. Open Settings (Windows key + I) +3. Go to "Apps" > "Installed apps" (or "Apps & features" on Windows 10) +4. Search for "Zed" +5. Click the three dots menu next to Zed and select "Uninstall" +6. Follow the prompts to complete the uninstallation + +Alternatively, you can: + +1. Open the Start menu +2. Right-click on Zed +3. 
Select "Uninstall" + +### Removing User Data (Optional) + +To completely remove all Zed configuration files and data: + +1. Press `Windows key + R` to open Run +2. Type `%APPDATA%` and press Enter +3. Delete the `Zed` folder if it exists +4. Press `Windows key + R` again, type `%LOCALAPPDATA%` and press Enter +5. Delete the `Zed` folder if it exists + +## Troubleshooting + +If you encounter issues during uninstallation: + +- **macOS/Windows**: Ensure Zed is completely quit before attempting to uninstall. Check Activity Monitor (macOS) or Task Manager (Windows) for any running Zed processes. +- **Linux**: If the uninstall script fails, check the error message and consider manual removal of the directories listed above. +- **All platforms**: If you want to start fresh while keeping Zed installed, you can delete the configuration directories instead of uninstalling the application entirely. + +For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community). diff --git a/docs/src/update.md b/docs/src/update.md new file mode 100644 index 0000000000000000000000000000000000000000..d828e5edf072a98267111965483f5dfd5400138b --- /dev/null +++ b/docs/src/update.md @@ -0,0 +1,21 @@ +# Update Zed + +Zed is designed to keep itself up to date automatically. You can always change this behavior in your settings. + +## Auto-updates + +By default, Zed checks for updates and installs them automatically the next time you restart the app. You’ll always be running the latest version with no extra steps. + +If an update is available, Zed will download it in the background and apply it on restart. + +## How to check your current version + +To check which version of Zed you're using: + +Open the Command Palette (Cmd+Shift+P on macOS, Ctrl+Shift+P on Linux/Windows). + +Type and select `zed: about`. A modal will appear with your version information. 
+ +## How to control update behavior + +If you want to turn off auto-updates, open the Settings Editor (Cmd ,) and find `Auto Update` under General Settings. diff --git a/docs/src/windows.md b/docs/src/windows.md index dbb6fbdd472a293774e56b3c4e1ec475eea0bc40..7d064b47a4ec2d49d1344498f94bc91bbb9c1020 100644 --- a/docs/src/windows.md +++ b/docs/src/windows.md @@ -15,17 +15,9 @@ Your settings and extensions live in your user profile. When uninstalling, you c ## Remote Development (SSH) -Zed supports SSH remoting on Windows and will prompt for credentials when needed. +Zed supports remote development on Windows through both SSH and WSL. You can connect to remote servers via SSH or work with files inside WSL distributions directly from Zed. -If you encounter authentication issues, confirm that your SSH key agent is running (e.g., ssh-agent or your Git client’s agent) and that ssh.exe is on PATH. - -## WSL Support - -Zed supports opening folders inside of WSL natively. - -To open a local folder inside a WSL container use the `projects: open in wsl` action and select the folder you want to open, after which you will be presented with a list of available WSL distributions to open the folder in. - -To open a folder that's already located inside of a WSL container use the `projects: open wsl` action and select the WSL distribution, after which you the distro will be added to the `Remote Projects` window where you will be able to open the folder, see [Remote Development](./remote-development.md) +For detailed instructions on setting up and using remote development features, including SSH configuration, WSL setup, and troubleshooting, see the [Remote Development documentation](./remote-development.md). 
## Troubleshooting diff --git a/docs/theme/css/chrome.css b/docs/theme/css/chrome.css index c66982a2126373c2c4e27503e5b69eeceb56f92b..9f2afad54388bf2289e57f43275cdf2b4d98d4dd 100644 --- a/docs/theme/css/chrome.css +++ b/docs/theme/css/chrome.css @@ -26,6 +26,14 @@ a > .hljs { overflow-x: clip; } +.large-logo-img { + display: block; +} + +.icon-logo-img { + display: none; +} + /* Menu Bar */ #menu-bar, @@ -34,7 +42,7 @@ a > .hljs { margin: auto calc(0px - var(--page-padding)); } #menu-bar { - padding: 16px; + padding: 12px 16px; position: relative; display: flex; flex-wrap: wrap; @@ -77,9 +85,24 @@ a > .hljs { } @media only screen and (max-width: 420px) { - #menu-bar i, - #menu-bar .icon-button { - padding: 0 5px; + .large-logo-img { + display: none; + } + + .icon-logo-img { + display: block; + } + + #menu-bar { + padding: 12px; + } + + #menu-bar .ib-hidden-mobile { + display: none; + } + + .right-buttons { + width: 100px; /*For center aligning the icon link*/ } } @@ -96,7 +119,9 @@ a > .hljs { .right-buttons { display: flex; align-items: center; + justify-content: end; } + .right-buttons a { text-decoration: none; } diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 64ca072ef2128f84358dfc58d07e7c9ad3ed86ea..052d983483708699333c6ca308509d306cbc7566 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -139,7 +139,7 @@
  • - @@ -152,7 +152,8 @@ diff --git a/nix/shell.nix b/nix/shell.nix index b6f1efd366b32cdb246f3884856643977d1b3552..6956de8e8abb55e404f62ed9e377ff79aface5bd 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -1,24 +1,30 @@ { mkShell, makeFontsConf, + pkgsCross, zed-editor, rust-analyzer, + rustup, cargo-nextest, cargo-hakari, cargo-machete, + cargo-zigbuild, nixfmt-rfc-style, protobuf, nodejs_22, + zig, }: (mkShell.override { inherit (zed-editor) stdenv; }) { inputsFrom = [ zed-editor ]; packages = [ rust-analyzer + rustup cargo-nextest cargo-hakari cargo-machete + cargo-zigbuild nixfmt-rfc-style # TODO: package protobuf-language-server for editing zed.proto # TODO: add other tools used in our scripts @@ -26,6 +32,7 @@ # `build.nix` adds this to the `zed-editor` wrapper (see `postFixup`) # we'll just put it on `$PATH`: nodejs_22 + zig ]; env = @@ -51,5 +58,6 @@ ]; }; PROTOC = "${protobuf}/bin/protoc"; + ZED_ZSTD_MUSL_LIB = "${pkgsCross.musl64.pkgsStatic.zstd.out}/lib"; }; } diff --git a/script/bundle-linux b/script/bundle-linux index e8263fe4bcc8a90073149bf3a02ff1ed481017c3..e41a3d4783d5378409a568e77f74c7f9393576b5 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -101,26 +101,10 @@ else fi # Strip debug symbols and save them for upload to DigitalOcean -objcopy --only-keep-debug "${target_dir}/${target_triple}/release/zed" "${target_dir}/${target_triple}/release/zed.dbg" -objcopy --only-keep-debug "${target_dir}/${remote_server_triple}/release/remote_server" "${target_dir}/${remote_server_triple}/release/remote_server.dbg" objcopy --strip-debug "${target_dir}/${target_triple}/release/zed" objcopy --strip-debug "${target_dir}/${target_triple}/release/cli" objcopy --strip-debug "${target_dir}/${remote_server_triple}/release/remote_server" -gzip -f "${target_dir}/${target_triple}/release/zed.dbg" -gzip -f "${target_dir}/${remote_server_triple}/release/remote_server.dbg" - -if [[ -n "${DIGITALOCEAN_SPACES_SECRET_KEY:-}" && -n 
"${DIGITALOCEAN_SPACES_ACCESS_KEY:-}" ]]; then - upload_to_blob_store_public \ - "zed-debug-symbols" \ - "${target_dir}/${target_triple}/release/zed.dbg.gz" \ - "$channel/zed-$version-${target_triple}.dbg.gz" - upload_to_blob_store_public \ - "zed-debug-symbols" \ - "${target_dir}/${remote_server_triple}/release/remote_server.dbg.gz" \ - "$channel/remote_server-$version-${remote_server_triple}.dbg.gz" -fi - # Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. if ldd "${target_dir}/${remote_server_triple}/release/remote_server" | grep -q 'libcrypto\|libssl'; then if [[ "$remote_server_triple" == *-musl ]]; then diff --git a/script/bundle-mac b/script/bundle-mac index abcdb6cee2e6b35bcc185a40b6ad459dd98389fb..3f895ec14b72522abddb7548a6139729adcdfe8d 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -9,7 +9,6 @@ open_result=false local_arch=false local_only=false local_install=false -bundle_name="" can_code_sign=false # This must match the team in the provisioning profile. @@ -19,12 +18,11 @@ APPLE_NOTARIZATION_TEAM="MQ55VZLNZQ" # Function for displaying help info help_info() { echo " -Usage: ${0##*/} [options] [bundle_name] +Usage: ${0##*/} [options] [architecture=host] Build the application bundle for macOS. Options: -d Compile in debug mode - -l Compile for local architecture only. -o Open dir with the resulting DMG or launch the app itself in local mode. -i Install the resulting DMG into /Applications in local mode. Noop without -l. -h Display this help and exit. 
@@ -41,12 +39,6 @@ do build_flag=""; target_dir="debug" ;; - l) - export CARGO_INCREMENTAL=true - export CARGO_BUNDLE_SKIP_BUILD=true - local_arch=true - local_only=true - ;; i) local_install=true;; h) help_info @@ -57,11 +49,6 @@ done shift $((OPTIND-1)) -if [[ $# -gt 0 ]]; then - if [ "$1" ]; then - bundle_name=$1 - fi -fi # Get release channel pushd crates/zed @@ -81,24 +68,31 @@ export CXXFLAGS="-stdlib=libc++" version_info=$(rustc --version --verbose) host_line=$(echo "$version_info" | grep host) -local_target_triple=${host_line#*: } +target_triple=${host_line#*: } +if [[ $# -gt 0 && -n "$1" ]]; then + target_triple="$1" +fi +remote_server_arch="" + +if [[ "$target_triple" = "x86_64-apple-darwin" ]]; then + remote_server_arch="x86_64" +elif [[ "$target_triple" = "aarch64-apple-darwin" ]]; then + remote_server_arch="aarch64" +else + echo "Unsupported architecture $target_triple" + exit 1 +fi # Generate the licenses first, so they can be baked into the binaries script/generate-licenses -if [ "$local_arch" = true ]; then - echo "Building for local target only." - cargo build ${build_flag} --package zed --package cli --package remote_server -else - rustup target add aarch64-apple-darwin - rustup target add x86_64-apple-darwin - - echo "Compiling zed binaries" - cargo build ${build_flag} --package zed --package cli --target aarch64-apple-darwin --target x86_64-apple-darwin - # Build remote_server in separate invocation to prevent feature unification from other crates - # from influencing dynamic libraries required by it. - cargo build ${build_flag} --package remote_server --target aarch64-apple-darwin --target x86_64-apple-darwin -fi +rustup target add $target_triple + +echo "Compiling zed binaries" +cargo build ${build_flag} --package zed --package cli --target $target_triple +# Build remote_server in separate invocation to prevent feature unification from other crates +# from influencing dynamic libraries required by it. 
+cargo build ${build_flag} --package remote_server --target $target_triple echo "Creating application bundle" pushd crates/zed @@ -108,13 +102,7 @@ sed \ "s/package.metadata.bundle-${channel}/package.metadata.bundle/" \ Cargo.toml -if [ "$local_arch" = true ]; then - app_path=$(cargo bundle ${build_flag} --select-workspace-root | xargs) -else - app_path_x64=$(cargo bundle ${build_flag} --target x86_64-apple-darwin --select-workspace-root | xargs) - app_path_aarch64=$(cargo bundle ${build_flag} --target aarch64-apple-darwin --select-workspace-root | xargs) - app_path=$app_path_x64 -fi +app_path=$(cargo bundle ${build_flag} --target $target_triple --select-workspace-root | xargs) mv Cargo.toml.backup Cargo.toml popd @@ -189,51 +177,12 @@ function download_git() { rm -rf "$tmp_dir" } -function prepare_binaries() { - local architecture=$1 - local app_path=$2 - - echo "Unpacking dSYMs for $architecture" - exe_path="target/${architecture}/${target_dir}/Zed" - if ! dsymutil --flat "${exe_path}" 2> target/dsymutil.log; then - echo "dsymutil failed" - cat target/dsymutil.log - exit 1 - fi - uuid=$(dwarfdump --uuid "${exe_path}" | cut -d ' ' -f 2 | tr 'A-F' 'a-f') - version="$(cargo metadata --no-deps --manifest-path crates/zed/Cargo.toml --offline --format-version=1 | jq -r '.packages | map(select(.name == "zed"))[0].version')" - if [ "$channel" == "nightly" ]; then - version="$version-$(git rev-parse --short HEAD)" - fi - - echo "Removing existing gzipped dSYMs for $architecture" - rm -f target/${architecture}/${target_dir}/Zed.dwarf.gz - - echo "Gzipping dSYMs for $architecture" - gzip -kf target/${architecture}/${target_dir}/Zed.dwarf - - echo "Uploading dSYMs${architecture} for $architecture to by-uuid/${uuid}.dwarf.gz" - upload_to_blob_store_public \ - "zed-debug-symbols" \ - target/${architecture}/${target_dir}/Zed.dwarf.gz \ - "by-uuid/${uuid}.dwarf.gz" - - cp target/${architecture}/${target_dir}/zed "${app_path}/Contents/MacOS/zed" - cp 
target/${architecture}/${target_dir}/cli "${app_path}/Contents/MacOS/cli" -} - function sign_app_binaries() { - local app_path=$1 - local architecture=$2 - local architecture_dir=$3 rm -rf "${app_path}/Contents/Frameworks" mkdir -p "${app_path}/Contents/Frameworks" - if [ "$local_arch" = true ]; then - cp -R target/${target_dir}/cli "${app_path}/Contents/MacOS/" - fi echo "Downloading git binary" - download_git "${architecture}" "${app_path}/Contents/MacOS/git" + download_git "${target_triple}" "${app_path}/Contents/MacOS/git" # Note: The app identifier for our development builds is the same as the app identifier for nightly. cp crates/zed/contents/$channel/embedded.provisionprofile "${app_path}/Contents/" @@ -276,15 +225,7 @@ function sign_app_binaries() { exit 0 fi - # If bundle_name is not set or empty, use the basename of $app_path - if [ -z "$bundle_name" ]; then - bundle_name=$(basename "$app_path") - else - # If bundle_name doesn't end in .app, append it - if [[ "$bundle_name" != *.app ]]; then - bundle_name="$bundle_name.app" - fi - fi + bundle_name=$(basename "$app_path") if [ "$local_only" = true ]; then if [ "$local_install" = true ]; then @@ -302,7 +243,7 @@ function sign_app_binaries() { fi fi else - dmg_target_directory="target/${architecture_dir}/${target_dir}" + dmg_target_directory="target/${target_triple}/${target_dir}" dmg_source_directory="${dmg_target_directory}/dmg" dmg_file_path="${dmg_target_directory}/Zed.dmg" xcode_bin_dir_path="$(xcode-select -p)/usr/bin" @@ -350,44 +291,29 @@ function sign_binary() { /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${binary_path}" -v fi } +cp target/${target_triple}/${target_dir}/zed "${app_path}/Contents/MacOS/zed" +cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli" +sign_app_binaries -if [ "$local_arch" = true ]; then - sign_app_binaries "$app_path" "$local_target_triple" 
"$local_target_triple" - - sign_binary "target/release/remote_server" -else - # Create universal binary - prepare_binaries "aarch64-apple-darwin" "$app_path_aarch64" - prepare_binaries "x86_64-apple-darwin" "$app_path_x64" - - - sign_app_binaries "$app_path_x64" "x86_64-apple-darwin" "x86_64-apple-darwin" - sign_app_binaries "$app_path_aarch64" "aarch64-apple-darwin" "aarch64-apple-darwin" - - sign_binary "target/x86_64-apple-darwin/release/remote_server" - sign_binary "target/aarch64-apple-darwin/release/remote_server" - gzip -f --stdout --best target/x86_64-apple-darwin/release/remote_server > target/zed-remote-server-macos-x86_64.gz - gzip -f --stdout --best target/aarch64-apple-darwin/release/remote_server > target/zed-remote-server-macos-aarch64.gz -fi +sign_binary "target/$target_triple/release/remote_server" +gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz function upload_debug_info() { - architecture=$1 if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then echo "Uploading zed debug symbols to sentry..." # note: this uploads the unstripped binary which is needed because it contains # .eh_frame data for stack unwinding. see https://github.com/getsentry/symbolic/issues/783 sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev \ - "target/${architecture}/${target_dir}/zed" \ - "target/${architecture}/${target_dir}/remote_server" \ - "target/${architecture}/${target_dir}/zed.dwarf" + "target/${target_triple}/${target_dir}/zed" \ + "target/${target_triple}/${target_dir}/remote_server" \ + "target/${target_triple}/${target_dir}/zed.dwarf" else echo "missing SENTRY_AUTH_TOKEN. skipping sentry upload." fi } if command -v sentry-cli >/dev/null 2>&1; then - upload_debug_info "aarch64-apple-darwin" - upload_debug_info "x86_64-apple-darwin" + upload_debug_info else echo "sentry-cli not found. skipping sentry upload." 
echo "install with: 'curl -sL https://sentry.io/get-cli | bash'" diff --git a/script/bundle-windows.ps1 b/script/bundle-windows.ps1 index f6f44307ff7c2be960b40cd837739d2657095ab2..facb8d07ee65e04bbb12636620b23c9b1137aebb 100644 --- a/script/bundle-windows.ps1 +++ b/script/bundle-windows.ps1 @@ -2,10 +2,10 @@ Param( [Parameter()][Alias('i')][switch]$Install, [Parameter()][Alias('h')][switch]$Help, + [Parameter()][Alias('a')][string]$Architecture, [Parameter()][string]$Name ) -. "$PSScriptRoot/lib/blob-store.ps1" . "$PSScriptRoot/lib/workspace.ps1" # https://stackoverflow.com/questions/57949031/powershell-script-stops-if-program-fails-like-bash-set-o-errexit @@ -14,12 +14,44 @@ $PSNativeCommandUseErrorActionPreference = $true $buildSuccess = $false +$OSArchitecture = switch ([System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture) { + "X64" { "x86_64" } + "Arm64" { "aarch64" } + default { throw "Unsupported architecture" } +} + +$Architecture = if ($Architecture) { + $Architecture +} else { + $OSArchitecture +} + +$CargoOutDir = "./target/$Architecture-pc-windows-msvc/release" + +function Get-VSArch { + param( + [string]$Arch + ) + + switch ($Arch) { + "x86_64" { "amd64" } + "aarch64" { "arm64" } + } +} + +Push-Location +& "C:\Program Files\Microsoft Visual Studio\2022\Community\Common7\Tools\Launch-VsDevShell.ps1" -Arch (Get-VSArch -Arch $Architecture) -HostArch (Get-VSArch -Arch $OSArchitecture) +Pop-Location + +$target = "$Architecture-pc-windows-msvc" + if ($Help) { Write-Output "Usage: test.ps1 [-Install] [-Help]" Write-Output "Build the installer for Windows.\n" Write-Output "Options:" - Write-Output " -Install, -i Run the installer after building." - Write-Output " -Help, -h Show this help message." + Write-Output " -Architecture, -a Which architecture to build (x86_64 or aarch64)" + Write-Output " -Install, -i Run the installer after building." + Write-Output " -Help, -h Show this help message." 
exit 0 } @@ -30,6 +62,10 @@ $env:RELEASE_CHANNEL = $channel Pop-Location function CheckEnvironmentVariables { + if(-not $env:CI) { + return + } + $requiredVars = @( 'ZED_WORKSPACE', 'RELEASE_VERSION', 'ZED_RELEASE_CHANNEL', 'AZURE_TENANT_ID', 'AZURE_CLIENT_ID', 'AZURE_CLIENT_SECRET', @@ -55,6 +91,8 @@ function PrepareForBundle { New-Item -Path "$innoDir\appx" -ItemType Directory -Force New-Item -Path "$innoDir\bin" -ItemType Directory -Force New-Item -Path "$innoDir\tools" -ItemType Directory -Force + + rustup target add $target } function GenerateLicenses { @@ -67,34 +105,34 @@ function GenerateLicenses { function BuildZedAndItsFriends { Write-Output "Building Zed and its friends, for channel: $channel" # Build zed.exe, cli.exe and auto_update_helper.exe - cargo build --release --package zed --package cli --package auto_update_helper - Copy-Item -Path ".\target\release\zed.exe" -Destination "$innoDir\Zed.exe" -Force - Copy-Item -Path ".\target\release\cli.exe" -Destination "$innoDir\cli.exe" -Force - Copy-Item -Path ".\target\release\auto_update_helper.exe" -Destination "$innoDir\auto_update_helper.exe" -Force + cargo build --release --package zed --package cli --package auto_update_helper --target $target + Copy-Item -Path ".\$CargoOutDir\zed.exe" -Destination "$innoDir\Zed.exe" -Force + Copy-Item -Path ".\$CargoOutDir\cli.exe" -Destination "$innoDir\cli.exe" -Force + Copy-Item -Path ".\$CargoOutDir\auto_update_helper.exe" -Destination "$innoDir\auto_update_helper.exe" -Force # Build explorer_command_injector.dll switch ($channel) { "stable" { - cargo build --release --features stable --no-default-features --package explorer_command_injector + cargo build --release --features stable --no-default-features --package explorer_command_injector --target $target } "preview" { - cargo build --release --features preview --no-default-features --package explorer_command_injector + cargo build --release --features preview --no-default-features --package 
explorer_command_injector --target $target } default { - cargo build --release --package explorer_command_injector + cargo build --release --package explorer_command_injector --target $target } } - Copy-Item -Path ".\target\release\explorer_command_injector.dll" -Destination "$innoDir\zed_explorer_command_injector.dll" -Force + Copy-Item -Path ".\$CargoOutDir\explorer_command_injector.dll" -Destination "$innoDir\zed_explorer_command_injector.dll" -Force } function ZipZedAndItsFriendsDebug { $items = @( - ".\target\release\zed.pdb", - ".\target\release\cli.pdb", - ".\target\release\auto_update_helper.pdb", - ".\target\release\explorer_command_injector.pdb" + ".\$CargoOutDir\zed.pdb", + ".\$CargoOutDir\cli.pdb", + ".\$CargoOutDir\auto_update_helper.pdb", + ".\$CargoOutDir\explorer_command_injector.pdb" ) - Compress-Archive -Path $items -DestinationPath ".\target\release\zed-$env:RELEASE_VERSION-$env:ZED_RELEASE_CHANNEL.dbg.zip" -Force + Compress-Archive -Path $items -DestinationPath ".\$CargoOutDir\zed-$env:RELEASE_VERSION-$env:ZED_RELEASE_CHANNEL.dbg.zip" -Force } @@ -109,7 +147,7 @@ function UploadToSentry { return } Write-Output "Uploading zed debug symbols to sentry..." 
- sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev .\target\release\ + sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev $CargoOutDir } function MakeAppx { @@ -132,6 +170,10 @@ function MakeAppx { } function SignZedAndItsFriends { + if (-not $env:CI) { + return + } + $files = "$innoDir\Zed.exe,$innoDir\cli.exe,$innoDir\auto_update_helper.exe,$innoDir\zed_explorer_command_injector.dll,$innoDir\zed_explorer_command_injector.appx" & "$innoDir\sign.ps1" $files } @@ -159,9 +201,19 @@ function CollectFiles { Move-Item -Path "$innoDir\cli.exe" -Destination "$innoDir\bin\zed.exe" -Force Move-Item -Path "$innoDir\zed.sh" -Destination "$innoDir\bin\zed" -Force Move-Item -Path "$innoDir\auto_update_helper.exe" -Destination "$innoDir\tools\auto_update_helper.exe" -Force - Move-Item -Path ".\AGS_SDK-6.3.0\ags_lib\lib\amd_ags_x64.dll" -Destination "$innoDir\amd_ags_x64.dll" -Force - Move-Item -Path ".\conpty\build\native\runtimes\x64\OpenConsole.exe" -Destination "$innoDir\OpenConsole.exe" -Force - Move-Item -Path ".\conpty\runtimes\win10-x64\native\conpty.dll" -Destination "$innoDir\conpty.dll" -Force + if($Architecture -eq "aarch64") { + New-Item -Type Directory -Path "$innoDir\arm64" -Force + Move-Item -Path ".\conpty\build\native\runtimes\arm64\OpenConsole.exe" -Destination "$innoDir\arm64\OpenConsole.exe" -Force + Move-Item -Path ".\conpty\runtimes\win10-arm64\native\conpty.dll" -Destination "$innoDir\conpty.dll" -Force + } + else { + New-Item -Type Directory -Path "$innoDir\x64" -Force + New-Item -Type Directory -Path "$innoDir\arm64" -Force + Move-Item -Path ".\AGS_SDK-6.3.0\ags_lib\lib\amd_ags_x64.dll" -Destination "$innoDir\amd_ags_x64.dll" -Force + Move-Item -Path ".\conpty\build\native\runtimes\x64\OpenConsole.exe" -Destination "$innoDir\x64\OpenConsole.exe" -Force + Move-Item -Path ".\conpty\build\native\runtimes\arm64\OpenConsole.exe" -Destination "$innoDir\arm64\OpenConsole.exe" -Force + Move-Item -Path 
".\conpty\runtimes\win10-x64\native\conpty.dll" -Destination "$innoDir\conpty.dll" -Force + } } function BuildInstaller { @@ -172,7 +224,7 @@ function BuildInstaller { $appIconName = "app-icon" $appName = "Zed" $appDisplayName = "Zed" - $appSetupName = "Zed-x86_64" + $appSetupName = "Zed-$Architecture" # The mutex name here should match the mutex name in crates\zed\src\zed\windows_only_instance.rs $appMutex = "Zed-Stable-Instance-Mutex" $appExeName = "Zed" @@ -186,7 +238,7 @@ function BuildInstaller { $appIconName = "app-icon-preview" $appName = "Zed Preview" $appDisplayName = "Zed Preview" - $appSetupName = "Zed-x86_64" + $appSetupName = "Zed-$Architecture" # The mutex name here should match the mutex name in crates\zed\src\zed\windows_only_instance.rs $appMutex = "Zed-Preview-Instance-Mutex" $appExeName = "Zed" @@ -200,7 +252,7 @@ function BuildInstaller { $appIconName = "app-icon-nightly" $appName = "Zed Nightly" $appDisplayName = "Zed Nightly" - $appSetupName = "Zed-x86_64" + $appSetupName = "Zed-$Architecture" # The mutex name here should match the mutex name in crates\zed\src\zed\windows_only_instance.rs $appMutex = "Zed-Nightly-Instance-Mutex" $appExeName = "Zed" @@ -214,7 +266,7 @@ function BuildInstaller { $appIconName = "app-icon-dev" $appName = "Zed Dev" $appDisplayName = "Zed Dev" - $appSetupName = "Zed-x86_64" + $appSetupName = "Zed-$Architecture" # The mutex name here should match the mutex name in crates\zed\src\zed\windows_only_instance.rs $appMutex = "Zed-Dev-Instance-Mutex" $appExeName = "Zed" @@ -252,14 +304,16 @@ function BuildInstaller { "AppxFullName" = $appAppxFullName } - $signTool = "powershell.exe -ExecutionPolicy Bypass -File $innoDir\sign.ps1 `$f" - $defs = @() foreach ($key in $definitions.Keys) { $defs += "/d$key=`"$($definitions[$key])`"" } - $innoArgs = @($issFilePath) + $defs + "/sDefaultsign=`"$signTool`"" + $innoArgs = @($issFilePath) + $defs + if($env:CI) { + $signTool = "powershell.exe -ExecutionPolicy Bypass -File 
$innoDir\sign.ps1 `$f" + $innoArgs += "/sDefaultsign=`"$signTool`"" + } # Execute Inno Setup Write-Host "🚀 Running Inno Setup: $innoSetupPath $innoArgs" @@ -277,8 +331,8 @@ function BuildInstaller { } ParseZedWorkspace -$innoDir = "$env:ZED_WORKSPACE\inno" -$debugArchive = ".\target\release\zed-$env:RELEASE_VERSION-$env:ZED_RELEASE_CHANNEL.dbg.zip" +$innoDir = "$env:ZED_WORKSPACE\inno\$Architecture" +$debugArchive = "$CargoOutDir\zed-$env:RELEASE_VERSION-$env:ZED_RELEASE_CHANNEL.dbg.zip" $debugStoreKey = "$env:ZED_RELEASE_CHANNEL/zed-$env:RELEASE_VERSION-$env:ZED_RELEASE_CHANNEL.dbg.zip" CheckEnvironmentVariables @@ -293,8 +347,9 @@ DownloadConpty CollectFiles BuildInstaller -UploadToBlobStorePublic -BucketName "zed-debug-symbols" -FileToUpload $debugArchive -BlobStoreKey $debugStoreKey -UploadToSentry +if($env:CI) { + UploadToSentry +} if ($buildSuccess) { Write-Output "Build successful" diff --git a/script/deploy-postgrest b/script/deploy-postgrest deleted file mode 100755 index ca8f3686468e52c41977624e6198e69ca3718a25..0000000000000000000000000000000000000000 --- a/script/deploy-postgrest +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash - -set -eu -source script/lib/deploy-helpers.sh - -if [[ $# != 1 ]]; then - echo "Usage: $0 (postgrest not needed on preview or nightly)" - exit 1 -fi -environment=$1 - -export_vars_for_environment ${environment} - -export ZED_DO_CERTIFICATE_ID=$(doctl compute certificate list --format ID --no-header) -export ZED_KUBE_NAMESPACE=${environment} - -target_zed_kube_cluster -envsubst < crates/collab/k8s/postgrest.template.yml | kubectl apply -f - - -echo "deployed postgrest" diff --git a/script/get-preview-channel-changes b/script/get-preview-channel-changes index d1ca705736cccb9fc8a28cd19850a56ca88de88f..6ba274eabc1a9c850e53c86ddafb73a26c0c5d34 100755 --- a/script/get-preview-channel-changes +++ b/script/get-preview-channel-changes @@ -1,7 +1,7 @@ #!/usr/bin/env node --redirect-warnings=/dev/null const { execFileSync } = 
require("child_process"); -const { GITHUB_ACCESS_TOKEN } = process.env; +let { GITHUB_ACCESS_TOKEN } = process.env; const GITHUB_URL = "https://github.com"; const SKIPPABLE_NOTE_REGEX = /^\s*-?\s*n\/?a\s*/ims; const PULL_REQUEST_WEB_URL = "https://github.com/zed-industries/zed/pull"; diff --git a/script/get-stable-channel-release-notes b/script/get-stable-channel-release-notes index b16bc9e41f3111821180ce7844e3a804e5d0a9d7..cbaf6497eeee7f6642c4b8b884cb42c3774047d5 100755 --- a/script/get-stable-channel-release-notes +++ b/script/get-stable-channel-release-notes @@ -13,7 +13,7 @@ // stable that didn't make it into a release, as they were cherry picked const { execFileSync } = require("child_process"); -const { GITHUB_ACCESS_TOKEN } = process.env; +let { GITHUB_ACCESS_TOKEN } = process.env; const GITHUB_TAGS_API_URL = "https://api.github.com/repos/zed-industries/zed/releases/tags"; const DIVIDER = "-".repeat(80); diff --git a/script/install-mold b/script/install-mold index 9b90f3e9047e7990892b785864c4ce930d922817..b0bf8517700beb2226d6f06e71fa8d4823175653 100755 --- a/script/install-mold +++ b/script/install-mold @@ -12,13 +12,11 @@ set -euo pipefail -MOLD_VERSION="${MOLD_VERSION:-${1:-}}" +MOLD_VERSION="2.34.0" + if [ "$(uname -s)" != "Linux" ]; then echo "Error: This script is intended for Linux systems only." exit 1 -elif [ -z "$MOLD_VERSION" ]; then - echo "Usage: $0 2.34.0" - exit 1 elif [ -e /usr/local/bin/mold ]; then echo "Warning: existing mold found at /usr/local/bin/mold. Skipping installation." 
exit 0 diff --git a/script/upload-nightly b/script/upload-nightly index 2fcb2994383842d53ccb8bf6b63f847ef76a7d12..985c43936457f53d1db72e4436434aa0ee59296a 100755 --- a/script/upload-nightly +++ b/script/upload-nightly @@ -14,20 +14,31 @@ is_allowed_target() { return 1 } -if [[ -n "${1:-}" ]]; then - if is_allowed_target "$1"; then - target="$1" - else - echo "Error: Target '$1' is not allowed" - echo "Usage: $0 [${allowed_targets[*]}]" - exit 1 - fi +allowed_arch=("x86_64" "aarch64") +is_allowed_arch() { + for val in "${allowed_arch[@]}"; do + if [[ "$1" == "$val" ]]; then + return 0 + fi + done + return 1 +} + +if is_allowed_target "$1"; then + target="$1" +else + echo "Error: Target '$1' is not allowed" + echo "Usage: $0 [${allowed_targets[*]}] {arch}" + exit 1 +fi +if is_allowed_arch "$2"; then + arch="$2" else -echo "Error: Target is not specified" -echo "Usage: $0 [${allowed_targets[*]}]" -exit 1 + echo "Error: Arch '$2' is not allowed" + echo "Usage: $0 $1 [${allowed_arch[*]}]" + exit 1 fi -echo "Uploading nightly for target: $target" +echo "Uploading nightly for target: $target $arch" bucket_name="zed-nightly-host" @@ -41,10 +52,9 @@ done case "$target" in macos) - upload_to_blob_store $bucket_name "target/aarch64-apple-darwin/release/Zed.dmg" "nightly/Zed-aarch64.dmg" - upload_to_blob_store $bucket_name "target/x86_64-apple-darwin/release/Zed.dmg" "nightly/Zed-x86_64.dmg" + upload_to_blob_store $bucket_name "target/$arch-apple-darwin/release/Zed.dmg" "nightly/Zed-$arch.dmg" upload_to_blob_store $bucket_name "target/latest-sha" "nightly/latest-sha" - rm -f "target/aarch64-apple-darwin/release/Zed.dmg" "target/x86_64-apple-darwin/release/Zed.dmg" "target/release/Zed.dmg" + rm -f "target/$arch-apple-darwin/release/Zed.dmg" "target/release/Zed.dmg" rm -f "target/latest-sha" ;; linux-targz) diff --git a/script/upload-nightly.ps1 b/script/upload-nightly.ps1 index 94f00ae9084a991669201281bdcd6110521fb50a..deec4baecc9274381b4d3f99e611190ab0865636 100644 --- 
a/script/upload-nightly.ps1 +++ b/script/upload-nightly.ps1 @@ -1,32 +1,13 @@ +[CmdletBinding()] +Param( + [Parameter()][string]$Architecture +) + # Based on the template in: https://docs.digitalocean.com/reference/api/spaces-api/ $ErrorActionPreference = "Stop" . "$PSScriptRoot\lib\blob-store.ps1" . "$PSScriptRoot\lib\workspace.ps1" -$allowedTargets = @("windows") - -function Test-AllowedTarget { - param ( - [string]$Target - ) - - return $allowedTargets -contains $Target -} - -# Process arguments -if ($args.Count -gt 0) { - $target = $args[0] - if (Test-AllowedTarget $target) { - # Valid target - } else { - Write-Error "Error: Target '$target' is not allowed.`nUsage: $($MyInvocation.MyCommand.Name) [$($allowedTargets -join ', ')]" - exit 1 - } -} else { - Write-Error "Error: Target is not specified.`nUsage: $($MyInvocation.MyCommand.Name) [$($allowedTargets -join ', ')]" - exit 1 -} - ParseZedWorkspace Write-Host "Uploading nightly for target: $target" @@ -44,17 +25,8 @@ $sha | Out-File -FilePath "target/latest-sha" -NoNewline # Remove-Item -Path $file.FullName # } -switch ($target) { - "windows" { - UploadToBlobStore -BucketName $bucketName -FileToUpload $env:SETUP_PATH -BlobStoreKey "nightly/Zed-x86_64.exe" - UploadToBlobStore -BucketName $bucketName -FileToUpload "target/latest-sha" -BlobStoreKey "nightly/latest-sha-windows" - - Remove-Item -Path $env:SETUP_PATH -ErrorAction SilentlyContinue - Remove-Item -Path "target/latest-sha" -ErrorAction SilentlyContinue - } +UploadToBlobStore -BucketName $bucketName -FileToUpload "target/Zed-$Architecture.exe" -BlobStoreKey "nightly/Zed-$Architecture.exe" +UploadToBlobStore -BucketName $bucketName -FileToUpload "target/latest-sha" -BlobStoreKey "nightly/latest-sha-windows" - default { - Write-Error "Error: Unknown target '$target'" - exit 1 - } -} +Remove-Item -Path "target/Zed-$Architecture.exe" -ErrorAction SilentlyContinue +Remove-Item -Path "target/latest-sha" -ErrorAction SilentlyContinue diff --git 
a/tooling/xtask/Cargo.toml b/tooling/xtask/Cargo.toml index aa06e6164683edd3bb011136a127b9fb99215e52..7fc03a563e0a0375b0d3003349530f9b738964d9 100644 --- a/tooling/xtask/Cargo.toml +++ b/tooling/xtask/Cargo.toml @@ -10,9 +10,12 @@ workspace = true [dependencies] anyhow.workspace = true +backtrace.workspace = true cargo_metadata.workspace = true cargo_toml.workspace = true clap = { workspace = true, features = ["derive"] } toml.workspace = true indoc.workspace = true +indexmap.workspace = true toml_edit.workspace = true +gh-workflow.workspace = true diff --git a/tooling/xtask/src/main.rs b/tooling/xtask/src/main.rs index 5b265392f4035c205c4387dd10f1410f6c04d064..6f83927d6730cb2f846d001a9bbbdd010589d998 100644 --- a/tooling/xtask/src/main.rs +++ b/tooling/xtask/src/main.rs @@ -20,6 +20,7 @@ enum CliCommand { PackageConformity(tasks::package_conformity::PackageConformityArgs), /// Publishes GPUI and its dependencies to crates.io. PublishGpui(tasks::publish_gpui::PublishGpuiArgs), + Workflows(tasks::workflows::GenerateWorkflowArgs), } fn main() -> Result<()> { @@ -32,5 +33,6 @@ fn main() -> Result<()> { tasks::package_conformity::run_package_conformity(args) } CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args), + CliCommand::Workflows(args) => tasks::workflows::run_workflows(args), } } diff --git a/tooling/xtask/src/tasks.rs b/tooling/xtask/src/tasks.rs index b73aeb0e7fce47980d61e326c8f41cebc06e07b2..01b3907f0486854b1bd18a5a3d21930b16670bd4 100644 --- a/tooling/xtask/src/tasks.rs +++ b/tooling/xtask/src/tasks.rs @@ -2,3 +2,4 @@ pub mod clippy; pub mod licenses; pub mod package_conformity; pub mod publish_gpui; +pub mod workflows; diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs new file mode 100644 index 0000000000000000000000000000000000000000..b86f31cbd26321998d1b1c26d94459d512e7d817 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows.rs @@ -0,0 +1,46 @@ +use anyhow::{Context, Result}; +use 
clap::Parser; +use std::fs; +use std::path::Path; + +mod danger; +mod nix_build; +mod release_nightly; +mod run_bundling; + +mod runners; +mod steps; +mod vars; + +#[derive(Parser)] +pub struct GenerateWorkflowArgs {} + +pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { + let dir = Path::new(".github/workflows"); + + let workflows = vec![ + ("danger.yml", danger::danger()), + ("nix_build.yml", nix_build::nix_build()), + ("run_bundling.yml", run_bundling::run_bundling()), + ("release_nightly.yml", release_nightly::release_nightly()), + // ("run_tests.yml", run_tests::run_tests()), + // ("release.yml", release::release()), + ]; + fs::create_dir_all(dir) + .with_context(|| format!("Failed to create directory: {}", dir.display()))?; + + for (filename, workflow) in workflows { + let content = workflow + .to_string() + .map_err(|e| anyhow::anyhow!("{}: {:?}", filename, e))?; + let content = format!( + "# Generated from xtask::workflows::{}\n# Rebuild with `cargo xtask workflows`.\n{}", + workflow.name.unwrap(), + content + ); + let file_path = dir.join(filename); + fs::write(&file_path, content)?; + } + + Ok(()) +} diff --git a/tooling/xtask/src/tasks/workflows/danger.rs b/tooling/xtask/src/tasks/workflows/danger.rs new file mode 100644 index 0000000000000000000000000000000000000000..e4121d8f5c20852f1d7eda446a2742c82ef80fb9 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/danger.rs @@ -0,0 +1,53 @@ +use gh_workflow::*; + +use crate::tasks::workflows::steps::named; + +use super::{runners, steps}; + +/// Generates the danger.yml workflow +pub fn danger() -> Workflow { + named::workflow() + .on( + Event::default().pull_request(PullRequest::default().add_branch("main").types([ + PullRequestType::Opened, + PullRequestType::Synchronize, + PullRequestType::Reopened, + PullRequestType::Edited, + ])), + ) + .add_job( + "danger", + Job::default() + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .runs_on(runners::LINUX_CHEAP) + 
.add_step(steps::checkout_repo()) + .add_step(steps::setup_pnpm()) + .add_step( + steps::setup_node() + .add_with(("cache", "pnpm")) + .add_with(("cache-dependency-path", "script/danger/pnpm-lock.yaml")), + ) + .add_step(install_deps()) + .add_step(run()), + ) +} + +pub fn install_deps() -> Step { + named::bash("pnpm install --dir script/danger") +} + +pub fn run() -> Step { + named::bash("pnpm run --dir script/danger danger ci") + // This GitHub token is not used, but the value needs to be here to prevent + // Danger from throwing an error. + .add_env(("GITHUB_TOKEN", "not_a_real_token")) + // All requests are instead proxied through an instance of + // https://github.com/maxdeviant/danger-proxy that allows Danger to securely + // authenticate with GitHub while still being able to run on PRs from forks. + .add_env(( + "DANGER_GITHUB_API_BASE_URL", + "https://danger-proxy.fly.dev/github", + )) +} diff --git a/tooling/xtask/src/tasks/workflows/nix_build.rs b/tooling/xtask/src/tasks/workflows/nix_build.rs new file mode 100644 index 0000000000000000000000000000000000000000..1406f2a1e12518c39d65d208205d6abd3ea38bb4 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/nix_build.rs @@ -0,0 +1,152 @@ +use crate::tasks::workflows::{ + runners::{Arch, Platform}, + steps::NamedJob, +}; + +use super::{runners, steps, steps::named, vars}; +use gh_workflow::*; +use indoc::indoc; + +/// Generates the nix.yml workflow +pub fn nix_build() -> Workflow { + // todo(ci) instead of having these as optional YAML inputs, + // should we just generate two copies of the job (one for release-nightly + // and one for CI?) 
+ let (input_flake_output, flake_output) = vars::input( + "flake-output", + WorkflowCallInput { + input_type: "string".into(), + default: Some("default".into()), + ..Default::default() + }, + ); + let (input_cachix_filter, cachix_filter) = vars::input( + "cachix-filter", + WorkflowCallInput { + input_type: "string".into(), + ..Default::default() + }, + ); + + let linux_x86 = build_nix( + Platform::Linux, + Arch::X86_64, + &input_flake_output, + Some(&input_cachix_filter), + &[], + ); + let mac_arm = build_nix( + Platform::Mac, + Arch::ARM64, + &input_flake_output, + Some(&input_cachix_filter), + &[], + ); + + named::workflow() + .on(Event::default().workflow_call( + WorkflowCall::default() + .add_input(flake_output.0, flake_output.1) + .add_input(cachix_filter.0, cachix_filter.1), + )) + .add_job(linux_x86.name, linux_x86.job) + .add_job(mac_arm.name, mac_arm.job) +} + +pub(crate) fn build_nix( + platform: Platform, + arch: Arch, + flake_output: &str, + cachix_filter: Option<&str>, + deps: &[&NamedJob], +) -> NamedJob { + let runner = match platform { + Platform::Windows => unimplemented!(), + Platform::Linux => runners::LINUX_X86_BUNDLER, + Platform::Mac => runners::MAC_DEFAULT, + }; + let mut job = Job::default() + .timeout_minutes(60u32) + .continue_on_error(true) + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .runs_on(runner) + .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) + .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT)) + .add_env(( + "ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON", + vars::ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON, + )) + .add_env(("GIT_LFS_SKIP_SMUDGE", "1")) // breaks the livekit rust sdk examples which we don't actually depend on + .add_step(steps::checkout_repo()); + + if deps.len() > 0 { + job = job.needs(deps.iter().map(|d| d.name.clone()).collect::<Vec<_>>()); + } + + job = if platform == Platform::Linux { + job.add_step(install_nix()) + 
.add_step(cachix_action(cachix_filter)) + .add_step(build(&flake_output)) + } else { + job.add_step(set_path()) + .add_step(cachix_action(cachix_filter)) + .add_step(build(&flake_output)) + .add_step(limit_store()) + }; + + NamedJob { + name: format!("build_nix_{platform}_{arch}"), + job, + } +} + +// on our macs we manually install nix. for some reason the cachix action is running +// under a non-login /bin/bash shell which doesn't source the proper script to add the +// nix profile to PATH, so we manually add them here +pub fn set_path() -> Step { + named::bash(indoc! {r#" + echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" + echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" + "#}) +} + +pub fn install_nix() -> Step { + named::uses( + "cachix", + "install-nix-action", + "02a151ada4993995686f9ed4f1be7cfbb229e56f", // v31 + ) + .add_with(("github_access_token", vars::GITHUB_TOKEN)) +} + +pub fn cachix_action(cachix_filter: Option<&str>) -> Step { + let mut step = named::uses( + "cachix", + "cachix-action", + "0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad", // v16 + ) + .add_with(("name", "zed")) + .add_with(("authToken", vars::CACHIX_AUTH_TOKEN)) + .add_with(("cachixArgs", "-v")); + if let Some(cachix_filter) = cachix_filter { + step = step.add_with(("pushFilter", cachix_filter)); + } + step +} + +pub fn build(flake_output: &str) -> Step { + named::bash(&format!( + "nix build .#{} -L --accept-flake-config", + flake_output + )) +} + +pub fn limit_store() -> Step { + named::bash(indoc! 
{r#" + if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then + nix-collect-garbage -d || true + fi"# + }) +} diff --git a/tooling/xtask/src/tasks/workflows/release_nightly.rs b/tooling/xtask/src/tasks/workflows/release_nightly.rs new file mode 100644 index 0000000000000000000000000000000000000000..4e203f6934d50ec869068d237bcf8fd193678736 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/release_nightly.rs @@ -0,0 +1,276 @@ +use crate::tasks::workflows::{ + nix_build::build_nix, + run_bundling::bundle_mac, + runners::{Arch, Platform}, + steps::NamedJob, + vars::{mac_bundle_envs, windows_bundle_envs}, +}; + +use super::{runners, steps, steps::named, vars}; +use gh_workflow::*; +use indexmap::IndexMap; + +/// Generates the release_nightly.yml workflow +pub fn release_nightly() -> Workflow { + let env: IndexMap<_, _> = [ + ("CARGO_TERM_COLOR", "always"), + ("CARGO_INCREMENTAL", "0"), + ("RUST_BACKTRACE", "1"), + ("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED), + ("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT), + ( + "DIGITALOCEAN_SPACES_ACCESS_KEY", + vars::DIGITALOCEAN_SPACES_ACCESS_KEY, + ), + ( + "DIGITALOCEAN_SPACES_SECRET_KEY", + vars::DIGITALOCEAN_SPACES_SECRET_KEY, + ), + ] + .into_iter() + .map(|(key, value)| (key.into(), value.into())) + .collect(); + + let style = check_style(); + let tests = run_tests(Platform::Mac); + let windows_tests = run_tests(Platform::Windows); + let bundle_mac_x86 = bundle_mac_nightly(Arch::X86_64, &[&style, &tests]); + let bundle_mac_arm = bundle_mac_nightly(Arch::ARM64, &[&style, &tests]); + let linux_x86 = bundle_linux_nightly(Arch::X86_64, &[&style, &tests]); + let linux_arm = bundle_linux_nightly(Arch::ARM64, &[&style, &tests]); + let windows_x86 = bundle_windows_nightly(Arch::X86_64, &[&style, &windows_tests]); + let windows_arm = bundle_windows_nightly(Arch::ARM64, &[&style, &windows_tests]); + + let nix_linux_x86 = build_nix( + Platform::Linux, + Arch::X86_64, + "default", + None, + &[&style, 
&tests], + ); + let nix_mac_arm = build_nix( + Platform::Mac, + Arch::ARM64, + "default", + None, + &[&style, &tests], + ); + let update_nightly_tag = update_nightly_tag_job(&[ + &bundle_mac_x86, + &bundle_mac_arm, + &linux_x86, + &linux_arm, + &windows_x86, + &windows_arm, + ]); + + named::workflow() + .on(Event::default() + // Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) + .schedule([Schedule::new("0 7 * * *")]) + .push(Push::default().add_tag("nightly"))) + .envs(env) + .add_job(style.name, style.job) + .add_job(tests.name, tests.job) + .add_job(windows_tests.name, windows_tests.job) + .add_job(bundle_mac_x86.name, bundle_mac_x86.job) + .add_job(bundle_mac_arm.name, bundle_mac_arm.job) + .add_job(linux_x86.name, linux_x86.job) + .add_job(linux_arm.name, linux_arm.job) + .add_job(windows_x86.name, windows_x86.job) + .add_job(windows_arm.name, windows_arm.job) + .add_job(nix_linux_x86.name, nix_linux_x86.job) + .add_job(nix_mac_arm.name, nix_mac_arm.job) + .add_job(update_nightly_tag.name, update_nightly_tag.job) +} + +fn check_style() -> NamedJob { + let job = release_job(&[]) + .runs_on(runners::MAC_DEFAULT) + .add_step( + steps::checkout_repo() + .add_with(("clean", false)) + .add_with(("fetch-depth", 0)), + ) + .add_step(steps::cargo_fmt()) + .add_step(steps::script("./script/clippy")); + + named::job(job) +} + +fn release_job(deps: &[&NamedJob]) -> Job { + let job = Job::default() + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .timeout_minutes(60u32); + if deps.len() > 0 { + job.needs(deps.iter().map(|j| j.name.clone()).collect::<Vec<_>>()) + } else { + job + } +} + +fn run_tests(platform: Platform) -> NamedJob { + let runner = match platform { + Platform::Windows => runners::WINDOWS_DEFAULT, + Platform::Linux => runners::LINUX_DEFAULT, + Platform::Mac => runners::MAC_DEFAULT, + }; + NamedJob { + name: format!("run_tests_{platform}"), + job: release_job(&[]) + .runs_on(runner) + 
.add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(platform)) + .add_step(steps::setup_node()) + .add_step(steps::cargo_install_nextest(platform)) + .add_step(steps::clear_target_dir_if_large(platform)) + .add_step(steps::cargo_nextest(platform)) + .add_step(steps::cleanup_cargo_config(platform)), + } +} + +fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { + let platform = Platform::Mac; + NamedJob { + name: format!("bundle_mac_nightly_{arch}"), + job: release_job(deps) + .runs_on(runners::MAC_DEFAULT) + .envs(mac_bundle_envs()) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_node()) + .add_step(steps::setup_sentry()) + .add_step(steps::clear_target_dir_if_large(platform)) + .add_step(set_release_channel_to_nightly(platform)) + .add_step(bundle_mac(arch)) + .add_step(upload_zed_nightly(platform, arch)), + } +} + +fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { + let platform = Platform::Linux; + let mut job = release_job(deps) + .runs_on(arch.linux_bundler()) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_sentry()) + .add_step(add_rust_to_path()) + .add_step(steps::script("./script/linux")); + + // todo(ci) can we do this on arm too? 
+ if arch == Arch::X86_64 { + job = job.add_step(steps::script("./script/install-mold")); + } + job = job + .add_step(steps::clear_target_dir_if_large(platform)) + .add_step(set_release_channel_to_nightly(platform)) + .add_step(steps::script("./script/bundle-linux")) + .add_step(upload_zed_nightly(platform, arch)); + NamedJob { + name: format!("bundle_linux_nightly_{arch}"), + job, + } +} + +fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { + let platform = Platform::Windows; + NamedJob { + name: format!("bundle_windows_nightly_{arch}"), + job: release_job(deps) + .runs_on(runners::WINDOWS_DEFAULT) + .envs(windows_bundle_envs()) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_sentry()) + .add_step(set_release_channel_to_nightly(platform)) + .add_step(build_zed_installer(arch)) + .add_step(upload_zed_nightly_windows(arch)), + } +} + +fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob { + NamedJob { + name: "update_nightly_tag".to_owned(), + job: release_job(deps) + .runs_on(runners::LINUX_CHEAP) + .add_step(steps::checkout_repo().add_with(("fetch-depth", 0))) + .add_step(update_nightly_tag()) + .add_step(create_sentry_release()), + } +} + +fn set_release_channel_to_nightly(platform: Platform) -> Step { + match platform { + Platform::Linux | Platform::Mac => named::bash(indoc::indoc! {r#" + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + "#}), + Platform::Windows => named::pwsh(indoc::indoc! 
{r#" + $ErrorActionPreference = "Stop" + $version = git rev-parse --short HEAD + Write-Host "Publishing version: $version on release channel nightly" + "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" + "#}) + .working_directory("${{ env.ZED_WORKSPACE }}"), + } +} + +fn add_rust_to_path() -> Step { + named::bash(r#"echo "$HOME/.cargo/bin" >> "$GITHUB_PATH""#) +} + +fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step { + match platform { + Platform::Linux => named::bash(&format!("script/upload-nightly linux-targz {arch}")), + Platform::Mac => named::bash(&format!("script/upload-nightly macos {arch}")), + Platform::Windows => { + let cmd = match arch { + Arch::X86_64 => "script/upload-nightly.ps1 -Architecture x86_64", + Arch::ARM64 => "script/upload-nightly.ps1 -Architecture aarch64", + }; + named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}") + } + } +} + +fn build_zed_installer(arch: Arch) -> Step { + let cmd = match arch { + Arch::X86_64 => "script/bundle-windows.ps1 -Architecture x86_64", + Arch::ARM64 => "script/bundle-windows.ps1 -Architecture aarch64", + }; + named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}") +} + +fn upload_zed_nightly_windows(arch: Arch) -> Step { + let cmd = match arch { + Arch::X86_64 => "script/upload-nightly.ps1 -Architecture x86_64", + Arch::ARM64 => "script/upload-nightly.ps1 -Architecture aarch64", + }; + named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}") +} + +fn update_nightly_tag() -> Step { + named::bash(indoc::indoc! {r#" + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current commit. Skipping tagging." 
+ exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force + "#}) +} + +fn create_sentry_release() -> Step { + named::uses( + "getsentry", + "action-release", + "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 + ) + .add_env(("SENTRY_ORG", "zed-dev")) + .add_env(("SENTRY_PROJECT", "zed")) + .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN)) + .add_with(("environment", "production")) +} diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs new file mode 100644 index 0000000000000000000000000000000000000000..ee3d5b2a7558af7b8561952836badcd38d20f01e --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -0,0 +1,119 @@ +use crate::tasks::workflows::{ + steps::named, + vars::{mac_bundle_envs, windows_bundle_envs}, +}; + +use super::{runners, steps, vars}; +use gh_workflow::*; + +pub fn run_bundling() -> Workflow { + named::workflow() + .on(Event::default().pull_request( + PullRequest::default().types([PullRequestType::Labeled, PullRequestType::Synchronize]), + )) + .concurrency( + Concurrency::new(Expression::new( + "${{ github.workflow }}-${{ github.head_ref || github.ref }}", + )) + .cancel_in_progress(true), + ) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", "0")) + .add_env(("RUST_BACKTRACE", "1")) + .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) + .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT)) + .add_job("bundle_mac_x86_64", bundle_mac_job(runners::Arch::X86_64)) + .add_job("bundle_mac_arm64", bundle_mac_job(runners::Arch::ARM64)) + .add_job("bundle_linux_x86_64", bundle_linux(runners::Arch::X86_64)) + .add_job("bundle_linux_arm64", bundle_linux(runners::Arch::ARM64)) + .add_job( + "bundle_windows_x86_64", + bundle_windows_job(runners::Arch::X86_64), + ) + .add_job( + "bundle_windows_arm64", + 
bundle_windows_job(runners::Arch::ARM64), + ) +} + +fn bundle_job() -> Job { + Job::default() + .cond(Expression::new( + "(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))", + )) + .timeout_minutes(60u32) +} + +fn bundle_mac_job(arch: runners::Arch) -> Job { + use vars::GITHUB_SHA; + bundle_job() + .runs_on(runners::MAC_DEFAULT) + .envs(mac_bundle_envs()) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_node()) + .add_step(steps::setup_sentry()) + .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac)) + .add_step(bundle_mac(arch)) + .add_step(steps::upload_artifact( + &format!("Zed_{GITHUB_SHA}-{arch}.dmg"), + &format!("target/{arch}-apple-darwin/release/Zed.dmg"), + )) + .add_step(steps::upload_artifact( + &format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz"), + &format!("target/zed-remote-server-macos-{arch}.gz"), + )) +} + +pub fn bundle_mac(arch: runners::Arch) -> Step { + named::bash(&format!("./script/bundle-mac {arch}-apple-darwin")) +} + +fn bundle_linux(arch: runners::Arch) -> Job { + let artifact_name = format!("zed-{}-{}.tar.gz", vars::GITHUB_SHA, arch.triple()); + let remote_server_artifact_name = format!( + "zed-remote-server-{}-{}.tar.gz", + vars::GITHUB_SHA, + arch.triple() + ); + let mut job = bundle_job() + .runs_on(arch.linux_bundler()) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_sentry()) + .add_step(steps::script("./script/linux")); + // todo(ci) can we do this on arm too? 
+ if arch == runners::Arch::X86_64 { + job = job.add_step(steps::script("./script/install-mold")); + } + job.add_step(steps::script("./script/bundle-linux")) + .add_step(steps::upload_artifact( + &artifact_name, + "target/release/zed-*.tar.gz", + )) + .add_step(steps::upload_artifact( + &remote_server_artifact_name, + "target/release/zed-remote-server-*.tar.gz", + )) +} + +fn bundle_windows_job(arch: runners::Arch) -> Job { + use vars::GITHUB_SHA; + bundle_job() + .runs_on(runners::WINDOWS_DEFAULT) + .envs(windows_bundle_envs()) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_sentry()) + .add_step(bundle_windows(arch)) + .add_step(steps::upload_artifact( + &format!("Zed_{GITHUB_SHA}-{arch}.exe"), + "${{ env.SETUP_PATH }}", + )) +} + +fn bundle_windows(arch: runners::Arch) -> Step { + let step = match arch { + runners::Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"), + runners::Arch::ARM64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"), + }; + step.working_directory("${{ env.ZED_WORKSPACE }}") +} diff --git a/tooling/xtask/src/tasks/workflows/runners.rs b/tooling/xtask/src/tasks/workflows/runners.rs new file mode 100644 index 0000000000000000000000000000000000000000..02263ef42bb54dc31c10bfa07a4dde76010fdd85 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/runners.rs @@ -0,0 +1,65 @@ +pub const LINUX_CHEAP: Runner = Runner("namespace-profile-2x4-ubuntu-2404"); +pub const LINUX_DEFAULT: Runner = Runner("namespace-profile-16x32-ubuntu-2204"); + +// Using Ubuntu 20.04 for minimal glibc version +pub const LINUX_X86_BUNDLER: Runner = Runner("namespace-profile-32x64-ubuntu-2004"); +pub const LINUX_ARM_BUNDLER: Runner = Runner("namespace-profile-8x32-ubuntu-2004-arm-m4"); + +pub const MAC_DEFAULT: Runner = Runner("self-mini-macos"); +pub const WINDOWS_DEFAULT: Runner = Runner("self-32vcpu-windows-2022"); + +pub struct Runner(&'static str); + +impl Into<gh_workflow::RunsOn> for Runner { + fn into(self) -> gh_workflow::RunsOn { + 
self.0.into() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Arch { + X86_64, + ARM64, +} + +impl std::fmt::Display for Arch { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Arch::X86_64 => write!(f, "x86_64"), + Arch::ARM64 => write!(f, "aarch64"), + } + } +} + +impl Arch { + pub fn triple(&self) -> &'static str { + match self { + Arch::X86_64 => "x86_64-unknown-linux-gnu", + Arch::ARM64 => "aarch64-unknown-linux-gnu", + } + } + + pub fn linux_bundler(&self) -> Runner { + match self { + Arch::X86_64 => LINUX_X86_BUNDLER, + Arch::ARM64 => LINUX_ARM_BUNDLER, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Platform { + Windows, + Linux, + Mac, +} + +impl std::fmt::Display for Platform { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Platform::Windows => write!(f, "windows"), + Platform::Linux => write!(f, "linux"), + Platform::Mac => write!(f, "mac"), + } + } +} diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs new file mode 100644 index 0000000000000000000000000000000000000000..235fcd64b1e40c8809c4c237f4bbcdcb37874acd --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -0,0 +1,203 @@ +use gh_workflow::*; + +use crate::tasks::workflows::{runners::Platform, vars}; + +const BASH_SHELL: &str = "bash -euxo pipefail {0}"; +// https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idstepsshell +const PWSH_SHELL: &str = "pwsh"; + +pub fn checkout_repo() -> Step { + named::uses( + "actions", + "checkout", + "11bd71901bbe5b1630ceea73d27597364c9af683", // v4 + ) + // prevent checkout action from running `git clean -ffdx` which + // would delete the target directory + .add_with(("clean", false)) +} + +pub fn setup_pnpm() -> Step { + named::uses( + "pnpm", + "action-setup", + "fe02b34f77f8bc703788d5817da081398fad5dd2", // v4.0.0 + ) + 
.add_with(("version", "9")) +} + +pub fn setup_node() -> Step { + named::uses( + "actions", + "setup-node", + "49933ea5288caeca8642d1e84afbd3f7d6820020", // v4 + ) + .add_with(("node-version", "20")) +} + +pub fn setup_sentry() -> Step { + named::uses( + "matbour", + "setup-sentry-cli", + "3e938c54b3018bdd019973689ef984e033b0454b", + ) + .add_with(("token", vars::SENTRY_AUTH_TOKEN)) +} + +pub fn cargo_fmt() -> Step { + named::bash("cargo fmt --all -- --check") +} + +pub fn cargo_install_nextest(platform: Platform) -> Step { + named::run(platform, "cargo install cargo-nextest --locked") +} + +pub fn cargo_nextest(platform: Platform) -> Step { + named::run( + platform, + "cargo nextest run --workspace --no-fail-fast --failure-output immediate-final", + ) +} + +pub fn setup_cargo_config(platform: Platform) -> Step { + match platform { + Platform::Windows => named::pwsh(indoc::indoc! {r#" + New-Item -ItemType Directory -Path "./../.cargo" -Force + Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml" + "#}), + + Platform::Linux | Platform::Mac => named::bash(indoc::indoc! {r#" + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + "#}), + } +} + +pub fn cleanup_cargo_config(platform: Platform) -> Step { + let step = match platform { + Platform::Windows => named::pwsh(indoc::indoc! {r#" + Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue + "#}), + Platform::Linux | Platform::Mac => named::bash(indoc::indoc! 
{r#" + rm -rf ./../.cargo + "#}), + }; + + step.if_condition(Expression::new("always()")) +} + +pub fn upload_artifact(name: &str, path: &str) -> Step { + Step::new(format!("@actions/upload-artifact {}", name)) + .uses( + "actions", + "upload-artifact", + "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5 + ) + .add_with(("name", name)) + .add_with(("path", path)) +} + +pub fn clear_target_dir_if_large(platform: Platform) -> Step { + match platform { + Platform::Windows => named::pwsh("./script/clear-target-dir-if-larger-than.ps1 250"), + Platform::Linux => named::bash("./script/clear-target-dir-if-larger-than 100"), + Platform::Mac => named::bash("./script/clear-target-dir-if-larger-than 300"), + } +} + +pub fn script(name: &str) -> Step { + if name.ends_with(".ps1") { + Step::new(name).run(name).shell(PWSH_SHELL) + } else { + Step::new(name).run(name).shell(BASH_SHELL) + } +} + +pub(crate) struct NamedJob { + pub name: String, + pub job: Job, +} + +// (janky) helper to generate steps with a name that corresponds +// to the name of the calling function. +pub(crate) mod named { + use super::*; + + /// Returns a uses step with the same name as the enclosing function. + /// (You shouldn't inline this function into the workflow definition, you must + /// wrap it in a new function.) + pub(crate) fn uses(owner: &str, repo: &str, ref_: &str) -> Step { + Step::new(function_name(1)).uses(owner, repo, ref_) + } + + /// Returns a bash-script step with the same name as the enclosing function. + /// (You shouldn't inline this function into the workflow definition, you must + /// wrap it in a new function.) + pub(crate) fn bash(script: &str) -> Step { + Step::new(function_name(1)).run(script).shell(BASH_SHELL) + } + + /// Returns a pwsh-script step with the same name as the enclosing function. + /// (You shouldn't inline this function into the workflow definition, you must + /// wrap it in a new function.) 
+ pub(crate) fn pwsh(script: &str) -> Step { + Step::new(function_name(1)).run(script).shell(PWSH_SHELL) + } + + /// Runs the command in either powershell or bash, depending on platform. + /// (You shouldn't inline this function into the workflow definition, you must + /// wrap it in a new function.) + pub(crate) fn run(platform: Platform, script: &str) -> Step { + match platform { + Platform::Windows => Step::new(function_name(1)).run(script).shell(PWSH_SHELL), + Platform::Linux | Platform::Mac => { + Step::new(function_name(1)).run(script).shell(BASH_SHELL) + } + } + } + + /// Returns a Workflow with the same name as the enclosing module. + pub(crate) fn workflow() -> Workflow { + Workflow::default().name( + named::function_name(1) + .split("::") + .next() + .unwrap() + .to_owned(), + ) + } + + /// Returns a Job with the same name as the enclosing function. + /// (note job names may not contain `::`) + pub(crate) fn job(job: Job) -> NamedJob { + NamedJob { + name: function_name(1).split("::").last().unwrap().to_owned(), + job, + } + } + + /// Returns the function name N callers above in the stack + /// (typically 1). + /// This only works because xtask always runs debug builds. + pub(crate) fn function_name(i: usize) -> String { + let mut name = "".to_string(); + let mut count = 0; + backtrace::trace(|frame| { + if count < i + 3 { + count += 1; + return true; + } + backtrace::resolve_frame(frame, |cb| { + if let Some(s) = cb.name() { + name = s.to_string() + } + }); + false + }); + name.split("::") + .skip_while(|s| s != &"workflows") + .skip(1) + .collect::<Vec<_>>() + .join("::") + } +} diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs new file mode 100644 index 0000000000000000000000000000000000000000..6220e3960b091dc04798283ff7239a56ffef5eb0 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -0,0 +1,63 @@ +use gh_workflow::{Env, WorkflowCallInput}; + +macro_rules! 
secret { + ($secret_name:ident) => { + pub const $secret_name: &str = concat!("${{ secrets.", stringify!($secret_name), " }}"); + }; +} + +macro_rules! var { + ($secret_name:ident) => { + pub const $secret_name: &str = concat!("${{ vars.", stringify!($secret_name), " }}"); + }; +} + +pub fn input(name: &str, input: WorkflowCallInput) -> (String, (&str, WorkflowCallInput)) { + return (format!("${{{{ inputs.{name} }}}}"), (name, input)); +} + +secret!(APPLE_NOTARIZATION_ISSUER_ID); +secret!(APPLE_NOTARIZATION_KEY); +secret!(APPLE_NOTARIZATION_KEY_ID); +secret!(AZURE_SIGNING_CLIENT_ID); +secret!(AZURE_SIGNING_CLIENT_SECRET); +secret!(AZURE_SIGNING_TENANT_ID); +secret!(CACHIX_AUTH_TOKEN); +secret!(DIGITALOCEAN_SPACES_ACCESS_KEY); +secret!(DIGITALOCEAN_SPACES_SECRET_KEY); +secret!(GITHUB_TOKEN); +secret!(MACOS_CERTIFICATE); +secret!(MACOS_CERTIFICATE_PASSWORD); +secret!(SENTRY_AUTH_TOKEN); +secret!(ZED_CLIENT_CHECKSUM_SEED); +secret!(ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON); +secret!(ZED_SENTRY_MINIDUMP_ENDPOINT); + +// todo(ci) make these secrets too... 
+var!(AZURE_SIGNING_ACCOUNT_NAME); +var!(AZURE_SIGNING_CERT_PROFILE_NAME); +var!(AZURE_SIGNING_ENDPOINT); + +pub const GITHUB_SHA: &str = "${{ github.event.pull_request.head.sha || github.sha }}"; + +pub fn mac_bundle_envs() -> Env { + Env::default() + .add("MACOS_CERTIFICATE", MACOS_CERTIFICATE) + .add("MACOS_CERTIFICATE_PASSWORD", MACOS_CERTIFICATE_PASSWORD) + .add("APPLE_NOTARIZATION_KEY", APPLE_NOTARIZATION_KEY) + .add("APPLE_NOTARIZATION_KEY_ID", APPLE_NOTARIZATION_KEY_ID) + .add("APPLE_NOTARIZATION_ISSUER_ID", APPLE_NOTARIZATION_ISSUER_ID) +} + +pub fn windows_bundle_envs() -> Env { + Env::default() + .add("AZURE_TENANT_ID", AZURE_SIGNING_TENANT_ID) + .add("AZURE_CLIENT_ID", AZURE_SIGNING_CLIENT_ID) + .add("AZURE_CLIENT_SECRET", AZURE_SIGNING_CLIENT_SECRET) + .add("ACCOUNT_NAME", AZURE_SIGNING_ACCOUNT_NAME) + .add("CERT_PROFILE_NAME", AZURE_SIGNING_CERT_PROFILE_NAME) + .add("ENDPOINT", AZURE_SIGNING_ENDPOINT) + .add("FILE_DIGEST", "SHA256") + .add("TIMESTAMP_DIGEST", "SHA256") + .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com") +}