Detailed changes
  
  
    
    @@ -1,71 +0,0 @@
-name: Run Agent Eval
-
-on:
-  schedule:
-    - cron: "0 0 * * *"
-
-  pull_request:
-    branches:
-      - "**"
-    types: [synchronize, reopened, labeled]
-
-  workflow_dispatch:
-
-concurrency:
-  # Allow only one workflow per any non-`main` branch.
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
-  cancel-in-progress: true
-
-env:
-  CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: 0
-  RUST_BACKTRACE: 1
-  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-  ZED_EVAL_TELEMETRY: 1
-
-jobs:
-  run_eval:
-    timeout-minutes: 60
-    name: Run Agent Eval
-    if: >
-      github.repository_owner == 'zed-industries' &&
-      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Compile eval
-        run: cargo build --package=eval
-
-      - name: Run eval
-        run: cargo run --package=eval -- --repetitions=8 --concurrency=1
-
-      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
-      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
-      # to clean up the config file, I've included the cleanup code here as a precaution.
-      # While it's not strictly necessary at this moment, I believe it's better to err on the side of caution.
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo
  
  
  
    
    @@ -3,10 +3,7 @@
 name: release
 env:
   CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: '0'
   RUST_BACKTRACE: '1'
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
 on:
   push:
     tags:
@@ -66,6 +63,10 @@ jobs:
     - name: steps::install_mold
       run: ./script/install-mold
       shell: bash -euxo pipefail {0}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
     - name: steps::setup_node
       uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
       with:
@@ -77,7 +78,7 @@ jobs:
       run: cargo install cargo-nextest --locked
       shell: bash -euxo pipefail {0}
     - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 100
+      run: ./script/clear-target-dir-if-larger-than 250
       shell: bash -euxo pipefail {0}
     - name: steps::cargo_nextest
       run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
@@ -177,11 +178,15 @@ jobs:
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     timeout-minutes: 60
-  bundle_linux_arm64:
+  bundle_linux_aarch64:
     needs:
     - run_tests_linux
     - check_scripts
     runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
+    env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -200,27 +205,28 @@ jobs:
     - name: ./script/bundle-linux
       run: ./script/bundle-linux
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-        path: target/release/zed-*.tar.gz
+        name: zed-linux-aarch64.tar.gz
+        path: target/release/zed-linux-aarch64.tar.gz
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-        path: target/zed-remote-server-*.gz
+        name: zed-remote-server-linux-aarch64.gz
+        path: target/zed-remote-server-linux-aarch64.gz
         if-no-files-found: error
-    outputs:
-      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
     timeout-minutes: 60
   bundle_linux_x86_64:
     needs:
     - run_tests_linux
     - check_scripts
     runs-on: namespace-profile-32x64-ubuntu-2004
+    env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -239,28 +245,28 @@ jobs:
     - name: ./script/bundle-linux
       run: ./script/bundle-linux
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-        path: target/release/zed-*.tar.gz
+        name: zed-linux-x86_64.tar.gz
+        path: target/release/zed-linux-x86_64.tar.gz
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-        path: target/zed-remote-server-*.gz
+        name: zed-remote-server-linux-x86_64.gz
+        path: target/zed-remote-server-linux-x86_64.gz
         if-no-files-found: error
-    outputs:
-      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
     timeout-minutes: 60
-  bundle_mac_arm64:
+  bundle_mac_aarch64:
     needs:
     - run_tests_mac
     - check_scripts
     runs-on: self-mini-macos
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
       MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
       APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -282,24 +288,21 @@ jobs:
     - name: steps::clear_target_dir_if_large
       run: ./script/clear-target-dir-if-larger-than 300
       shell: bash -euxo pipefail {0}
-    - name: run_bundling::bundle_mac
+    - name: run_bundling::bundle_mac::bundle_mac
       run: ./script/bundle-mac aarch64-apple-darwin
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg'
+    - name: '@actions/upload-artifact Zed-aarch64.dmg'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
-        path: target/aarch64-apple-darwin/release/Zed.dmg
+        name: Zed-aarch64.dmg
+        path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
+    - name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
+        name: zed-remote-server-macos-aarch64.gz
         path: target/zed-remote-server-macos-aarch64.gz
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
     timeout-minutes: 60
   bundle_mac_x86_64:
     needs:
@@ -307,6 +310,9 @@ jobs:
     - check_scripts
     runs-on: self-mini-macos
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
       MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
       APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -328,31 +334,31 @@ jobs:
     - name: steps::clear_target_dir_if_large
       run: ./script/clear-target-dir-if-larger-than 300
       shell: bash -euxo pipefail {0}
-    - name: run_bundling::bundle_mac
+    - name: run_bundling::bundle_mac::bundle_mac
       run: ./script/bundle-mac x86_64-apple-darwin
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg'
+    - name: '@actions/upload-artifact Zed-x86_64.dmg'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
-        path: target/x86_64-apple-darwin/release/Zed.dmg
+        name: Zed-x86_64.dmg
+        path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
+    - name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
+        name: zed-remote-server-macos-x86_64.gz
         path: target/zed-remote-server-macos-x86_64.gz
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
     timeout-minutes: 60
-  bundle_windows_arm64:
+  bundle_windows_aarch64:
     needs:
     - run_tests_windows
     - check_scripts
     runs-on: self-32vcpu-windows-2022
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
       AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
       AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -371,18 +377,16 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: run_bundling::bundle_windows
+    - name: run_bundling::bundle_windows::bundle_windows
       run: script/bundle-windows.ps1 -Architecture aarch64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe'
+    - name: '@actions/upload-artifact Zed-aarch64.exe'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
-        path: ${{ env.SETUP_PATH }}
+        name: Zed-aarch64.exe
+        path: target/Zed-aarch64.exe
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
     timeout-minutes: 60
   bundle_windows_x86_64:
     needs:
@@ -390,6 +394,9 @@ jobs:
     - check_scripts
     runs-on: self-32vcpu-windows-2022
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
       AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
       AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -408,51 +415,49 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: run_bundling::bundle_windows
+    - name: run_bundling::bundle_windows::bundle_windows
       run: script/bundle-windows.ps1 -Architecture x86_64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe'
+    - name: '@actions/upload-artifact Zed-x86_64.exe'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
-        path: ${{ env.SETUP_PATH }}
+        name: Zed-x86_64.exe
+        path: target/Zed-x86_64.exe
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
     timeout-minutes: 60
   upload_release_assets:
     needs:
     - create_draft_release
-    - bundle_linux_arm64
+    - bundle_linux_aarch64
     - bundle_linux_x86_64
-    - bundle_mac_arm64
+    - bundle_mac_aarch64
     - bundle_mac_x86_64
-    - bundle_windows_arm64
+    - bundle_windows_aarch64
     - bundle_windows_x86_64
     runs-on: namespace-profile-4x8-ubuntu-2204
     steps:
-    - name: release::upload_release_assets::download_workflow_artifacts
+    - name: release::download_workflow_artifacts
       uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
       with:
         path: ./artifacts/
     - name: ls -lR ./artifacts
       run: ls -lR ./artifacts
       shell: bash -euxo pipefail {0}
-    - name: release::upload_release_assets::prep_release_artifacts
+    - name: release::prep_release_artifacts
       run: |-
         mkdir -p release-artifacts/
 
-        mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.dmg
-        mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.dmg
-        mv ./artifacts/${{ needs.bundle_windows_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.exe
-        mv ./artifacts/${{ needs.bundle_windows_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.exe
-        mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.zed }}/* release-artifacts/zed-linux-aarch64.tar.gz
-        mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.zed }}/* release-artifacts/zed-linux-x86_64.tar.gz
-        mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-x86_64.gz
-        mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-aarch64.gz
-        mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-x86_64.gz
-        mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-aarch64.gz
+        mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
+        mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
+        mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
+        mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
+        mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
+        mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
+        mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
+        mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
+        mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
+        mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
       shell: bash -euxo pipefail {0}
     - name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
       run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
@@ -473,7 +478,7 @@ jobs:
       shell: bash -euxo pipefail {0}
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-    - name: release::auto_release_preview::create_sentry_release
+    - name: release::create_sentry_release
       uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
       with:
         environment: production
  
  
  
    
    @@ -3,12 +3,7 @@
 name: release_nightly
 env:
   CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: '0'
   RUST_BACKTRACE: '1'
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
-  DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
-  DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
 on:
   push:
     tags:
@@ -32,41 +27,6 @@ jobs:
       run: ./script/clippy
       shell: bash -euxo pipefail {0}
     timeout-minutes: 60
-  run_tests_mac:
-    if: github.repository_owner == 'zed-industries'
-    runs-on: self-mini-macos
-    steps:
-    - name: steps::checkout_repo
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        clean: false
-    - name: steps::setup_cargo_config
-      run: |
-        mkdir -p ./../.cargo
-        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-      shell: bash -euxo pipefail {0}
-    - name: steps::setup_node
-      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
-      with:
-        node-version: '20'
-    - name: steps::clippy
-      run: ./script/clippy
-      shell: bash -euxo pipefail {0}
-    - name: steps::cargo_install_nextest
-      run: cargo install cargo-nextest --locked
-      shell: bash -euxo pipefail {0}
-    - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 300
-      shell: bash -euxo pipefail {0}
-    - name: steps::cargo_nextest
-      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
-      shell: bash -euxo pipefail {0}
-    - name: steps::cleanup_cargo_config
-      if: always()
-      run: |
-        rm -rf ./../.cargo
-      shell: bash -euxo pipefail {0}
-    timeout-minutes: 60
   run_tests_windows:
     if: github.repository_owner == 'zed-industries'
     runs-on: self-32vcpu-windows-2022
@@ -102,55 +62,109 @@ jobs:
         Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
       shell: pwsh
     timeout-minutes: 60
-  bundle_mac_nightly_x86_64:
+  bundle_linux_aarch64:
     needs:
     - check_style
-    - run_tests_mac
-    if: github.repository_owner == 'zed-industries'
-    runs-on: self-mini-macos
+    - run_tests_windows
+    runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
     env:
-      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
-      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
-      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
-      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
-      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::setup_node
-      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
-      with:
-        node-version: '20'
+    - name: run_bundling::set_release_channel_to_nightly
+      run: |
+        set -eu
+        version=$(git rev-parse --short HEAD)
+        echo "Publishing version: ${version} on release channel nightly"
+        echo "nightly" > crates/zed/RELEASE_CHANNEL
+      shell: bash -euxo pipefail {0}
     - name: steps::setup_sentry
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 300
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::set_release_channel_to_nightly
+    - name: ./script/bundle-linux
+      run: ./script/bundle-linux
+      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-linux-aarch64.tar.gz
+        path: target/release/zed-linux-aarch64.tar.gz
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-linux-aarch64.gz
+        path: target/zed-remote-server-linux-aarch64.gz
+        if-no-files-found: error
+    timeout-minutes: 60
+  bundle_linux_x86_64:
+    needs:
+    - check_style
+    - run_tests_windows
+    runs-on: namespace-profile-32x64-ubuntu-2004
+    env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: run_bundling::set_release_channel_to_nightly
       run: |
         set -eu
         version=$(git rev-parse --short HEAD)
         echo "Publishing version: ${version} on release channel nightly"
         echo "nightly" > crates/zed/RELEASE_CHANNEL
       shell: bash -euxo pipefail {0}
-    - name: run_bundling::bundle_mac
-      run: ./script/bundle-mac x86_64-apple-darwin
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: steps::setup_linux
+      run: ./script/linux
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::upload_zed_nightly
-      run: script/upload-nightly macos x86_64
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: ./script/bundle-linux
+      run: ./script/bundle-linux
       shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-linux-x86_64.tar.gz
+        path: target/release/zed-linux-x86_64.tar.gz
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-linux-x86_64.gz
+        path: target/zed-remote-server-linux-x86_64.gz
+        if-no-files-found: error
     timeout-minutes: 60
-  bundle_mac_nightly_aarch64:
+  bundle_mac_aarch64:
     needs:
     - check_style
-    - run_tests_mac
-    if: github.repository_owner == 'zed-industries'
+    - run_tests_windows
     runs-on: self-mini-macos
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
       MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
       APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -161,6 +175,13 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
+    - name: run_bundling::set_release_channel_to_nightly
+      run: |
+        set -eu
+        version=$(git rev-parse --short HEAD)
+        echo "Publishing version: ${version} on release channel nightly"
+        echo "nightly" > crates/zed/RELEASE_CHANNEL
+      shell: bash -euxo pipefail {0}
     - name: steps::setup_node
       uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
       with:
@@ -172,100 +193,84 @@ jobs:
     - name: steps::clear_target_dir_if_large
       run: ./script/clear-target-dir-if-larger-than 300
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::set_release_channel_to_nightly
-      run: |
-        set -eu
-        version=$(git rev-parse --short HEAD)
-        echo "Publishing version: ${version} on release channel nightly"
-        echo "nightly" > crates/zed/RELEASE_CHANNEL
-      shell: bash -euxo pipefail {0}
-    - name: run_bundling::bundle_mac
+    - name: run_bundling::bundle_mac::bundle_mac
       run: ./script/bundle-mac aarch64-apple-darwin
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::upload_zed_nightly
-      run: script/upload-nightly macos aarch64
-      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact Zed-aarch64.dmg'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed-aarch64.dmg
+        path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-macos-aarch64.gz
+        path: target/zed-remote-server-macos-aarch64.gz
+        if-no-files-found: error
     timeout-minutes: 60
-  bundle_linux_nightly_x86_64:
+  bundle_mac_x86_64:
     needs:
     - check_style
-    - run_tests_mac
-    if: github.repository_owner == 'zed-industries'
-    runs-on: namespace-profile-32x64-ubuntu-2004
+    - run_tests_windows
+    runs-on: self-mini-macos
+    env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
+      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
+      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::setup_sentry
-      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
-      with:
-        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: ./script/linux
-      run: ./script/linux
-      shell: bash -euxo pipefail {0}
-    - name: ./script/install-mold
-      run: ./script/install-mold
-      shell: bash -euxo pipefail {0}
-    - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 100
-      shell: bash -euxo pipefail {0}
-    - name: release_nightly::set_release_channel_to_nightly
+    - name: run_bundling::set_release_channel_to_nightly
       run: |
         set -eu
         version=$(git rev-parse --short HEAD)
         echo "Publishing version: ${version} on release channel nightly"
         echo "nightly" > crates/zed/RELEASE_CHANNEL
       shell: bash -euxo pipefail {0}
-    - name: ./script/bundle-linux
-      run: ./script/bundle-linux
-      shell: bash -euxo pipefail {0}
-    - name: release_nightly::upload_zed_nightly
-      run: script/upload-nightly linux-targz x86_64
-      shell: bash -euxo pipefail {0}
-    timeout-minutes: 60
-  bundle_linux_nightly_aarch64:
-    needs:
-    - check_style
-    - run_tests_mac
-    if: github.repository_owner == 'zed-industries'
-    runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
-    steps:
-    - name: steps::checkout_repo
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
       with:
-        clean: false
+        node-version: '20'
     - name: steps::setup_sentry
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: ./script/linux
-      run: ./script/linux
-      shell: bash -euxo pipefail {0}
     - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 100
-      shell: bash -euxo pipefail {0}
-    - name: release_nightly::set_release_channel_to_nightly
-      run: |
-        set -eu
-        version=$(git rev-parse --short HEAD)
-        echo "Publishing version: ${version} on release channel nightly"
-        echo "nightly" > crates/zed/RELEASE_CHANNEL
-      shell: bash -euxo pipefail {0}
-    - name: ./script/bundle-linux
-      run: ./script/bundle-linux
+      run: ./script/clear-target-dir-if-larger-than 300
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::upload_zed_nightly
-      run: script/upload-nightly linux-targz aarch64
+    - name: run_bundling::bundle_mac::bundle_mac
+      run: ./script/bundle-mac x86_64-apple-darwin
       shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact Zed-x86_64.dmg'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed-x86_64.dmg
+        path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-macos-x86_64.gz
+        path: target/zed-remote-server-macos-x86_64.gz
+        if-no-files-found: error
     timeout-minutes: 60
-  bundle_windows_nightly_x86_64:
+  bundle_windows_aarch64:
     needs:
     - check_style
     - run_tests_windows
-    if: github.repository_owner == 'zed-industries'
     runs-on: self-32vcpu-windows-2022
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
       AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
       AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -280,11 +285,7 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::setup_sentry
-      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
-      with:
-        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: release_nightly::set_release_channel_to_nightly
+    - name: run_bundling::set_release_channel_to_nightly
       run: |
         $ErrorActionPreference = "Stop"
         $version = git rev-parse --short HEAD
@@ -292,22 +293,30 @@ jobs:
         "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: run_bundling::bundle_windows
-      run: script/bundle-windows.ps1 -Architecture x86_64
-      shell: pwsh
-      working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: release_nightly::upload_zed_nightly
-      run: script/upload-nightly.ps1 -Architecture x86_64
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: run_bundling::bundle_windows::bundle_windows
+      run: script/bundle-windows.ps1 -Architecture aarch64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
+    - name: '@actions/upload-artifact Zed-aarch64.exe'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed-aarch64.exe
+        path: target/Zed-aarch64.exe
+        if-no-files-found: error
     timeout-minutes: 60
-  bundle_windows_nightly_aarch64:
+  bundle_windows_x86_64:
     needs:
     - check_style
     - run_tests_windows
-    if: github.repository_owner == 'zed-industries'
     runs-on: self-32vcpu-windows-2022
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
       AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
       AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -322,11 +331,7 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::setup_sentry
-      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
-      with:
-        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: release_nightly::set_release_channel_to_nightly
+    - name: run_bundling::set_release_channel_to_nightly
       run: |
         $ErrorActionPreference = "Stop"
         $version = git rev-parse --short HEAD
@@ -334,19 +339,25 @@ jobs:
         "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: run_bundling::bundle_windows
-      run: script/bundle-windows.ps1 -Architecture aarch64
-      shell: pwsh
-      working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: release_nightly::upload_zed_nightly
-      run: script/upload-nightly.ps1 -Architecture aarch64
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: run_bundling::bundle_windows::bundle_windows
+      run: script/bundle-windows.ps1 -Architecture x86_64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
+    - name: '@actions/upload-artifact Zed-x86_64.exe'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed-x86_64.exe
+        path: target/Zed-x86_64.exe
+        if-no-files-found: error
     timeout-minutes: 60
   build_nix_linux_x86_64:
     needs:
     - check_style
-    - run_tests_mac
+    - run_tests_windows
     if: github.repository_owner == 'zed-industries'
     runs-on: namespace-profile-32x64-ubuntu-2004
     env:
@@ -377,7 +388,7 @@ jobs:
   build_nix_mac_aarch64:
     needs:
     - check_style
-    - run_tests_mac
+    - run_tests_windows
     if: github.repository_owner == 'zed-industries'
     runs-on: self-mini-macos
     env:
@@ -414,20 +425,48 @@ jobs:
     continue-on-error: true
   update_nightly_tag:
     needs:
-    - bundle_mac_nightly_x86_64
-    - bundle_mac_nightly_aarch64
-    - bundle_linux_nightly_x86_64
-    - bundle_linux_nightly_aarch64
-    - bundle_windows_nightly_x86_64
-    - bundle_windows_nightly_aarch64
+    - bundle_linux_aarch64
+    - bundle_linux_x86_64
+    - bundle_mac_aarch64
+    - bundle_mac_x86_64
+    - bundle_windows_aarch64
+    - bundle_windows_x86_64
     if: github.repository_owner == 'zed-industries'
-    runs-on: namespace-profile-2x4-ubuntu-2404
+    runs-on: namespace-profile-4x8-ubuntu-2204
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
         fetch-depth: 0
+    - name: release::download_workflow_artifacts
+      uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
+      with:
+        path: ./artifacts/
+    - name: ls -lR ./artifacts
+      run: ls -lR ./artifacts
+      shell: bash -euxo pipefail {0}
+    - name: release::prep_release_artifacts
+      run: |-
+        mkdir -p release-artifacts/
+
+        mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
+        mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
+        mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
+        mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
+        mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
+        mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
+        mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
+        mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
+        mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
+        mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
+      shell: bash -euxo pipefail {0}
+    - name: ./script/upload-nightly
+      run: ./script/upload-nightly
+      shell: bash -euxo pipefail {0}
+      env:
+        DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
+        DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
     - name: release_nightly::update_nightly_tag_job::update_nightly_tag
       run: |
         if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
@@ -439,7 +478,7 @@ jobs:
         git tag -f nightly
         git push origin nightly --force
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::update_nightly_tag_job::create_sentry_release
+    - name: release::create_sentry_release
       uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
       with:
         environment: production
  
  
  
    
    @@ -0,0 +1,62 @@
+# Generated from xtask::workflows::run_agent_evals
+# Rebuild with `cargo xtask workflows`.
+name: run_agent_evals
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: '0'
+  RUST_BACKTRACE: '1'
+  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+  ZED_EVAL_TELEMETRY: '1'
+on:
+  pull_request:
+    types:
+    - synchronize
+    - reopened
+    - labeled
+    branches:
+    - '**'
+  schedule:
+  - cron: "0 0 * * *"
+  workflow_dispatch: {}
+jobs:
+  agent_evals:
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: cargo build --package=eval
+      run: cargo build --package=eval
+      shell: bash -euxo pipefail {0}
+    - name: run_agent_evals::agent_evals::run_eval
+      run: cargo run --package=eval -- --repetitions=8 --concurrency=1
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true
  
  
  
    
    @@ -3,192 +3,194 @@
 name: run_bundling
 env:
   CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: '0'
   RUST_BACKTRACE: '1'
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
 on:
   pull_request:
     types:
     - labeled
     - synchronize
 jobs:
-  bundle_mac_x86_64:
+  bundle_linux_aarch64:
     if: |-
       (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                        (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
-    runs-on: self-mini-macos
+    runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
     env:
-      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
-      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
-      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
-      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
-      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::setup_node
-      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
-      with:
-        node-version: '20'
     - name: steps::setup_sentry
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 300
+    - name: steps::setup_linux
+      run: ./script/linux
       shell: bash -euxo pipefail {0}
-    - name: run_bundling::bundle_mac
-      run: ./script/bundle-mac x86_64-apple-darwin
+    - name: steps::install_mold
+      run: ./script/install-mold
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg'
+    - name: ./script/bundle-linux
+      run: ./script/bundle-linux
+      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
-        path: target/x86_64-apple-darwin/release/Zed.dmg
+        name: zed-linux-aarch64.tar.gz
+        path: target/release/zed-linux-aarch64.tar.gz
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
+    - name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
-        path: target/zed-remote-server-macos-x86_64.gz
+        name: zed-remote-server-linux-aarch64.gz
+        path: target/zed-remote-server-linux-aarch64.gz
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
     timeout-minutes: 60
-  bundle_mac_arm64:
+  bundle_linux_x86_64:
     if: |-
       (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                        (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
-    runs-on: self-mini-macos
+    runs-on: namespace-profile-32x64-ubuntu-2004
     env:
-      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
-      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
-      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
-      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
-      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::setup_node
-      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
-      with:
-        node-version: '20'
     - name: steps::setup_sentry
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 300
+    - name: steps::setup_linux
+      run: ./script/linux
       shell: bash -euxo pipefail {0}
-    - name: run_bundling::bundle_mac
-      run: ./script/bundle-mac aarch64-apple-darwin
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: ./script/bundle-linux
+      run: ./script/bundle-linux
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg'
+    - name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
-        path: target/aarch64-apple-darwin/release/Zed.dmg
+        name: zed-linux-x86_64.tar.gz
+        path: target/release/zed-linux-x86_64.tar.gz
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
+    - name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
-        path: target/zed-remote-server-macos-aarch64.gz
+        name: zed-remote-server-linux-x86_64.gz
+        path: target/zed-remote-server-linux-x86_64.gz
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
     timeout-minutes: 60
-  bundle_linux_x86_64:
+  bundle_mac_aarch64:
     if: |-
       (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                        (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
-    runs-on: namespace-profile-32x64-ubuntu-2004
+    runs-on: self-mini-macos
+    env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
+      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
+      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
     - name: steps::setup_sentry
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: steps::setup_linux
-      run: ./script/linux
-      shell: bash -euxo pipefail {0}
-    - name: steps::install_mold
-      run: ./script/install-mold
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 300
       shell: bash -euxo pipefail {0}
-    - name: ./script/bundle-linux
-      run: ./script/bundle-linux
+    - name: run_bundling::bundle_mac::bundle_mac
+      run: ./script/bundle-mac aarch64-apple-darwin
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact Zed-aarch64.dmg'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-        path: target/release/zed-*.tar.gz
+        name: Zed-aarch64.dmg
+        path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-        path: target/zed-remote-server-*.gz
+        name: zed-remote-server-macos-aarch64.gz
+        path: target/zed-remote-server-macos-aarch64.gz
         if-no-files-found: error
-    outputs:
-      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
     timeout-minutes: 60
-  bundle_linux_arm64:
+  bundle_mac_x86_64:
     if: |-
       (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                        (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
-    runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
+    runs-on: self-mini-macos
+    env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
+      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
+      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
     - name: steps::setup_sentry
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: steps::setup_linux
-      run: ./script/linux
-      shell: bash -euxo pipefail {0}
-    - name: steps::install_mold
-      run: ./script/install-mold
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 300
       shell: bash -euxo pipefail {0}
-    - name: ./script/bundle-linux
-      run: ./script/bundle-linux
+    - name: run_bundling::bundle_mac::bundle_mac
+      run: ./script/bundle-mac x86_64-apple-darwin
       shell: bash -euxo pipefail {0}
-    - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact Zed-x86_64.dmg'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-        path: target/release/zed-*.tar.gz
+        name: Zed-x86_64.dmg
+        path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
         if-no-files-found: error
-    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
+    - name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-        path: target/zed-remote-server-*.gz
+        name: zed-remote-server-macos-x86_64.gz
+        path: target/zed-remote-server-macos-x86_64.gz
         if-no-files-found: error
-    outputs:
-      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
     timeout-minutes: 60
-  bundle_windows_x86_64:
+  bundle_windows_aarch64:
     if: |-
       (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                        (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
     runs-on: self-32vcpu-windows-2022
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
       AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
       AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -207,25 +209,26 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: run_bundling::bundle_windows
-      run: script/bundle-windows.ps1 -Architecture x86_64
+    - name: run_bundling::bundle_windows::bundle_windows
+      run: script/bundle-windows.ps1 -Architecture aarch64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe'
+    - name: '@actions/upload-artifact Zed-aarch64.exe'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
-        path: ${{ env.SETUP_PATH }}
+        name: Zed-aarch64.exe
+        path: target/Zed-aarch64.exe
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
     timeout-minutes: 60
-  bundle_windows_arm64:
+  bundle_windows_x86_64:
     if: |-
       (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                        (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
     runs-on: self-32vcpu-windows-2022
     env:
+      CARGO_INCREMENTAL: 0
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
       AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
       AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -244,18 +247,16 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: run_bundling::bundle_windows
-      run: script/bundle-windows.ps1 -Architecture aarch64
+    - name: run_bundling::bundle_windows::bundle_windows
+      run: script/bundle-windows.ps1 -Architecture x86_64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe'
+    - name: '@actions/upload-artifact Zed-x86_64.exe'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
-        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
-        path: ${{ env.SETUP_PATH }}
+        name: Zed-x86_64.exe
+        path: target/Zed-x86_64.exe
         if-no-files-found: error
-    outputs:
-      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
     timeout-minutes: 60
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
  
  
  
    
    @@ -66,6 +66,10 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
     - name: steps::setup_pnpm
       uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
       with:
@@ -145,6 +149,10 @@ jobs:
     - name: steps::install_mold
       run: ./script/install-mold
       shell: bash -euxo pipefail {0}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
     - name: steps::setup_node
       uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
       with:
@@ -156,7 +164,7 @@ jobs:
       run: cargo install cargo-nextest --locked
       shell: bash -euxo pipefail {0}
     - name: steps::clear_target_dir_if_large
-      run: ./script/clear-target-dir-if-larger-than 100
+      run: ./script/clear-target-dir-if-larger-than 250
       shell: bash -euxo pipefail {0}
     - name: steps::cargo_nextest
       run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
@@ -214,10 +222,10 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: steps::cache_rust_dependencies
-      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
       with:
-        save-if: ${{ github.ref == 'refs/heads/main' }}
+        cache: rust
     - name: steps::setup_linux
       run: ./script/linux
       shell: bash -euxo pipefail {0}
@@ -261,6 +269,10 @@ jobs:
     - name: steps::install_mold
       run: ./script/install-mold
       shell: bash -euxo pipefail {0}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
     - name: cargo build -p collab
       run: cargo build -p collab
       shell: bash -euxo pipefail {0}
@@ -317,6 +329,10 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
     - name: run_tests::check_dependencies::install_cargo_machete
       uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
       with:
@@ -350,10 +366,10 @@ jobs:
         mkdir -p ./../.cargo
         cp ./.cargo/ci-config.toml ./../.cargo/config.toml
       shell: bash -euxo pipefail {0}
-    - name: steps::cache_rust_dependencies
-      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
       with:
-        save-if: ${{ github.ref == 'refs/heads/main' }}
+        cache: rust
     - name: run_tests::check_docs::lychee_link_check
       uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
       with:
@@ -392,6 +408,10 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
     - name: ./script/check-licenses
       run: ./script/check-licenses
       shell: bash -euxo pipefail {0}
  
  
  
    
    @@ -0,0 +1,63 @@
+# Generated from xtask::workflows::run_agent_evals
+# Rebuild with `cargo xtask workflows`.
+name: run_agent_evals
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: '0'
+  RUST_BACKTRACE: '1'
+  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+on:
+  schedule:
+  - cron: 47 1 * * 2
+  workflow_dispatch: {}
+jobs:
+  unit_evals:
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: bash -euxo pipefail {0}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 250
+      shell: bash -euxo pipefail {0}
+    - name: ./script/run-unit-evals
+      run: ./script/run-unit-evals
+      shell: bash -euxo pipefail {0}
+      env:
+        ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+    - name: run_agent_evals::unit_evals::send_failure_to_slack
+      if: ${{ failure() }}
+      uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
+      with:
+        method: chat.postMessage
+        token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
+        payload: |
+          channel: C04UDRNNJFQ
+          text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true
  
  
  
    
    @@ -1,86 +0,0 @@
-name: Run Unit Evals
-
-on:
-  schedule:
-    # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
-    - cron: "47 1 * * 2"
-  workflow_dispatch:
-
-concurrency:
-  # Allow only one workflow per any non-`main` branch.
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
-  cancel-in-progress: true
-
-env:
-  CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: 0
-  RUST_BACKTRACE: 1
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-
-jobs:
-  unit_evals:
-    if: github.repository_owner == 'zed-industries'
-    timeout-minutes: 60
-    name: Run unit evals
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Install Rust
-        shell: bash -euxo pipefail {0}
-        run: |
-          cargo install cargo-nextest --locked
-
-      - name: Install Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
-        with:
-          node-version: "18"
-
-      - name: Limit target directory size
-        shell: bash -euxo pipefail {0}
-        run: script/clear-target-dir-if-larger-than 100
-
-      - name: Run unit evals
-        shell: bash -euxo pipefail {0}
-        run: cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)'
-        env:
-          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
-
-      - name: Send failure message to Slack channel if needed
-        if: ${{ failure() }}
-        uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
-        with:
-          method: chat.postMessage
-          token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
-          payload: |
-            channel: C04UDRNNJFQ
-            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
-
-      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
-      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
-      # to clean up the config file, Iβve included the cleanup code here as a precaution.
-      # While itβs not strictly necessary at this moment, I believe itβs better to err on the side of caution.
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo
  
  
  
    
    @@ -1339,6 +1339,7 @@ dependencies = [
  "settings",
  "smol",
  "tempfile",
+ "util",
  "which 6.0.3",
  "workspace",
 ]
@@ -4935,6 +4936,7 @@ dependencies = [
  "editor",
  "gpui",
  "indoc",
+ "itertools 0.14.0",
  "language",
  "log",
  "lsp",
@@ -7077,6 +7079,7 @@ dependencies = [
  "serde_json",
  "settings",
  "url",
+ "urlencoding",
  "util",
 ]
 
@@ -12714,12 +12717,6 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7"
 
-[[package]]
-name = "pollster"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3"
-
 [[package]]
 name = "portable-atomic"
 version = "1.11.1"
@@ -12768,7 +12765,7 @@ dependencies = [
  "log",
  "parking_lot",
  "pin-project",
- "pollster 0.2.5",
+ "pollster",
  "static_assertions",
  "thiserror 1.0.69",
 ]
@@ -14320,6 +14317,7 @@ dependencies = [
  "gpui",
  "log",
  "rand 0.9.2",
+ "rayon",
  "sum_tree",
  "unicode-segmentation",
  "util",
@@ -16245,7 +16243,6 @@ checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520"
 name = "streaming_diff"
 version = "0.1.0"
 dependencies = [
- "gpui",
  "ordered-float 2.10.1",
  "rand 0.9.2",
  "rope",
@@ -16364,11 +16361,9 @@ version = "0.1.0"
 dependencies = [
  "arrayvec",
  "ctor",
- "futures 0.3.31",
- "itertools 0.14.0",
  "log",
- "pollster 0.4.0",
  "rand 0.9.2",
+ "rayon",
  "zlog",
 ]
 
@@ -18053,7 +18048,7 @@ dependencies = [
 [[package]]
 name = "tree-sitter-gomod"
 version = "1.1.1"
-source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c#6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c"
+source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=2e886870578eeba1927a2dc4bd2e2b3f598c5f9a#2e886870578eeba1927a2dc4bd2e2b3f598c5f9a"
 dependencies = [
  "cc",
  "tree-sitter-language",
@@ -21231,6 +21226,7 @@ dependencies = [
  "project_symbols",
  "prompt_store",
  "proto",
+ "rayon",
  "recent_projects",
  "release_channel",
  "remote",
  
  
  
    
    @@ -680,7 +680,7 @@ tree-sitter-elixir = "0.3"
 tree-sitter-embedded-template = "0.23.0"
 tree-sitter-gitcommit = { git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9" }
 tree-sitter-go = "0.23"
-tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" }
+tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "2e886870578eeba1927a2dc4bd2e2b3f598c5f9a", package = "tree-sitter-gomod" }
 tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" }
 tree-sitter-heex = { git = "https://github.com/zed-industries/tree-sitter-heex", rev = "1dd45142fbb05562e35b2040c6129c9bca346592" }
 tree-sitter-html = "0.23"
  
  
  
    
    @@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M11.3335 13.3333L8.00017 10L4.66685 13.3333" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M11.3335 2.66669L8.00017 6.00002L4.66685 2.66669" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
  
  
  
    
    @@ -407,6 +407,7 @@
     "bindings": {
       "escape": "project_search::ToggleFocus",
       "shift-find": "search::FocusSearch",
+      "shift-enter": "project_search::ToggleAllSearchResults",
       "ctrl-shift-f": "search::FocusSearch",
       "ctrl-shift-h": "search::ToggleReplace",
       "alt-ctrl-g": "search::ToggleRegex",
@@ -479,6 +480,7 @@
       "alt-w": "search::ToggleWholeWord",
       "alt-find": "project_search::ToggleFilters",
       "alt-ctrl-f": "project_search::ToggleFilters",
+      "shift-enter": "project_search::ToggleAllSearchResults",
       "ctrl-alt-shift-r": "search::ToggleRegex",
       "ctrl-alt-shift-x": "search::ToggleRegex",
       "alt-r": "search::ToggleRegex",
  
  
  
    
    @@ -468,6 +468,7 @@
     "bindings": {
       "escape": "project_search::ToggleFocus",
       "cmd-shift-j": "project_search::ToggleFilters",
+      "shift-enter": "project_search::ToggleAllSearchResults",
       "cmd-shift-f": "search::FocusSearch",
       "cmd-shift-h": "search::ToggleReplace",
       "alt-cmd-g": "search::ToggleRegex",
@@ -496,6 +497,7 @@
     "bindings": {
       "escape": "project_search::ToggleFocus",
       "cmd-shift-j": "project_search::ToggleFilters",
+      "shift-enter": "project_search::ToggleAllSearchResults",
       "cmd-shift-h": "search::ToggleReplace",
       "alt-cmd-g": "search::ToggleRegex",
       "alt-cmd-x": "search::ToggleRegex"
  
  
  
    
    @@ -488,6 +488,7 @@
       "alt-c": "search::ToggleCaseSensitive",
       "alt-w": "search::ToggleWholeWord",
       "alt-f": "project_search::ToggleFilters",
+      "shift-enter": "project_search::ToggleAllSearchResults",
       "alt-r": "search::ToggleRegex",
       // "ctrl-shift-alt-x": "search::ToggleRegex",
       "ctrl-k shift-enter": "pane::TogglePinTab"
  
  
  
    
    @@ -255,6 +255,19 @@
   // Whether to display inline and alongside documentation for items in the
   // completions menu
   "show_completion_documentation": true,
+  // When to show the scrollbar in the completion menu.
+  // This setting can take four values:
+  //
+  // 1. Show the scrollbar if there's important information or
+  //    follow the system's configured behavior
+  //   "auto"
+  // 2. Match the system's configured behavior:
+  //    "system"
+  // 3. Always show the scrollbar:
+  //    "always"
+  // 4. Never show the scrollbar:
+  //    "never" (default)
+  "completion_menu_scrollbar": "never",
   // Show method signatures in the editor, when inside parentheses.
   "auto_signature_help": false,
   // Whether to show the signature help after completion or a bracket pair inserted.
@@ -602,7 +615,9 @@
     "whole_word": false,
     "case_sensitive": false,
     "include_ignored": false,
-    "regex": false
+    "regex": false,
+    // Whether to center the cursor on each search match when navigating.
+    "center_on_match": false
   },
   // When to populate a new search's query based on the text under the cursor.
   // This setting can take the following three values:
@@ -1719,6 +1734,9 @@
         "allowed": true
       }
     },
+    "HTML+ERB": {
+      "language_servers": ["herb", "!ruby-lsp", "..."]
+    },
     "Java": {
       "prettier": {
         "allowed": true,
@@ -1741,6 +1759,9 @@
         "allowed": true
       }
     },
+    "JS+ERB": {
+      "language_servers": ["!ruby-lsp", "..."]
+    },
     "Kotlin": {
       "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."]
     },
@@ -1755,6 +1776,7 @@
     "Markdown": {
       "format_on_save": "off",
       "use_on_type_format": false,
+      "remove_trailing_whitespace_on_save": false,
       "allow_rewrap": "anywhere",
       "soft_wrap": "editor_width",
       "prettier": {
@@ -1845,6 +1867,9 @@
         "allowed": true
       }
     },
+    "YAML+ERB": {
+      "language_servers": ["!ruby-lsp", "..."]
+    },
     "Zig": {
       "language_servers": ["zls", "..."]
     }
  
  
  
    
    @@ -3,7 +3,6 @@ mod diff;
 mod mention;
 mod terminal;
 
-use ::terminal::terminal_settings::TerminalSettings;
 use agent_settings::AgentSettings;
 use collections::HashSet;
 pub use connection::*;
@@ -12,7 +11,7 @@ use language::language_settings::FormatOnSave;
 pub use mention::*;
 use project::lsp_store::{FormatTrigger, LspFormatTarget};
 use serde::{Deserialize, Serialize};
-use settings::{Settings as _, SettingsLocation};
+use settings::Settings as _;
 use task::{Shell, ShellBuilder};
 pub use terminal::*;
 
@@ -2141,17 +2140,9 @@ impl AcpThread {
     ) -> Task<Result<Entity<Terminal>>> {
         let env = match &cwd {
             Some(dir) => self.project.update(cx, |project, cx| {
-                let worktree = project.find_worktree(dir.as_path(), cx);
-                let shell = TerminalSettings::get(
-                    worktree.as_ref().map(|(worktree, path)| SettingsLocation {
-                        worktree_id: worktree.read(cx).id(),
-                        path: &path,
-                    }),
-                    cx,
-                )
-                .shell
-                .clone();
-                project.directory_environment(&shell, dir.as_path().into(), cx)
+                project.environment().update(cx, |env, cx| {
+                    env.directory_environment(dir.as_path().into(), cx)
+                })
             }),
             None => Task::ready(None).shared(),
         };
  
  
  
    
    @@ -361,12 +361,10 @@ async fn build_buffer_diff(
 ) -> Result<Entity<BufferDiff>> {
     let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
 
-    let executor = cx.background_executor().clone();
     let old_text_rope = cx
         .background_spawn({
             let old_text = old_text.clone();
-            let executor = executor.clone();
-            async move { Rope::from_str(old_text.as_str(), &executor) }
+            async move { Rope::from(old_text.as_str()) }
         })
         .await;
     let base_buffer = cx
  
  
  
    
    @@ -5,10 +5,8 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, Task};
 use language::LanguageRegistry;
 use markdown::Markdown;
 use project::Project;
-use settings::{Settings as _, SettingsLocation};
 use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant};
 use task::Shell;
-use terminal::terminal_settings::TerminalSettings;
 use util::get_default_system_shell_preferring_bash;
 
 pub struct Terminal {
@@ -187,17 +185,9 @@ pub async fn create_terminal_entity(
     let mut env = if let Some(dir) = &cwd {
         project
             .update(cx, |project, cx| {
-                let worktree = project.find_worktree(dir.as_path(), cx);
-                let shell = TerminalSettings::get(
-                    worktree.as_ref().map(|(worktree, path)| SettingsLocation {
-                        worktree_id: worktree.read(cx).id(),
-                        path: &path,
-                    }),
-                    cx,
-                )
-                .shell
-                .clone();
-                project.directory_environment(&shell, dir.clone().into(), cx)
+                project.environment().update(cx, |env, cx| {
+                    env.directory_environment(dir.clone().into(), cx)
+                })
             })?
             .await
             .unwrap_or_default()
  
  
  
    
    @@ -19,7 +19,7 @@ use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
 use project::Project;
 use settings::Settings;
 use theme::ThemeSettings;
-use ui::{Tooltip, prelude::*};
+use ui::{Tooltip, WithScrollbar, prelude::*};
 use util::ResultExt as _;
 use workspace::{
     Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
@@ -291,17 +291,19 @@ impl AcpTools {
         let expanded = self.expanded.contains(&index);
 
         v_flex()
+            .id(index)
+            .group("message")
+            .cursor_pointer()
+            .font_buffer(cx)
             .w_full()
-            .px_4()
             .py_3()
-            .border_color(colors.border)
-            .border_b_1()
+            .pl_4()
+            .pr_5()
             .gap_2()
             .items_start()
-            .font_buffer(cx)
             .text_size(base_size)
-            .id(index)
-            .group("message")
+            .border_color(colors.border)
+            .border_b_1()
             .hover(|this| this.bg(colors.element_background.opacity(0.5)))
             .on_click(cx.listener(move |this, _, _, cx| {
                 if this.expanded.contains(&index) {
@@ -323,15 +325,14 @@ impl AcpTools {
                 h_flex()
                     .w_full()
                     .gap_2()
-                    .items_center()
                     .flex_shrink_0()
                     .child(match message.direction {
-                        acp::StreamMessageDirection::Incoming => {
-                            ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error)
-                        }
-                        acp::StreamMessageDirection::Outgoing => {
-                            ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success)
-                        }
+                        acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown)
+                            .color(Color::Error)
+                            .size(IconSize::Small),
+                        acp::StreamMessageDirection::Outgoing => Icon::new(IconName::ArrowUp)
+                            .color(Color::Success)
+                            .size(IconSize::Small),
                     })
                     .child(
                         Label::new(message.name.clone())
@@ -501,7 +502,7 @@ impl Focusable for AcpTools {
 }
 
 impl Render for AcpTools {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         v_flex()
             .track_focus(&self.focus_handle)
             .size_full()
@@ -516,13 +517,19 @@ impl Render for AcpTools {
                             .child("No messages recorded yet")
                             .into_any()
                     } else {
-                        list(
-                            connection.list_state.clone(),
-                            cx.processor(Self::render_message),
-                        )
-                        .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
-                        .flex_grow()
-                        .into_any()
+                        div()
+                            .size_full()
+                            .flex_grow()
+                            .child(
+                                list(
+                                    connection.list_state.clone(),
+                                    cx.processor(Self::render_message),
+                                )
+                                .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
+                                .size_full(),
+                            )
+                            .vertical_scrollbar_for(connection.list_state.clone(), window, cx)
+                            .into_any()
                     }
                 }
                 None => h_flex()
  
  
  
    
    @@ -3,9 +3,7 @@ use buffer_diff::BufferDiff;
 use clock;
 use collections::BTreeMap;
 use futures::{FutureExt, StreamExt, channel::mpsc};
-use gpui::{
-    App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, Subscription, Task, WeakEntity,
-};
+use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
 use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
 use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
 use std::{cmp, ops::Range, sync::Arc};
@@ -323,7 +321,6 @@ impl ActionLog {
                 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                 let mut has_user_changes = false;
-                let executor = cx.background_executor().clone();
                 async move {
                     if let ChangeAuthor::User = author {
                         has_user_changes = apply_non_conflicting_edits(
@@ -331,7 +328,6 @@ impl ActionLog {
                             edits,
                             &mut base_text,
                             new_snapshot.as_rope(),
-                            &executor,
                         );
                     }
 
@@ -386,7 +382,6 @@ impl ActionLog {
                 let agent_diff_base = tracked_buffer.diff_base.clone();
                 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
                 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
-                let executor = cx.background_executor().clone();
                 anyhow::Ok(cx.background_spawn(async move {
                     let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                     let committed_edits = language::line_diff(
@@ -421,11 +416,8 @@ impl ActionLog {
                                             ),
                                             new_agent_diff_base.max_point(),
                                         ));
-                                    new_agent_diff_base.replace(
-                                        old_byte_start..old_byte_end,
-                                        &unreviewed_new,
-                                        &executor,
-                                    );
+                                    new_agent_diff_base
+                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                     row_delta +=
                                         unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                 }
@@ -619,7 +611,6 @@ impl ActionLog {
                                 .snapshot
                                 .text_for_range(new_range)
                                 .collect::<String>(),
-                            cx.background_executor(),
                         );
                         delta += edit.new_len() as i32 - edit.old_len() as i32;
                         false
@@ -833,7 +824,6 @@ fn apply_non_conflicting_edits(
     edits: Vec<Edit<u32>>,
     old_text: &mut Rope,
     new_text: &Rope,
-    executor: &BackgroundExecutor,
 ) -> bool {
     let mut old_edits = patch.edits().iter().cloned().peekable();
     let mut new_edits = edits.into_iter().peekable();
@@ -887,7 +877,6 @@ fn apply_non_conflicting_edits(
             old_text.replace(
                 old_bytes,
                 &new_text.chunks_in_range(new_bytes).collect::<String>(),
-                executor,
             );
             applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
             has_made_changes = true;
@@ -2293,7 +2282,6 @@ mod tests {
                     old_text.replace(
                         old_start..old_end,
                         &new_text.slice_rows(edit.new.clone()).to_string(),
-                        cx.background_executor(),
                     );
                 }
                 pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
  
  
  
    
    @@ -13,7 +13,15 @@ const EDITS_END_TAG: &str = "</edits>";
 const SEARCH_MARKER: &str = "<<<<<<< SEARCH";
 const SEPARATOR_MARKER: &str = "=======";
 const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
-const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG];
+const SONNET_PARAMETER_INVOKE_1: &str = "</parameter>\n</invoke>";
+const SONNET_PARAMETER_INVOKE_2: &str = "</parameter></invoke>";
+const END_TAGS: [&str; 5] = [
+    OLD_TEXT_END_TAG,
+    NEW_TEXT_END_TAG,
+    EDITS_END_TAG,
+    SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call
+    SONNET_PARAMETER_INVOKE_2,
+];
 
 #[derive(Debug)]
 pub enum EditParserEvent {
@@ -547,6 +555,37 @@ mod tests {
         );
     }
 
+    #[gpui::test(iterations = 1000)]
+    fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) {
+        // This case is a regression with Claude Sonnet 4.5.
+        // Sometimes Sonnet thinks that it's doing a tool call
+        // and closes its response with '</parameter></invoke>'
+        // instead of properly closing </new_text>
+
+        let mut parser = EditParser::new(EditFormat::XmlTags);
+        assert_eq!(
+            parse_random_chunks(
+                indoc! {"
+                    <old_text>some text</old_text><new_text>updated text</parameter></invoke>
+                "},
+                &mut parser,
+                &mut rng
+            ),
+            vec![Edit {
+                old_text: "some text".to_string(),
+                new_text: "updated text".to_string(),
+                line_hint: None,
+            },]
+        );
+        assert_eq!(
+            parser.finish(),
+            EditParserMetrics {
+                tags: 2,
+                mismatched_tags: 1
+            }
+        );
+    }
+
     #[gpui::test(iterations = 1000)]
     fn test_xml_nested_tags(mut rng: StdRng) {
         let mut parser = EditParser::new(EditFormat::XmlTags);
@@ -1035,6 +1074,11 @@ mod tests {
             last_ix = chunk_ix;
         }
 
+        if new_text.is_some() {
+            pending_edit.new_text = new_text.take().unwrap();
+            edits.push(pending_edit);
+        }
+
         edits
     }
 }
  
  
  
    
    @@ -305,20 +305,18 @@ impl SearchMatrix {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use gpui::TestAppContext;
     use indoc::indoc;
     use language::{BufferId, TextBuffer};
     use rand::prelude::*;
     use text::ReplicaId;
     use util::test::{generate_marked_text, marked_text_ranges};
 
-    #[gpui::test]
-    fn test_empty_query(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_empty_query() {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             "Hello world\nThis is a test\nFoo bar baz",
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -327,13 +325,12 @@ mod tests {
         assert_eq!(finish(finder), None);
     }
 
-    #[gpui::test]
-    fn test_streaming_exact_match(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_streaming_exact_match() {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             "Hello world\nThis is a test\nFoo bar baz",
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -352,8 +349,8 @@ mod tests {
         assert_eq!(finish(finder), Some("This is a test".to_string()));
     }
 
-    #[gpui::test]
-    fn test_streaming_fuzzy_match(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_streaming_fuzzy_match() {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
@@ -366,7 +363,6 @@ mod tests {
                     return x * y;
                 }
             "},
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -387,13 +383,12 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_incremental_improvement(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_incremental_improvement() {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             "Line 1\nLine 2\nLine 3\nLine 4\nLine 5",
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -413,8 +408,8 @@ mod tests {
         assert_eq!(finish(finder), Some("Line 3\nLine 4".to_string()));
     }
 
-    #[gpui::test]
-    fn test_incomplete_lines_buffering(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_incomplete_lines_buffering() {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
@@ -423,7 +418,6 @@ mod tests {
                 jumps over the lazy dog
                 Pack my box with five dozen liquor jugs
             "},
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -441,8 +435,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_multiline_fuzzy_match(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_multiline_fuzzy_match() {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
@@ -462,7 +456,6 @@ mod tests {
                     }
                 }
             "#},
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -516,7 +509,7 @@ mod tests {
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_single_line(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_single_line(mut rng: StdRng) {
         assert_location_resolution(
             concat!(
                 "    Lorem\n",
@@ -526,12 +519,11 @@ mod tests {
             ),
             "ipsum",
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_multiline(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_multiline(mut rng: StdRng) {
         assert_location_resolution(
             concat!(
                 "    Lorem\n",
@@ -541,12 +533,11 @@ mod tests {
             ),
             "ipsum\ndolor sit amet",
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_function_with_typo(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_function_with_typo(mut rng: StdRng) {
         assert_location_resolution(
             indoc! {"
                 Β«fn foo1(a: usize) -> usize {
@@ -559,12 +550,11 @@ mod tests {
             "},
             "fn foo1(a: usize) -> u32 {\n40\n}",
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_class_methods(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_class_methods(mut rng: StdRng) {
         assert_location_resolution(
             indoc! {"
                 class Something {
@@ -585,12 +575,11 @@ mod tests {
                 six() { return 6666; }
             "},
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_imports_no_match(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_imports_no_match(mut rng: StdRng) {
         assert_location_resolution(
             indoc! {"
                 use std::ops::Range;
@@ -620,12 +609,11 @@ mod tests {
                 use std::sync::Arc;
             "},
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_nested_closure(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_nested_closure(mut rng: StdRng) {
         assert_location_resolution(
             indoc! {"
                 impl Foo {
@@ -653,12 +641,11 @@ mod tests {
                 "                    });",
             ),
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_tool_invocation(mut rng: StdRng, cx: &mut TestAppContext) {
+    fn test_resolve_location_tool_invocation(mut rng: StdRng) {
         assert_location_resolution(
             indoc! {r#"
                 let tool = cx
@@ -686,12 +673,11 @@ mod tests {
                 "    .output;",
             ),
             &mut rng,
-            cx,
         );
     }
 
     #[gpui::test]
-    fn test_line_hint_selection(cx: &mut TestAppContext) {
+    fn test_line_hint_selection() {
         let text = indoc! {r#"
             fn first_function() {
                 return 42;
@@ -710,7 +696,6 @@ mod tests {
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             text.to_string(),
-            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
         let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
@@ -742,19 +727,9 @@ mod tests {
     }
 
     #[track_caller]
-    fn assert_location_resolution(
-        text_with_expected_range: &str,
-        query: &str,
-        rng: &mut StdRng,
-        cx: &mut TestAppContext,
-    ) {
+    fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) {
         let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false);
-        let buffer = TextBuffer::new(
-            ReplicaId::LOCAL,
-            BufferId::new(1).unwrap(),
-            text.clone(),
-            cx.background_executor(),
-        );
+        let buffer = TextBuffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.clone());
         let snapshot = buffer.snapshot();
 
         let mut matcher = StreamingFuzzyMatcher::new(snapshot);
  
  
  
    
    @@ -569,7 +569,6 @@ mod tests {
     use prompt_store::ProjectContext;
     use serde_json::json;
     use settings::SettingsStore;
-    use text::Rope;
     use util::{path, rel_path::rel_path};
 
     #[gpui::test]
@@ -742,7 +741,7 @@ mod tests {
         // Create the file
         fs.save(
             path!("/root/src/main.rs").as_ref(),
-            &Rope::from_str_small("initial content"),
+            &"initial content".into(),
             language::LineEnding::Unix,
         )
         .await
@@ -909,7 +908,7 @@ mod tests {
         // Create a simple file with trailing whitespace
         fs.save(
             path!("/root/src/main.rs").as_ref(),
-            &Rope::from_str_small("initial content"),
+            &"initial content".into(),
             language::LineEnding::Unix,
         )
         .await
  
  
  
    
    @@ -4,7 +4,7 @@ use acp_thread::{AcpThread, AgentThreadEntry};
 use agent::HistoryStore;
 use agent_client_protocol::{self as acp, ToolCallId};
 use collections::HashMap;
-use editor::{Editor, EditorMode, MinimapVisibility};
+use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior};
 use gpui::{
     AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
     ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window,
@@ -357,7 +357,7 @@ fn create_editor_diff(
             EditorMode::Full {
                 scale_ui_elements_with_buffer_font_size: false,
                 show_active_line_background: false,
-                sized_by_content: true,
+                sizing_behavior: SizingBehavior::SizeByContent,
             },
             diff.read(cx).multibuffer().clone(),
             None,
  
  
  
    
    @@ -17,7 +17,9 @@ use client::zed_urls;
 use cloud_llm_client::PlanV1;
 use collections::{HashMap, HashSet};
 use editor::scroll::Autoscroll;
-use editor::{Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects};
+use editor::{
+    Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects, SizingBehavior,
+};
 use file_icons::FileIcons;
 use fs::Fs;
 use futures::FutureExt as _;
@@ -881,6 +883,7 @@ impl AcpThreadView {
         cx: &mut Context<Self>,
     ) {
         self.set_editor_is_expanded(!self.editor_expanded, cx);
+        cx.stop_propagation();
         cx.notify();
     }
 
@@ -892,7 +895,7 @@ impl AcpThreadView {
                     EditorMode::Full {
                         scale_ui_elements_with_buffer_font_size: false,
                         show_active_line_background: false,
-                        sized_by_content: false,
+                        sizing_behavior: SizingBehavior::ExcludeOverscrollMargin,
                     },
                     cx,
                 )
@@ -3631,6 +3634,7 @@ impl AcpThreadView {
             .child(
                 h_flex()
                     .id("edits-container")
+                    .cursor_pointer()
                     .gap_1()
                     .child(Disclosure::new("edits-disclosure", expanded))
                     .map(|this| {
@@ -3770,6 +3774,7 @@ impl AcpThreadView {
                     Label::new(name.to_string())
                         .size(LabelSize::XSmall)
                         .buffer_font(cx)
+                        .ml_1p5()
                 });
 
                 let file_icon = FileIcons::get_icon(path.as_std_path(), cx)
@@ -3801,14 +3806,30 @@ impl AcpThreadView {
                     })
                     .child(
                         h_flex()
+                            .id(("file-name-row", index))
                             .relative()
-                            .id(("file-name", index))
                             .pr_8()
-                            .gap_1p5()
                             .w_full()
                             .overflow_x_scroll()
-                            .child(file_icon)
-                            .child(h_flex().gap_0p5().children(file_name).children(file_path))
+                            .child(
+                                h_flex()
+                                    .id(("file-name-path", index))
+                                    .cursor_pointer()
+                                    .pr_0p5()
+                                    .gap_0p5()
+                                    .hover(|s| s.bg(cx.theme().colors().element_hover))
+                                    .rounded_xs()
+                                    .child(file_icon)
+                                    .children(file_name)
+                                    .children(file_path)
+                                    .tooltip(Tooltip::text("Go to File"))
+                                    .on_click({
+                                        let buffer = buffer.clone();
+                                        cx.listener(move |this, _, window, cx| {
+                                            this.open_edited_buffer(&buffer, window, cx);
+                                        })
+                                    }),
+                            )
                             .child(
                                 div()
                                     .absolute()
@@ -3818,13 +3839,7 @@ impl AcpThreadView {
                                     .bottom_0()
                                     .right_0()
                                     .bg(overlay_gradient),
-                            )
-                            .on_click({
-                                let buffer = buffer.clone();
-                                cx.listener(move |this, _, window, cx| {
-                                    this.open_edited_buffer(&buffer, window, cx);
-                                })
-                            }),
+                            ),
                     )
                     .child(
                         h_flex()
@@ -3966,8 +3981,12 @@ impl AcpThreadView {
                                             )
                                         }
                                     })
-                                    .on_click(cx.listener(|_, _, window, cx| {
-                                        window.dispatch_action(Box::new(ExpandMessageEditor), cx);
+                                    .on_click(cx.listener(|this, _, window, cx| {
+                                        this.expand_message_editor(
+                                            &ExpandMessageEditor,
+                                            window,
+                                            cx,
+                                        );
                                     })),
                             ),
                     ),
  
  
  
    
    @@ -23,16 +23,17 @@ use language::LanguageRegistry;
 use language_model::{
     LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
 };
+use language_models::AllLanguageModelSettings;
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::{
     agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
     context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
 };
-use rope::Rope;
-use settings::{SettingsStore, update_settings_file};
+use settings::{Settings, SettingsStore, update_settings_file};
 use ui::{
-    Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
-    Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
+    Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
+    ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
+    SwitchColor, Tooltip, WithScrollbar, prelude::*,
 };
 use util::ResultExt as _;
 use workspace::{Workspace, create_and_open_local_file};
@@ -304,10 +305,76 @@ impl AgentConfiguration {
                                 }
                             })),
                         )
-                    }),
+                    })
+                    .when(
+                        is_expanded && is_removable_provider(&provider.id(), cx),
+                        |this| {
+                            this.child(
+                                Button::new(
+                                    SharedString::from(format!("delete-provider-{provider_id}")),
+                                    "Remove Provider",
+                                )
+                                .full_width()
+                                .style(ButtonStyle::Outlined)
+                                .icon_position(IconPosition::Start)
+                                .icon(IconName::Trash)
+                                .icon_size(IconSize::Small)
+                                .icon_color(Color::Muted)
+                                .label_size(LabelSize::Small)
+                                .on_click(cx.listener({
+                                    let provider = provider.clone();
+                                    move |this, _event, window, cx| {
+                                        this.delete_provider(provider.clone(), window, cx);
+                                    }
+                                })),
+                            )
+                        },
+                    ),
             )
     }
 
+    fn delete_provider(
+        &mut self,
+        provider: Arc<dyn LanguageModelProvider>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let fs = self.fs.clone();
+        let provider_id = provider.id();
+
+        cx.spawn_in(window, async move |_, cx| {
+            cx.update(|_window, cx| {
+                update_settings_file(fs.clone(), cx, {
+                    let provider_id = provider_id.clone();
+                    move |settings, _| {
+                        if let Some(ref mut openai_compatible) = settings
+                            .language_models
+                            .as_mut()
+                            .and_then(|lm| lm.openai_compatible.as_mut())
+                        {
+                            let key_to_remove: Arc<str> = Arc::from(provider_id.0.as_ref());
+                            openai_compatible.remove(&key_to_remove);
+                        }
+                    }
+                });
+            })
+            .log_err();
+
+            cx.update(|_window, cx| {
+                LanguageModelRegistry::global(cx).update(cx, {
+                    let provider_id = provider_id.clone();
+                    move |registry, cx| {
+                        registry.unregister_provider(provider_id, cx);
+                    }
+                })
+            })
+            .log_err();
+
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+    }
+
     fn render_provider_configuration_section(
         &mut self,
         cx: &mut Context<Self>,
@@ -1115,11 +1182,8 @@ async fn open_new_agent_servers_entry_in_settings_editor(
 ) -> Result<()> {
     let settings_editor = workspace
         .update_in(cx, |_, window, cx| {
-            create_and_open_local_file(paths::settings_file(), window, cx, |cx| {
-                Rope::from_str(
-                    &settings::initial_user_settings_content(),
-                    cx.background_executor(),
-                )
+            create_and_open_local_file(paths::settings_file(), window, cx, || {
+                settings::initial_user_settings_content().as_ref().into()
             })
         })?
         .await?
@@ -1225,3 +1289,14 @@ fn find_text_in_buffer(
         None
     }
 }
+
+// OpenAI-compatible providers are user-configured and can be removed,
+// whereas built-in providers (like Anthropic, OpenAI, Google, etc.) can't.
+//
+// If in the future we have more "API-compatible-type" of providers,
+// they should be included here as removable providers.
+fn is_removable_provider(provider_id: &LanguageModelProviderId, cx: &App) -> bool {
+    AllLanguageModelSettings::get_global(cx)
+        .openai_compatible
+        .contains_key(provider_id.0.as_ref())
+}
  
  
  
    
    @@ -70,14 +70,6 @@ impl AgentDiffThread {
         }
     }
 
-    fn is_generating(&self, cx: &App) -> bool {
-        match self {
-            AgentDiffThread::AcpThread(thread) => {
-                thread.read(cx).status() == acp_thread::ThreadStatus::Generating
-            }
-        }
-    }
-
     fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
         match self {
             AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
@@ -970,9 +962,7 @@ impl AgentDiffToolbar {
             None => ToolbarItemLocation::Hidden,
             Some(AgentDiffToolbarItem::Pane(_)) => ToolbarItemLocation::PrimaryRight,
             Some(AgentDiffToolbarItem::Editor { state, .. }) => match state {
-                EditorState::Generating | EditorState::Reviewing => {
-                    ToolbarItemLocation::PrimaryRight
-                }
+                EditorState::Reviewing => ToolbarItemLocation::PrimaryRight,
                 EditorState::Idle => ToolbarItemLocation::Hidden,
             },
         }
@@ -1050,7 +1040,6 @@ impl Render for AgentDiffToolbar {
 
                 let content = match state {
                     EditorState::Idle => return Empty.into_any(),
-                    EditorState::Generating => vec![spinner_icon],
                     EditorState::Reviewing => vec![
                         h_flex()
                             .child(
@@ -1222,7 +1211,6 @@ pub struct AgentDiff {
 pub enum EditorState {
     Idle,
     Reviewing,
-    Generating,
 }
 
 struct WorkspaceThread {
@@ -1545,15 +1533,11 @@ impl AgentDiff {
                     multibuffer.add_diff(diff_handle.clone(), cx);
                 });
 
-                let new_state = if thread.is_generating(cx) {
-                    EditorState::Generating
-                } else {
-                    EditorState::Reviewing
-                };
+                let reviewing_state = EditorState::Reviewing;
 
                 let previous_state = self
                     .reviewing_editors
-                    .insert(weak_editor.clone(), new_state.clone());
+                    .insert(weak_editor.clone(), reviewing_state.clone());
 
                 if previous_state.is_none() {
                     editor.update(cx, |editor, cx| {
@@ -1566,7 +1550,9 @@ impl AgentDiff {
                     unaffected.remove(weak_editor);
                 }
 
-                if new_state == EditorState::Reviewing && previous_state != Some(new_state) {
+                if reviewing_state == EditorState::Reviewing
+                    && previous_state != Some(reviewing_state)
+                {
                     // Jump to first hunk when we enter review mode
                     editor.update(cx, |editor, cx| {
                         let snapshot = multibuffer.read(cx).snapshot(cx);
  
  
  
    
    @@ -19,7 +19,6 @@ use settings::{
 use zed_actions::OpenBrowser;
 use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
 
-use crate::acp::{AcpThreadHistory, ThreadHistoryEvent};
 use crate::context_store::ContextStore;
 use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
 use crate::{
@@ -33,6 +32,10 @@ use crate::{
     text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate},
     ui::{AgentOnboardingModal, EndTrialUpsell},
 };
+use crate::{
+    ExpandMessageEditor,
+    acp::{AcpThreadHistory, ThreadHistoryEvent},
+};
 use crate::{
     ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
 };
@@ -106,6 +109,12 @@ pub fn init(cx: &mut App) {
                         }
                     },
                 )
+                .register_action(|workspace, _: &ExpandMessageEditor, window, cx| {
+                    if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+                        workspace.focus_panel::<AgentPanel>(window, cx);
+                        panel.update(cx, |panel, cx| panel.expand_message_editor(window, cx));
+                    }
+                })
                 .register_action(|workspace, _: &OpenHistory, window, cx| {
                     if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
                         workspace.focus_panel::<AgentPanel>(window, cx);
@@ -944,6 +953,15 @@ impl AgentPanel {
         .detach_and_log_err(cx);
     }
 
+    fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        if let Some(thread_view) = self.active_thread_view() {
+            thread_view.update(cx, |view, cx| {
+                view.expand_message_editor(&ExpandMessageEditor, window, cx);
+                view.focus_handle(cx).focus(window);
+            });
+        }
+    }
+
     fn open_history(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         if matches!(self.active_view, ActiveView::History) {
             if let Some(previous_view) = self.previous_view.take() {
  
  
  
    
    @@ -487,10 +487,9 @@ impl CodegenAlternative {
     ) {
         let start_time = Instant::now();
         let snapshot = self.snapshot.clone();
-        let selected_text = Rope::from_iter(
-            snapshot.text_for_range(self.range.start..self.range.end),
-            cx.background_executor(),
-        );
+        let selected_text = snapshot
+            .text_for_range(self.range.start..self.range.end)
+            .collect::<Rope>();
 
         let selection_start = self.range.start.to_point(&snapshot);
 
  
  
  
    
    @@ -620,8 +620,18 @@ impl TextThreadContextHandle {
 
 impl Display for TextThreadContext {
     fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        // TODO: escape title?
-        writeln!(f, "<text_thread title=\"{}\">", self.title)?;
+        write!(f, "<text_thread title=\"")?;
+        for c in self.title.chars() {
+            match c {
+                '&' => write!(f, "&amp;")?,
+                '<' => write!(f, "&lt;")?,
+                '>' => write!(f, "&gt;")?,
+                '"' => write!(f, "&quot;")?,
+                '\'' => write!(f, "&apos;")?,
+                _ => write!(f, "{}", c)?,
+            }
+        }
+        writeln!(f, "\">")?;
         write!(f, "{}", self.text.trim())?;
         write!(f, "\n</text_thread>")
     }
  
  
  
    
    @@ -744,13 +744,12 @@ impl TextThread {
         telemetry: Option<Arc<Telemetry>>,
         cx: &mut Context<Self>,
     ) -> Self {
-        let buffer = cx.new(|cx| {
+        let buffer = cx.new(|_cx| {
             let buffer = Buffer::remote(
                 language::BufferId::new(1).unwrap(),
                 replica_id,
                 capability,
                 "",
-                cx.background_executor(),
             );
             buffer.set_language_registry(language_registry.clone());
             buffer
  
  
  
    
    @@ -26,6 +26,7 @@ serde_json.workspace = true
 settings.workspace = true
 smol.workspace = true
 tempfile.workspace = true
+util.workspace = true
 workspace.workspace = true
 
 [target.'cfg(not(target_os = "windows"))'.dependencies]
  
  
  
    
    @@ -962,7 +962,7 @@ pub async fn finalize_auto_update_on_quit() {
             .parent()
             .map(|p| p.join("tools").join("auto_update_helper.exe"))
     {
-        let mut command = smol::process::Command::new(helper);
+        let mut command = util::command::new_smol_command(helper);
         command.arg("--launch");
         command.arg("false");
         if let Ok(mut cmd) = command.spawn() {
  
  
  
    
    @@ -100,13 +100,21 @@ impl Render for Breadcrumbs {
 
         let breadcrumbs_stack = h_flex().gap_1().children(breadcrumbs);
 
+        let prefix_element = active_item.breadcrumb_prefix(window, cx);
+
+        let breadcrumbs = if let Some(prefix) = prefix_element {
+            h_flex().gap_1p5().child(prefix).child(breadcrumbs_stack)
+        } else {
+            breadcrumbs_stack
+        };
+
         match active_item
             .downcast::<Editor>()
             .map(|editor| editor.downgrade())
         {
             Some(editor) => element.child(
                 ButtonLike::new("toggle outline view")
-                    .child(breadcrumbs_stack)
+                    .child(breadcrumbs)
                     .style(ButtonStyle::Transparent)
                     .on_click({
                         let editor = editor.clone();
@@ -141,7 +149,7 @@ impl Render for Breadcrumbs {
                 // Match the height and padding of the `ButtonLike` in the other arm.
                 .h(rems_from_px(22.))
                 .pl_1()
-                .child(breadcrumbs_stack),
+                .child(breadcrumbs),
         }
     }
 }
  
  
  
    
    @@ -1,9 +1,6 @@
 use futures::channel::oneshot;
 use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
-use gpui::{
-    App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task,
-    TaskLabel,
-};
+use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel};
 use language::{Language, LanguageRegistry};
 use rope::Rope;
 use std::{
@@ -194,7 +191,7 @@ impl BufferDiffSnapshot {
         let base_text_exists;
         let base_text_snapshot;
         if let Some(text) = &base_text {
-            let base_text_rope = Rope::from_str(text.as_str(), cx.background_executor());
+            let base_text_rope = Rope::from(text.as_str());
             base_text_pair = Some((text.clone(), base_text_rope.clone()));
             let snapshot =
                 language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx);
@@ -314,7 +311,6 @@ impl BufferDiffInner {
         hunks: &[DiffHunk],
         buffer: &text::BufferSnapshot,
         file_exists: bool,
-        cx: &BackgroundExecutor,
     ) -> Option<Rope> {
         let head_text = self
             .base_text_exists
@@ -509,7 +505,7 @@ impl BufferDiffInner {
         for (old_range, replacement_text) in edits {
             new_index_text.append(index_cursor.slice(old_range.start));
             index_cursor.seek_forward(old_range.end);
-            new_index_text.push(&replacement_text, cx);
+            new_index_text.push(&replacement_text);
         }
         new_index_text.append(index_cursor.suffix());
         Some(new_index_text)
@@ -966,7 +962,6 @@ impl BufferDiff {
             hunks,
             buffer,
             file_exists,
-            cx.background_executor(),
         );
 
         cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
@@ -1390,12 +1385,7 @@ mod tests {
         "
         .unindent();
 
-        let mut buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            BufferId::new(1).unwrap(),
-            buffer_text,
-            cx.background_executor(),
-        );
+        let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
         let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx);
         assert_hunks(
             diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer),
@@ -1404,7 +1394,7 @@ mod tests {
             &[(1..2, "two\n", "HELLO\n", DiffHunkStatus::modified_none())],
         );
 
-        buffer.edit([(0..0, "point five\n")], cx.background_executor());
+        buffer.edit([(0..0, "point five\n")]);
         diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx);
         assert_hunks(
             diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer),
@@ -1469,12 +1459,7 @@ mod tests {
         "
         .unindent();
 
-        let buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            BufferId::new(1).unwrap(),
-            buffer_text,
-            cx.background_executor(),
-        );
+        let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
         let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
         let mut uncommitted_diff =
             BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
@@ -1543,12 +1528,7 @@ mod tests {
         "
         .unindent();
 
-        let buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            BufferId::new(1).unwrap(),
-            buffer_text,
-            cx.background_executor(),
-        );
+        let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
         let diff = cx
             .update(|cx| {
                 BufferDiffSnapshot::new_with_base_text(
@@ -1811,12 +1791,7 @@ mod tests {
 
         for example in table {
             let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false);
-            let buffer = Buffer::new(
-                ReplicaId::LOCAL,
-                BufferId::new(1).unwrap(),
-                buffer_text,
-                cx.background_executor(),
-            );
+            let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
             let hunk_range =
                 buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end);
 
@@ -1893,7 +1868,6 @@ mod tests {
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             buffer_text.clone(),
-            cx.background_executor(),
         );
         let unstaged = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
         let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
@@ -1967,12 +1941,7 @@ mod tests {
         "
         .unindent();
 
-        let mut buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            BufferId::new(1).unwrap(),
-            buffer_text_1,
-            cx.background_executor(),
-        );
+        let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text_1);
 
         let empty_diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx));
         let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
@@ -1992,7 +1961,6 @@ mod tests {
                 NINE
             "
             .unindent(),
-            cx.background_executor(),
         );
         let diff_2 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
         assert_eq!(None, diff_2.inner.compare(&diff_1.inner, &buffer));
@@ -2010,7 +1978,6 @@ mod tests {
                 NINE
             "
             .unindent(),
-            cx.background_executor(),
         );
         let diff_3 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
         let range = diff_3.inner.compare(&diff_2.inner, &buffer).unwrap();
@@ -2028,7 +1995,6 @@ mod tests {
                 NINE
             "
             .unindent(),
-            cx.background_executor(),
         );
         let diff_4 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
         let range = diff_4.inner.compare(&diff_3.inner, &buffer).unwrap();
@@ -2047,7 +2013,6 @@ mod tests {
                 NINE
             "
             .unindent(),
-            cx.background_executor(),
         );
         let diff_5 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text.clone(), cx);
         let range = diff_5.inner.compare(&diff_4.inner, &buffer).unwrap();
@@ -2066,7 +2031,6 @@ mod tests {
                Β«nineΒ»
             "
             .unindent(),
-            cx.background_executor(),
         );
         let diff_6 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text, cx);
         let range = diff_6.inner.compare(&diff_5.inner, &buffer).unwrap();
@@ -2176,14 +2140,14 @@ mod tests {
         let working_copy = gen_working_copy(rng, &head_text);
         let working_copy = cx.new(|cx| {
             language::Buffer::local_normalized(
-                Rope::from_str(working_copy.as_str(), cx.background_executor()),
+                Rope::from(working_copy.as_str()),
                 text::LineEnding::default(),
                 cx,
             )
         });
         let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot());
         let mut index_text = if rng.random() {
-            Rope::from_str(head_text.as_str(), cx.background_executor())
+            Rope::from(head_text.as_str())
         } else {
             working_copy.as_rope().clone()
         };
  
  
  
    
    @@ -70,7 +70,6 @@ impl ChannelBuffer {
                 ReplicaId::new(response.replica_id as u16),
                 capability,
                 base_text,
-                cx.background_executor(),
             )
         })?;
         buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
  
  
  
    
    @@ -701,12 +701,12 @@ impl Database {
             return Ok(());
         }
 
-        let mut text_buffer = text::Buffer::new_slow(
+        let mut text_buffer = text::Buffer::new(
             clock::ReplicaId::LOCAL,
             text::BufferId::new(1).unwrap(),
             base_text,
         );
-        text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire), None);
+        text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire));
 
         let base_text = text_buffer.text();
         let epoch = buffer.epoch + 1;
  
  
  
    
    @@ -74,21 +74,11 @@ async fn test_channel_buffers(db: &Arc<Database>) {
         ReplicaId::new(0),
         text::BufferId::new(1).unwrap(),
         "".to_string(),
-        &db.test_options.as_ref().unwrap().executor,
     );
     let operations = vec![
-        buffer_a.edit(
-            [(0..0, "hello world")],
-            &db.test_options.as_ref().unwrap().executor,
-        ),
-        buffer_a.edit(
-            [(5..5, ", cruel")],
-            &db.test_options.as_ref().unwrap().executor,
-        ),
-        buffer_a.edit(
-            [(0..5, "goodbye")],
-            &db.test_options.as_ref().unwrap().executor,
-        ),
+        buffer_a.edit([(0..0, "hello world")]),
+        buffer_a.edit([(5..5, ", cruel")]),
+        buffer_a.edit([(0..5, "goodbye")]),
         buffer_a.undo().unwrap().1,
     ];
     assert_eq!(buffer_a.text(), "hello, cruel world");
@@ -112,19 +102,15 @@ async fn test_channel_buffers(db: &Arc<Database>) {
         ReplicaId::new(0),
         text::BufferId::new(1).unwrap(),
         buffer_response_b.base_text,
-        &db.test_options.as_ref().unwrap().executor,
-    );
-    buffer_b.apply_ops(
-        buffer_response_b.operations.into_iter().map(|operation| {
-            let operation = proto::deserialize_operation(operation).unwrap();
-            if let language::Operation::Buffer(operation) = operation {
-                operation
-            } else {
-                unreachable!()
-            }
-        }),
-        None,
     );
+    buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
+        let operation = proto::deserialize_operation(operation).unwrap();
+        if let language::Operation::Buffer(operation) = operation {
+            operation
+        } else {
+            unreachable!()
+        }
+    }));
 
     assert_eq!(buffer_b.text(), "hello, cruel world");
 
@@ -261,7 +247,6 @@ async fn test_channel_buffers_last_operations(db: &Database) {
             ReplicaId::new(res.replica_id as u16),
             text::BufferId::new(1).unwrap(),
             "".to_string(),
-            &db.test_options.as_ref().unwrap().executor,
         ));
     }
 
@@ -270,9 +255,9 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         user_id,
         db,
         vec![
-            text_buffers[0].edit([(0..0, "a")], &db.test_options.as_ref().unwrap().executor),
-            text_buffers[0].edit([(0..0, "b")], &db.test_options.as_ref().unwrap().executor),
-            text_buffers[0].edit([(0..0, "c")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[0].edit([(0..0, "a")]),
+            text_buffers[0].edit([(0..0, "b")]),
+            text_buffers[0].edit([(0..0, "c")]),
         ],
     )
     .await;
@@ -282,9 +267,9 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         user_id,
         db,
         vec![
-            text_buffers[1].edit([(0..0, "d")], &db.test_options.as_ref().unwrap().executor),
-            text_buffers[1].edit([(1..1, "e")], &db.test_options.as_ref().unwrap().executor),
-            text_buffers[1].edit([(2..2, "f")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[1].edit([(0..0, "d")]),
+            text_buffers[1].edit([(1..1, "e")]),
+            text_buffers[1].edit([(2..2, "f")]),
         ],
     )
     .await;
@@ -301,15 +286,14 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         replica_id,
         text::BufferId::new(1).unwrap(),
         "def".to_string(),
-        &db.test_options.as_ref().unwrap().executor,
     );
     update_buffer(
         buffers[1].channel_id,
         user_id,
         db,
         vec![
-            text_buffers[1].edit([(0..0, "g")], &db.test_options.as_ref().unwrap().executor),
-            text_buffers[1].edit([(0..0, "h")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[1].edit([(0..0, "g")]),
+            text_buffers[1].edit([(0..0, "h")]),
         ],
     )
     .await;
@@ -318,7 +302,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         buffers[2].channel_id,
         user_id,
         db,
-        vec![text_buffers[2].edit([(0..0, "i")], &db.test_options.as_ref().unwrap().executor)],
+        vec![text_buffers[2].edit([(0..0, "i")])],
     )
     .await;
 
  
  
  
    
    @@ -39,6 +39,7 @@ use std::{
         Arc,
         atomic::{self, AtomicBool, AtomicUsize},
     },
+    time::Duration,
 };
 use text::Point;
 use util::{path, rel_path::rel_path, uri};
@@ -1817,14 +1818,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
                 settings.project.all_languages.defaults.inlay_hints =
                     Some(InlayHintSettingsContent {
                         enabled: Some(true),
-                        show_value_hints: Some(true),
-                        edit_debounce_ms: Some(0),
-                        scroll_debounce_ms: Some(0),
-                        show_type_hints: Some(true),
-                        show_parameter_hints: Some(false),
-                        show_other_hints: Some(true),
-                        show_background: Some(false),
-                        toggle_on_modifiers_press: None,
+                        ..InlayHintSettingsContent::default()
                     })
             });
         });
@@ -1834,15 +1828,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
             store.update_user_settings(cx, |settings| {
                 settings.project.all_languages.defaults.inlay_hints =
                     Some(InlayHintSettingsContent {
-                        show_value_hints: Some(true),
                         enabled: Some(true),
-                        edit_debounce_ms: Some(0),
-                        scroll_debounce_ms: Some(0),
-                        show_type_hints: Some(true),
-                        show_parameter_hints: Some(false),
-                        show_other_hints: Some(true),
-                        show_background: Some(false),
-                        toggle_on_modifiers_press: None,
+                        ..InlayHintSettingsContent::default()
                     })
             });
         });
@@ -1935,6 +1922,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     });
     let fake_language_server = fake_language_servers.next().await.unwrap();
     let editor_a = file_a.await.unwrap().downcast::<Editor>().unwrap();
+    executor.advance_clock(Duration::from_millis(100));
     executor.run_until_parked();
 
     let initial_edit = edits_made.load(atomic::Ordering::Acquire);
@@ -1955,6 +1943,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .downcast::<Editor>()
         .unwrap();
 
+    executor.advance_clock(Duration::from_millis(100));
     executor.run_until_parked();
     editor_b.update(cx_b, |editor, cx| {
         assert_eq!(
@@ -1973,6 +1962,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     });
     cx_b.focus(&editor_b);
 
+    executor.advance_clock(Duration::from_secs(1));
     executor.run_until_parked();
     editor_a.update(cx_a, |editor, cx| {
         assert_eq!(
@@ -1996,6 +1986,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     });
     cx_a.focus(&editor_a);
 
+    executor.advance_clock(Duration::from_secs(1));
     executor.run_until_parked();
     editor_a.update(cx_a, |editor, cx| {
         assert_eq!(
@@ -2017,6 +2008,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .into_response()
         .expect("inlay refresh request failed");
 
+    executor.advance_clock(Duration::from_secs(1));
     executor.run_until_parked();
     editor_a.update(cx_a, |editor, cx| {
         assert_eq!(
  
  
  
    
    @@ -3694,7 +3694,7 @@ async fn test_buffer_reloading(
         assert_eq!(buf.line_ending(), LineEnding::Unix);
     });
 
-    let new_contents = Rope::from_str_small("d\ne\nf");
+    let new_contents = Rope::from("d\ne\nf");
     client_a
         .fs()
         .save(
@@ -4479,7 +4479,7 @@ async fn test_reloading_buffer_manually(
         .fs()
         .save(
             path!("/a/a.rs").as_ref(),
-            &Rope::from_str_small("let seven = 7;"),
+            &Rope::from("let seven = 7;"),
             LineEnding::Unix,
         )
         .await
  
  
  
    
    @@ -27,7 +27,6 @@ use std::{
     rc::Rc,
     sync::Arc,
 };
-use text::Rope;
 use util::{
     ResultExt, path,
     paths::PathStyle,
@@ -939,11 +938,7 @@ impl RandomizedTest for ProjectCollaborationTest {
 
                     client
                         .fs()
-                        .save(
-                            &path,
-                            &Rope::from_str_small(content.as_str()),
-                            text::LineEnding::Unix,
-                        )
+                        .save(&path, &content.as_str().into(), text::LineEnding::Unix)
                         .await
                         .unwrap();
                 }
  
  
  
    
    @@ -42,61 +42,63 @@ pub fn init(cx: &mut App) {
 }
 
 #[cfg(test)]
-struct MockDelegate {
-    worktree_root: PathBuf,
-}
+mod test_mocks {
+    use super::*;
 
-#[cfg(test)]
-impl MockDelegate {
-    fn new() -> Arc<dyn adapters::DapDelegate> {
-        Arc::new(Self {
-            worktree_root: PathBuf::from("/tmp/test"),
-        })
+    pub(crate) struct MockDelegate {
+        worktree_root: PathBuf,
     }
-}
 
-#[cfg(test)]
-#[async_trait::async_trait]
-impl adapters::DapDelegate for MockDelegate {
-    fn worktree_id(&self) -> settings::WorktreeId {
-        settings::WorktreeId::from_usize(0)
+    impl MockDelegate {
+        pub(crate) fn new() -> Arc<dyn adapters::DapDelegate> {
+            Arc::new(Self {
+                worktree_root: PathBuf::from("/tmp/test"),
+            })
+        }
     }
 
-    fn worktree_root_path(&self) -> &std::path::Path {
-        &self.worktree_root
-    }
+    #[async_trait::async_trait]
+    impl adapters::DapDelegate for MockDelegate {
+        fn worktree_id(&self) -> settings::WorktreeId {
+            settings::WorktreeId::from_usize(0)
+        }
 
-    fn http_client(&self) -> Arc<dyn http_client::HttpClient> {
-        unimplemented!("Not needed for tests")
-    }
+        fn worktree_root_path(&self) -> &std::path::Path {
+            &self.worktree_root
+        }
 
-    fn node_runtime(&self) -> node_runtime::NodeRuntime {
-        unimplemented!("Not needed for tests")
-    }
+        fn http_client(&self) -> Arc<dyn http_client::HttpClient> {
+            unimplemented!("Not needed for tests")
+        }
 
-    fn toolchain_store(&self) -> Arc<dyn language::LanguageToolchainStore> {
-        unimplemented!("Not needed for tests")
-    }
+        fn node_runtime(&self) -> node_runtime::NodeRuntime {
+            unimplemented!("Not needed for tests")
+        }
 
-    fn fs(&self) -> Arc<dyn fs::Fs> {
-        unimplemented!("Not needed for tests")
-    }
+        fn toolchain_store(&self) -> Arc<dyn language::LanguageToolchainStore> {
+            unimplemented!("Not needed for tests")
+        }
 
-    fn output_to_console(&self, _msg: String) {}
+        fn fs(&self) -> Arc<dyn fs::Fs> {
+            unimplemented!("Not needed for tests")
+        }
 
-    async fn which(&self, _command: &std::ffi::OsStr) -> Option<PathBuf> {
-        None
-    }
+        fn output_to_console(&self, _msg: String) {}
 
-    async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result<String> {
-        Ok(String::new())
-    }
+        async fn which(&self, _command: &std::ffi::OsStr) -> Option<PathBuf> {
+            None
+        }
 
-    async fn shell_env(&self) -> collections::HashMap<String, String> {
-        collections::HashMap::default()
-    }
+        async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result<String> {
+            Ok(String::new())
+        }
 
-    fn is_headless(&self) -> bool {
-        false
+        async fn shell_env(&self) -> collections::HashMap<String, String> {
+            collections::HashMap::default()
+        }
+
+        fn is_headless(&self) -> bool {
+            false
+        }
     }
 }
  
  
  
    
    @@ -824,29 +824,58 @@ impl DebugAdapter for PythonDebugAdapter {
                 .await;
         }
 
-        let base_path = config
-            .config
-            .get("cwd")
-            .and_then(|cwd| {
-                RelPath::new(
-                    cwd.as_str()
-                        .map(Path::new)?
-                        .strip_prefix(delegate.worktree_root_path())
-                        .ok()?,
-                    PathStyle::local(),
-                )
-                .ok()
+        let base_paths = ["cwd", "program", "module"]
+            .into_iter()
+            .filter_map(|key| {
+                config.config.get(key).and_then(|cwd| {
+                    RelPath::new(
+                        cwd.as_str()
+                            .map(Path::new)?
+                            .strip_prefix(delegate.worktree_root_path())
+                            .ok()?,
+                        PathStyle::local(),
+                    )
+                    .ok()
+                })
             })
-            .unwrap_or_else(|| RelPath::empty().into());
-        let toolchain = delegate
-            .toolchain_store()
-            .active_toolchain(
-                delegate.worktree_id(),
-                base_path.into_arc(),
-                language::LanguageName::new(Self::LANGUAGE_NAME),
-                cx,
+            .chain(
+                // While Debugpy's wiki says absolute paths are required, it actually supports relative paths when cwd is passed in.
+                // (Which should always be the case because Zed defaults to the cwd worktree root)
+                // So we want to check that these relative paths find toolchains as well. Otherwise, they won't be checked
+                // because the strip prefix in the iteration above will return an error
+                config
+                    .config
+                    .get("cwd")
+                    .map(|_| {
+                        ["program", "module"].into_iter().filter_map(|key| {
+                            config.config.get(key).and_then(|value| {
+                                let path = Path::new(value.as_str()?);
+                                RelPath::new(path, PathStyle::local()).ok()
+                            })
+                        })
+                    })
+                    .into_iter()
+                    .flatten(),
             )
-            .await;
+            .chain([RelPath::empty().into()]);
+
+        let mut toolchain = None;
+
+        for base_path in base_paths {
+            if let Some(found_toolchain) = delegate
+                .toolchain_store()
+                .active_toolchain(
+                    delegate.worktree_id(),
+                    base_path.into_arc(),
+                    language::LanguageName::new(Self::LANGUAGE_NAME),
+                    cx,
+                )
+                .await
+            {
+                toolchain = Some(found_toolchain);
+                break;
+            }
+        }
 
         self.fetch_debugpy_whl(toolchain.clone(), delegate)
             .await
@@ -914,7 +943,7 @@ mod tests {
 
         let result = adapter
             .get_installed_binary(
-                &MockDelegate::new(),
+                &test_mocks::MockDelegate::new(),
                 &task_def,
                 None,
                 None,
@@ -955,7 +984,7 @@ mod tests {
 
         let result_host = adapter
             .get_installed_binary(
-                &MockDelegate::new(),
+                &test_mocks::MockDelegate::new(),
                 &task_def_host,
                 None,
                 None,
  
  
  
    
    @@ -6,7 +6,10 @@ use alacritty_terminal::vte::ansi;
 use anyhow::Result;
 use collections::HashMap;
 use dap::{CompletionItem, CompletionItemType, OutputEvent};
-use editor::{Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId};
+use editor::{
+    Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId,
+    SizingBehavior,
+};
 use fuzzy::StringMatchCandidate;
 use gpui::{
     Action as _, AppContext, Context, Corner, Entity, FocusHandle, Focusable, HighlightStyle, Hsla,
@@ -59,6 +62,11 @@ impl Console {
     ) -> Self {
         let console = cx.new(|cx| {
             let mut editor = Editor::multi_line(window, cx);
+            editor.set_mode(EditorMode::Full {
+                scale_ui_elements_with_buffer_font_size: true,
+                show_active_line_background: true,
+                sizing_behavior: SizingBehavior::ExcludeOverscrollMargin,
+            });
             editor.move_to_end(&editor::actions::MoveToEnd, window, cx);
             editor.set_read_only(true);
             editor.disable_scrollbars_and_minimap(window, cx);
  
  
  
    
    @@ -34,6 +34,7 @@ theme.workspace = true
 ui.workspace = true
 util.workspace = true
 workspace.workspace = true
+itertools.workspace = true
 
 [dev-dependencies]
 client = { workspace = true, features = ["test-support"] }
  
  
  
    
    @@ -1,5 +1,5 @@
 use crate::{
-    DIAGNOSTICS_UPDATE_DELAY, IncludeWarnings, ToggleWarnings, context_range_for_entry,
+    DIAGNOSTICS_UPDATE_DEBOUNCE, IncludeWarnings, ToggleWarnings, context_range_for_entry,
     diagnostic_renderer::{DiagnosticBlock, DiagnosticRenderer},
     toolbar_controls::DiagnosticsToolbarEditor,
 };
@@ -283,7 +283,7 @@ impl BufferDiagnosticsEditor {
 
         self.update_excerpts_task = Some(cx.spawn_in(window, async move |editor, cx| {
             cx.background_executor()
-                .timer(DIAGNOSTICS_UPDATE_DELAY)
+                .timer(DIAGNOSTICS_UPDATE_DEBOUNCE)
                 .await;
 
             if let Some(buffer) = buffer {
@@ -938,10 +938,6 @@ impl DiagnosticsToolbarEditor for WeakEntity<BufferDiagnosticsEditor> {
         .unwrap_or(false)
     }
 
-    fn has_stale_excerpts(&self, _cx: &App) -> bool {
-        false
-    }
-
     fn is_updating(&self, cx: &App) -> bool {
         self.read_with(cx, |buffer_diagnostics_editor, cx| {
             buffer_diagnostics_editor.update_excerpts_task.is_some()
  
  
  
    
    @@ -9,7 +9,7 @@ mod diagnostics_tests;
 
 use anyhow::Result;
 use buffer_diagnostics::BufferDiagnosticsEditor;
-use collections::{BTreeSet, HashMap};
+use collections::{BTreeSet, HashMap, HashSet};
 use diagnostic_renderer::DiagnosticBlock;
 use editor::{
     Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
@@ -17,10 +17,11 @@ use editor::{
     multibuffer_context_lines,
 };
 use gpui::{
-    AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable,
-    Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled,
-    Subscription, Task, WeakEntity, Window, actions, div,
+    AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, FocusOutEvent,
+    Focusable, Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString,
+    Styled, Subscription, Task, WeakEntity, Window, actions, div,
 };
+use itertools::Itertools as _;
 use language::{
     Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, DiagnosticEntryRef, Point,
     ToTreeSitterPoint,
@@ -32,7 +33,7 @@ use project::{
 use settings::Settings;
 use std::{
     any::{Any, TypeId},
-    cmp::{self, Ordering},
+    cmp,
     ops::{Range, RangeInclusive},
     sync::Arc,
     time::Duration,
@@ -89,8 +90,8 @@ pub(crate) struct ProjectDiagnosticsEditor {
 
 impl EventEmitter<EditorEvent> for ProjectDiagnosticsEditor {}
 
-const DIAGNOSTICS_UPDATE_DELAY: Duration = Duration::from_millis(50);
-const DIAGNOSTICS_SUMMARY_UPDATE_DELAY: Duration = Duration::from_millis(30);
+const DIAGNOSTICS_UPDATE_DEBOUNCE: Duration = Duration::from_millis(50);
+const DIAGNOSTICS_SUMMARY_UPDATE_DEBOUNCE: Duration = Duration::from_millis(30);
 
 impl Render for ProjectDiagnosticsEditor {
     fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
@@ -149,6 +150,12 @@ impl Render for ProjectDiagnosticsEditor {
     }
 }
 
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+enum RetainExcerpts {
+    Yes,
+    No,
+}
+
 impl ProjectDiagnosticsEditor {
     pub fn register(
         workspace: &mut Workspace,
@@ -165,14 +172,21 @@ impl ProjectDiagnosticsEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
-        let project_event_subscription =
-            cx.subscribe_in(&project_handle, window, |this, _project, event, window, cx| match event {
+        let project_event_subscription = cx.subscribe_in(
+            &project_handle,
+            window,
+            |this, _project, event, window, cx| match event {
                 project::Event::DiskBasedDiagnosticsStarted { .. } => {
                     cx.notify();
                 }
                 project::Event::DiskBasedDiagnosticsFinished { language_server_id } => {
                     log::debug!("disk based diagnostics finished for server {language_server_id}");
-                    this.update_stale_excerpts(window, cx);
+                    this.close_diagnosticless_buffers(
+                        window,
+                        cx,
+                        this.editor.focus_handle(cx).contains_focused(window, cx)
+                            || this.focus_handle.contains_focused(window, cx),
+                    );
                 }
                 project::Event::DiagnosticsUpdated {
                     language_server_id,
@@ -181,34 +195,39 @@ impl ProjectDiagnosticsEditor {
                     this.paths_to_update.extend(paths.clone());
                     this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
                         cx.background_executor()
-                            .timer(DIAGNOSTICS_SUMMARY_UPDATE_DELAY)
+                            .timer(DIAGNOSTICS_SUMMARY_UPDATE_DEBOUNCE)
                             .await;
                         this.update(cx, |this, cx| {
                             this.update_diagnostic_summary(cx);
                         })
                         .log_err();
                     });
-                    cx.emit(EditorEvent::TitleChanged);
 
-                    if this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx) {
-                        log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. recording change");
-                    } else {
-                        log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. updating excerpts");
-                        this.update_stale_excerpts(window, cx);
-                    }
+                    log::debug!(
+                        "diagnostics updated for server {language_server_id}, \
+                        paths {paths:?}. updating excerpts"
+                    );
+                    let focused = this.editor.focus_handle(cx).contains_focused(window, cx)
+                        || this.focus_handle.contains_focused(window, cx);
+                    this.update_stale_excerpts(
+                        if focused {
+                            RetainExcerpts::Yes
+                        } else {
+                            RetainExcerpts::No
+                        },
+                        window,
+                        cx,
+                    );
                 }
                 _ => {}
-            });
+            },
+        );
 
         let focus_handle = cx.focus_handle();
-        cx.on_focus_in(&focus_handle, window, |this, window, cx| {
-            this.focus_in(window, cx)
-        })
-        .detach();
-        cx.on_focus_out(&focus_handle, window, |this, _event, window, cx| {
-            this.focus_out(window, cx)
-        })
-        .detach();
+        cx.on_focus_in(&focus_handle, window, Self::focus_in)
+            .detach();
+        cx.on_focus_out(&focus_handle, window, Self::focus_out)
+            .detach();
 
         let excerpts = cx.new(|cx| MultiBuffer::new(project_handle.read(cx).capability()));
         let editor = cx.new(|cx| {
@@ -238,8 +257,11 @@ impl ProjectDiagnosticsEditor {
                             window.focus(&this.focus_handle);
                         }
                     }
-                    EditorEvent::Blurred => this.update_stale_excerpts(window, cx),
-                    EditorEvent::Saved => this.update_stale_excerpts(window, cx),
+                    EditorEvent::Blurred => this.close_diagnosticless_buffers(window, cx, false),
+                    EditorEvent::Saved => this.close_diagnosticless_buffers(window, cx, true),
+                    EditorEvent::SelectionsChanged { .. } => {
+                        this.close_diagnosticless_buffers(window, cx, true)
+                    }
                     _ => {}
                 }
             },
@@ -283,15 +305,67 @@ impl ProjectDiagnosticsEditor {
         this
     }
 
-    fn update_stale_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        if self.update_excerpts_task.is_some() || self.multibuffer.read(cx).is_dirty(cx) {
+    /// Closes all excerpts of buffers that:
+    ///  - have no diagnostics anymore
+    ///  - are saved (not dirty)
+    ///  - and, if `retain_selections` is true, do not have selections within them
+    fn close_diagnosticless_buffers(
+        &mut self,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+        retain_selections: bool,
+    ) {
+        let buffer_ids = self.multibuffer.read(cx).all_buffer_ids();
+        let selected_buffers = self.editor.update(cx, |editor, cx| {
+            editor
+                .selections
+                .all_anchors(cx)
+                .iter()
+                .filter_map(|anchor| anchor.start.buffer_id)
+                .collect::<HashSet<_>>()
+        });
+        for buffer_id in buffer_ids {
+            if retain_selections && selected_buffers.contains(&buffer_id) {
+                continue;
+            }
+            let has_blocks = self
+                .blocks
+                .get(&buffer_id)
+                .is_none_or(|blocks| blocks.is_empty());
+            if !has_blocks {
+                continue;
+            }
+            let is_dirty = self
+                .multibuffer
+                .read(cx)
+                .buffer(buffer_id)
+                .is_some_and(|buffer| buffer.read(cx).is_dirty());
+            if !is_dirty {
+                continue;
+            }
+            self.multibuffer.update(cx, |b, cx| {
+                b.remove_excerpts_for_buffer(buffer_id, cx);
+            });
+        }
+    }
+
+    fn update_stale_excerpts(
+        &mut self,
+        mut retain_excerpts: RetainExcerpts,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if self.update_excerpts_task.is_some() {
             return;
         }
+        if self.multibuffer.read(cx).is_dirty(cx) {
+            retain_excerpts = RetainExcerpts::Yes;
+        }
 
         let project_handle = self.project.clone();
         self.update_excerpts_task = Some(cx.spawn_in(window, async move |this, cx| {
             cx.background_executor()
-                .timer(DIAGNOSTICS_UPDATE_DELAY)
+                .timer(DIAGNOSTICS_UPDATE_DEBOUNCE)
                 .await;
             loop {
                 let Some(path) = this.update(cx, |this, cx| {
@@ -312,7 +386,7 @@ impl ProjectDiagnosticsEditor {
                     .log_err()
                 {
                     this.update_in(cx, |this, window, cx| {
-                        this.update_excerpts(buffer, window, cx)
+                        this.update_excerpts(buffer, retain_excerpts, window, cx)
                     })?
                     .await?;
                 }
@@ -378,10 +452,10 @@ impl ProjectDiagnosticsEditor {
         }
     }
 
-    fn focus_out(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+    fn focus_out(&mut self, _: FocusOutEvent, window: &mut Window, cx: &mut Context<Self>) {
         if !self.focus_handle.is_focused(window) && !self.editor.focus_handle(cx).is_focused(window)
         {
-            self.update_stale_excerpts(window, cx);
+            self.close_diagnosticless_buffers(window, cx, false);
         }
     }
 
@@ -403,12 +477,13 @@ impl ProjectDiagnosticsEditor {
                         });
                     }
                 }
+                multibuffer.clear(cx);
             });
 
             self.paths_to_update = project_paths;
         });
 
-        self.update_stale_excerpts(window, cx);
+        self.update_stale_excerpts(RetainExcerpts::No, window, cx);
     }
 
     fn diagnostics_are_unchanged(
@@ -431,6 +506,7 @@ impl ProjectDiagnosticsEditor {
     fn update_excerpts(
         &mut self,
         buffer: Entity<Buffer>,
+        retain_excerpts: RetainExcerpts,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Task<Result<()>> {
@@ -497,24 +573,27 @@ impl ProjectDiagnosticsEditor {
                     )
                 })?;
 
-                for item in more {
-                    let i = blocks
-                        .binary_search_by(|probe| {
-                            probe
-                                .initial_range
-                                .start
-                                .cmp(&item.initial_range.start)
-                                .then(probe.initial_range.end.cmp(&item.initial_range.end))
-                                .then(Ordering::Greater)
-                        })
-                        .unwrap_or_else(|i| i);
-                    blocks.insert(i, item);
-                }
+                blocks.extend(more);
             }
 
-            let mut excerpt_ranges: Vec<ExcerptRange<Point>> = Vec::new();
+            let mut excerpt_ranges: Vec<ExcerptRange<Point>> = match retain_excerpts {
+                RetainExcerpts::No => Vec::new(),
+                RetainExcerpts::Yes => this.update(cx, |this, cx| {
+                    this.multibuffer.update(cx, |multi_buffer, cx| {
+                        multi_buffer
+                            .excerpts_for_buffer(buffer_id, cx)
+                            .into_iter()
+                            .map(|(_, range)| ExcerptRange {
+                                context: range.context.to_point(&buffer_snapshot),
+                                primary: range.primary.to_point(&buffer_snapshot),
+                            })
+                            .collect()
+                    })
+                })?,
+            };
+            let mut result_blocks = vec![None; excerpt_ranges.len()];
             let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?;
-            for b in blocks.iter() {
+            for b in blocks {
                 let excerpt_range = context_range_for_entry(
                     b.initial_range.clone(),
                     context_lines,
@@ -541,7 +620,8 @@ impl ProjectDiagnosticsEditor {
                         context: excerpt_range,
                         primary: b.initial_range.clone(),
                     },
-                )
+                );
+                result_blocks.insert(i, Some(b));
             }
 
             this.update_in(cx, |this, window, cx| {
@@ -562,7 +642,7 @@ impl ProjectDiagnosticsEditor {
                     )
                 });
                 #[cfg(test)]
-                let cloned_blocks = blocks.clone();
+                let cloned_blocks = result_blocks.clone();
 
                 if was_empty && let Some(anchor_range) = anchor_ranges.first() {
                     let range_to_select = anchor_range.start..anchor_range.start;
@@ -576,22 +656,20 @@ impl ProjectDiagnosticsEditor {
                     }
                 }
 
-                let editor_blocks =
-                    anchor_ranges
-                        .into_iter()
-                        .zip(blocks.into_iter())
-                        .map(|(anchor, block)| {
-                            let editor = this.editor.downgrade();
-                            BlockProperties {
-                                placement: BlockPlacement::Near(anchor.start),
-                                height: Some(1),
-                                style: BlockStyle::Flex,
-                                render: Arc::new(move |bcx| {
-                                    block.render_block(editor.clone(), bcx)
-                                }),
-                                priority: 1,
-                            }
-                        });
+                let editor_blocks = anchor_ranges
+                    .into_iter()
+                    .zip_eq(result_blocks.into_iter())
+                    .filter_map(|(anchor, block)| {
+                        let block = block?;
+                        let editor = this.editor.downgrade();
+                        Some(BlockProperties {
+                            placement: BlockPlacement::Near(anchor.start),
+                            height: Some(1),
+                            style: BlockStyle::Flex,
+                            render: Arc::new(move |bcx| block.render_block(editor.clone(), bcx)),
+                            priority: 1,
+                        })
+                    });
 
                 let block_ids = this.editor.update(cx, |editor, cx| {
                     editor.display_map.update(cx, |display_map, cx| {
@@ -601,7 +679,9 @@ impl ProjectDiagnosticsEditor {
 
                 #[cfg(test)]
                 {
-                    for (block_id, block) in block_ids.iter().zip(cloned_blocks.iter()) {
+                    for (block_id, block) in
+                        block_ids.iter().zip(cloned_blocks.into_iter().flatten())
+                    {
                         let markdown = block.markdown.clone();
                         editor::test::set_block_content_for_tests(
                             &this.editor,
@@ -626,6 +706,7 @@ impl ProjectDiagnosticsEditor {
 
     fn update_diagnostic_summary(&mut self, cx: &mut Context<Self>) {
         self.summary = self.project.read(cx).diagnostic_summary(false, cx);
+        cx.emit(EditorEvent::TitleChanged);
     }
 }
 
@@ -843,13 +924,6 @@ impl DiagnosticsToolbarEditor for WeakEntity<ProjectDiagnosticsEditor> {
         .unwrap_or(false)
     }
 
-    fn has_stale_excerpts(&self, cx: &App) -> bool {
-        self.read_with(cx, |project_diagnostics_editor, _cx| {
-            !project_diagnostics_editor.paths_to_update.is_empty()
-        })
-        .unwrap_or(false)
-    }
-
     fn is_updating(&self, cx: &App) -> bool {
         self.read_with(cx, |project_diagnostics_editor, cx| {
             project_diagnostics_editor.update_excerpts_task.is_some()
@@ -1010,12 +1084,6 @@ async fn heuristic_syntactic_expand(
                                 return;
                             }
                         }
-
-                        log::info!(
-                            "Expanding to ancestor started on {} node\
-                            exceeding row limit of {max_row_count}.",
-                            node.grammar_name()
-                        );
                         *ancestor_range = Some(None);
                     }
                 })
  
  
  
    
    @@ -119,7 +119,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
     let editor = diagnostics.update(cx, |diagnostics, _| diagnostics.editor.clone());
 
     diagnostics
-        .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx)
+        .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx)
         .await;
 
     pretty_assertions::assert_eq!(
@@ -190,7 +190,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
     });
 
     diagnostics
-        .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx)
+        .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx)
         .await;
 
     pretty_assertions::assert_eq!(
@@ -277,7 +277,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
     });
 
     diagnostics
-        .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx)
+        .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx)
         .await;
 
     pretty_assertions::assert_eq!(
@@ -391,7 +391,7 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) {
 
     // Only the first language server's diagnostics are shown.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.executor().run_until_parked();
     editor.update_in(cx, |editor, window, cx| {
         editor.fold_ranges(vec![Point::new(0, 0)..Point::new(3, 0)], false, window, cx);
@@ -490,7 +490,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
 
     // Only the first language server's diagnostics are shown.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.executor().run_until_parked();
 
     pretty_assertions::assert_eq!(
@@ -530,7 +530,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
 
     // Both language server's diagnostics are shown.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.executor().run_until_parked();
 
     pretty_assertions::assert_eq!(
@@ -587,7 +587,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
 
     // Only the first language server's diagnostics are updated.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.executor().run_until_parked();
 
     pretty_assertions::assert_eq!(
@@ -629,7 +629,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
 
     // Both language servers' diagnostics are updated.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.executor().run_until_parked();
 
     pretty_assertions::assert_eq!(
@@ -760,7 +760,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
                         .unwrap()
                 });
                 cx.executor()
-                    .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+                    .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
 
                 cx.run_until_parked();
             }
@@ -769,7 +769,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
 
     log::info!("updating mutated diagnostics view");
     mutated_diagnostics.update_in(cx, |diagnostics, window, cx| {
-        diagnostics.update_stale_excerpts(window, cx)
+        diagnostics.update_stale_excerpts(RetainExcerpts::No, window, cx)
     });
 
     log::info!("constructing reference diagnostics view");
@@ -777,7 +777,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
         ProjectDiagnosticsEditor::new(true, project.clone(), workspace.downgrade(), window, cx)
     });
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.run_until_parked();
 
     let mutated_excerpts =
@@ -789,7 +789,12 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
 
     // The mutated view may contain more than the reference view as
     // we don't currently shrink excerpts when diagnostics were removed.
-    let mut ref_iter = reference_excerpts.lines().filter(|line| *line != "Β§ -----");
+    let mut ref_iter = reference_excerpts.lines().filter(|line| {
+        // ignore $ ---- and $ <file>.rs
+        !line.starts_with('Β§')
+            || line.starts_with("Β§ diagnostic")
+            || line.starts_with("Β§ related info")
+    });
     let mut next_ref_line = ref_iter.next();
     let mut skipped_block = false;
 
@@ -797,7 +802,12 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
         if let Some(ref_line) = next_ref_line {
             if mut_line == ref_line {
                 next_ref_line = ref_iter.next();
-            } else if mut_line.contains('Β§') && mut_line != "Β§ -----" {
+            } else if mut_line.contains('Β§')
+                // ignore $ ---- and $ <file>.rs
+                && (!mut_line.starts_with('Β§')
+                    || mut_line.starts_with("Β§ diagnostic")
+                    || mut_line.starts_with("Β§ related info"))
+            {
                 skipped_block = true;
             }
         }
@@ -877,7 +887,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
                             vec![Inlay::edit_prediction(
                                 post_inc(&mut next_inlay_id),
                                 snapshot.buffer_snapshot().anchor_before(position),
-                                Rope::from_iter_small(["Test inlay ", "next_inlay_id"]),
+                                Rope::from_iter(["Test inlay ", "next_inlay_id"]),
                             )],
                             cx,
                         );
@@ -949,7 +959,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
                         .unwrap()
                 });
                 cx.executor()
-                    .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+                    .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
 
                 cx.run_until_parked();
             }
@@ -958,11 +968,11 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
 
     log::info!("updating mutated diagnostics view");
     mutated_diagnostics.update_in(cx, |diagnostics, window, cx| {
-        diagnostics.update_stale_excerpts(window, cx)
+        diagnostics.update_stale_excerpts(RetainExcerpts::No, window, cx)
     });
 
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
     cx.run_until_parked();
 }
 
@@ -1427,7 +1437,7 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) {
     let editor = diagnostics.update(cx, |diagnostics, _| diagnostics.editor.clone());
 
     diagnostics
-        .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx)
+        .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx)
         .await;
 
     // Verify that the diagnostic codes are displayed correctly
@@ -1704,7 +1714,7 @@ async fn test_buffer_diagnostics(cx: &mut TestAppContext) {
     // wait a little bit to ensure that the buffer diagnostic's editor content
     // is rendered.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
 
     pretty_assertions::assert_eq!(
         editor_content_with_blocks(&editor, cx),
@@ -1837,7 +1847,7 @@ async fn test_buffer_diagnostics_without_warnings(cx: &mut TestAppContext) {
     // wait a little bit to ensure that the buffer diagnostic's editor content
     // is rendered.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
 
     pretty_assertions::assert_eq!(
         editor_content_with_blocks(&editor, cx),
@@ -1971,7 +1981,7 @@ async fn test_buffer_diagnostics_multiple_servers(cx: &mut TestAppContext) {
     // wait a little bit to ensure that the buffer diagnostic's editor content
     // is rendered.
     cx.executor()
-        .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+        .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10));
 
     pretty_assertions::assert_eq!(
         editor_content_with_blocks(&editor, cx),
@@ -2070,7 +2080,7 @@ fn random_lsp_diagnostic(
     const ERROR_MARGIN: usize = 10;
 
     let file_content = fs.read_file_sync(path).unwrap();
-    let file_text = Rope::from_str_small(String::from_utf8_lossy(&file_content).as_ref());
+    let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref());
 
     let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN));
     let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN));
  
  
  
    
    @@ -16,9 +16,6 @@ pub(crate) trait DiagnosticsToolbarEditor: Send + Sync {
     /// Toggles whether warning diagnostics should be displayed by the
     /// diagnostics editor.
     fn toggle_warnings(&self, window: &mut Window, cx: &mut App);
-    /// Indicates whether any of the excerpts displayed by the diagnostics
-    /// editor are stale.
-    fn has_stale_excerpts(&self, cx: &App) -> bool;
     /// Indicates whether the diagnostics editor is currently updating the
     /// diagnostics.
     fn is_updating(&self, cx: &App) -> bool;
@@ -37,14 +34,12 @@ pub(crate) trait DiagnosticsToolbarEditor: Send + Sync {
 
 impl Render for ToolbarControls {
     fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let mut has_stale_excerpts = false;
         let mut include_warnings = false;
         let mut is_updating = false;
 
         match &self.editor {
             Some(editor) => {
                 include_warnings = editor.include_warnings(cx);
-                has_stale_excerpts = editor.has_stale_excerpts(cx);
                 is_updating = editor.is_updating(cx);
             }
             None => {}
@@ -86,7 +81,6 @@ impl Render for ToolbarControls {
                         IconButton::new("refresh-diagnostics", IconName::ArrowCircle)
                             .icon_color(Color::Info)
                             .shape(IconButtonShape::Square)
-                            .disabled(!has_stale_excerpts)
                             .tooltip(Tooltip::for_action_title(
                                 "Refresh diagnostics",
                                 &ToggleDiagnosticsRefresh,
  
  
  
    
    @@ -13,7 +13,7 @@ use gpui::{
 };
 use indoc::indoc;
 use language::{
-    EditPredictionsMode, File, Language, Rope,
+    EditPredictionsMode, File, Language,
     language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings},
 };
 use project::DisableAiSettings;
@@ -1056,11 +1056,8 @@ async fn open_disabled_globs_setting_in_editor(
 ) -> Result<()> {
     let settings_editor = workspace
         .update_in(cx, |_, window, cx| {
-            create_and_open_local_file(paths::settings_file(), window, cx, |cx| {
-                Rope::from_str(
-                    settings::initial_user_settings_content().as_ref(),
-                    cx.background_executor(),
-                )
+            create_and_open_local_file(paths::settings_file(), window, cx, || {
+                settings::initial_user_settings_content().as_ref().into()
             })
         })?
         .await?
  
  
  
    
    @@ -28,10 +28,12 @@ use std::{
     rc::Rc,
 };
 use task::ResolvedTask;
-use ui::{Color, IntoElement, ListItem, Pixels, Popover, Styled, prelude::*};
+use ui::{
+    Color, IntoElement, ListItem, Pixels, Popover, ScrollAxes, Scrollbars, Styled, WithScrollbar,
+    prelude::*,
+};
 use util::ResultExt;
 
-use crate::CodeActionSource;
 use crate::hover_popover::{hover_markdown_style, open_markdown_url};
 use crate::{
     CodeActionProvider, CompletionId, CompletionItemKind, CompletionProvider, DisplayRow, Editor,
@@ -39,7 +41,8 @@ use crate::{
     actions::{ConfirmCodeAction, ConfirmCompletion},
     split_words, styled_runs_for_code_label,
 };
-use settings::SnippetSortOrder;
+use crate::{CodeActionSource, EditorSettings};
+use settings::{Settings, SnippetSortOrder};
 
 pub const MENU_GAP: Pixels = px(4.);
 pub const MENU_ASIDE_X_PADDING: Pixels = px(16.);
@@ -261,6 +264,20 @@ impl Drop for CompletionsMenu {
     }
 }
 
+struct CompletionMenuScrollBarSetting;
+
+impl ui::scrollbars::GlobalSetting for CompletionMenuScrollBarSetting {
+    fn get_value(_cx: &App) -> &Self {
+        &Self
+    }
+}
+
+impl ui::scrollbars::ScrollbarVisibility for CompletionMenuScrollBarSetting {
+    fn visibility(&self, cx: &App) -> ui::scrollbars::ShowScrollbar {
+        EditorSettings::get_global(cx).completion_menu_scrollbar
+    }
+}
+
 impl CompletionsMenu {
     pub fn new(
         id: CompletionId,
@@ -898,7 +915,17 @@ impl CompletionsMenu {
             }
         });
 
-        Popover::new().child(list).into_any_element()
+        Popover::new()
+            .child(
+                div().child(list).custom_scrollbars(
+                    Scrollbars::for_settings::<CompletionMenuScrollBarSetting>()
+                        .show_along(ScrollAxes::Vertical)
+                        .tracked_scroll_handle(self.scroll_handle.clone()),
+                    window,
+                    cx,
+                ),
+            )
+            .into_any_element()
     }
 
     fn render_aside(
  
  
  
    
    @@ -1569,7 +1569,6 @@ pub mod tests {
     use lsp::LanguageServerId;
     use project::Project;
     use rand::{Rng, prelude::*};
-    use rope::Rope;
     use settings::{SettingsContent, SettingsStore};
     use smol::stream::StreamExt;
     use std::{env, sync::Arc};
@@ -2075,7 +2074,7 @@ pub mod tests {
                 vec![Inlay::edit_prediction(
                     0,
                     buffer_snapshot.anchor_after(0),
-                    Rope::from_str_small("\n"),
+                    "\n",
                 )],
                 cx,
             );
  
  
  
    
    @@ -700,20 +700,16 @@ impl InlayMap {
                     .collect::<String>();
 
                 let next_inlay = if i % 2 == 0 {
-                    use rope::Rope;
-
                     Inlay::mock_hint(
                         post_inc(next_inlay_id),
                         snapshot.buffer.anchor_at(position, bias),
-                        Rope::from_str_small(&text),
+                        &text,
                     )
                 } else {
-                    use rope::Rope;
-
                     Inlay::edit_prediction(
                         post_inc(next_inlay_id),
                         snapshot.buffer.anchor_at(position, bias),
-                        Rope::from_str_small(&text),
+                        &text,
                     )
                 };
                 let inlay_id = next_inlay.id;
@@ -1305,7 +1301,7 @@ mod tests {
             vec![Inlay::mock_hint(
                 post_inc(&mut next_inlay_id),
                 buffer.read(cx).snapshot(cx).anchor_after(3),
-                Rope::from_str_small("|123|"),
+                "|123|",
             )],
         );
         assert_eq!(inlay_snapshot.text(), "abc|123|defghi");
@@ -1382,12 +1378,12 @@ mod tests {
                 Inlay::mock_hint(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(3),
-                    Rope::from_str_small("|123|"),
+                    "|123|",
                 ),
                 Inlay::edit_prediction(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_after(3),
-                    Rope::from_str_small("|456|"),
+                    "|456|",
                 ),
             ],
         );
@@ -1597,17 +1593,17 @@ mod tests {
                 Inlay::mock_hint(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(0),
-                    Rope::from_str_small("|123|\n"),
+                    "|123|\n",
                 ),
                 Inlay::mock_hint(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(4),
-                    Rope::from_str_small("|456|"),
+                    "|456|",
                 ),
                 Inlay::edit_prediction(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(7),
-                    Rope::from_str_small("\n|567|\n"),
+                    "\n|567|\n",
                 ),
             ],
         );
@@ -1681,14 +1677,9 @@ mod tests {
                     (offset, inlay.clone())
                 })
                 .collect::<Vec<_>>();
-            let mut expected_text =
-                Rope::from_str(&buffer_snapshot.text(), cx.background_executor());
+            let mut expected_text = Rope::from(&buffer_snapshot.text());
             for (offset, inlay) in inlays.iter().rev() {
-                expected_text.replace(
-                    *offset..*offset,
-                    &inlay.text().to_string(),
-                    cx.background_executor(),
-                );
+                expected_text.replace(*offset..*offset, &inlay.text().to_string());
             }
             assert_eq!(inlay_snapshot.text(), expected_text.to_string());
 
@@ -2076,7 +2067,7 @@ mod tests {
         let inlay = Inlay {
             id: InlayId::Hint(0),
             position,
-            content: InlayContent::Text(text::Rope::from_str(inlay_text, cx.background_executor())),
+            content: InlayContent::Text(text::Rope::from(inlay_text)),
         };
 
         let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
@@ -2190,10 +2181,7 @@ mod tests {
             let inlay = Inlay {
                 id: InlayId::Hint(0),
                 position,
-                content: InlayContent::Text(text::Rope::from_str(
-                    test_case.inlay_text,
-                    cx.background_executor(),
-                )),
+                content: InlayContent::Text(text::Rope::from(test_case.inlay_text)),
             };
 
             let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
  
  
  
    
    @@ -1042,7 +1042,7 @@ mod tests {
         let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size);
         let tabs_snapshot = tab_map.set_max_expansion_column(32);
 
-        let text = text::Rope::from_str(tabs_snapshot.text().as_str(), cx.background_executor());
+        let text = text::Rope::from(tabs_snapshot.text().as_str());
         log::info!(
             "TabMap text (tab size: {}): {:?}",
             tab_size,
  
  
  
    
    @@ -863,7 +863,7 @@ impl WrapSnapshot {
                 }
             }
 
-            let text = language::Rope::from_str_small(self.text().as_str());
+            let text = language::Rope::from(self.text().as_str());
             let mut input_buffer_rows = self.tab_snapshot.rows(0);
             let mut expected_buffer_rows = Vec::new();
             let mut prev_tab_row = 0;
@@ -1413,10 +1413,9 @@ mod tests {
             }
         }
 
-        let mut initial_text =
-            Rope::from_str(initial_snapshot.text().as_str(), cx.background_executor());
+        let mut initial_text = Rope::from(initial_snapshot.text().as_str());
         for (snapshot, patch) in edits {
-            let snapshot_text = Rope::from_str(snapshot.text().as_str(), cx.background_executor());
+            let snapshot_text = Rope::from(snapshot.text().as_str());
             for edit in &patch {
                 let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
                 let old_end = initial_text.point_to_offset(cmp::min(
@@ -1432,7 +1431,7 @@ mod tests {
                     .chunks_in_range(new_start..new_end)
                     .collect::<String>();
 
-                initial_text.replace(old_start..old_end, &new_text, cx.background_executor());
+                initial_text.replace(old_start..old_end, &new_text);
             }
             assert_eq!(initial_text.to_string(), snapshot_text.to_string());
         }
  
  
  
    
    @@ -452,6 +452,20 @@ pub enum SelectMode {
     All,
 }
 
+#[derive(Copy, Clone, Default, PartialEq, Eq, Debug)]
+pub enum SizingBehavior {
+    /// The editor will layout itself using `size_full` and will include the vertical
+    /// scroll margin as requested by user settings.
+    #[default]
+    Default,
+    /// The editor will layout itself using `size_full`, but will not have any
+    /// vertical overscroll.
+    ExcludeOverscrollMargin,
+    /// The editor will request a vertical size according to its content and will be
+    /// layouted without a vertical scroll margin.
+    SizeByContent,
+}
+
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub enum EditorMode {
     SingleLine,
@@ -464,8 +478,8 @@ pub enum EditorMode {
         scale_ui_elements_with_buffer_font_size: bool,
         /// When set to `true`, the editor will render a background for the active line.
         show_active_line_background: bool,
-        /// When set to `true`, the editor's height will be determined by its content.
-        sized_by_content: bool,
+        /// Determines the sizing behavior for this editor
+        sizing_behavior: SizingBehavior,
     },
     Minimap {
         parent: WeakEntity<Editor>,
@@ -477,7 +491,7 @@ impl EditorMode {
         Self::Full {
             scale_ui_elements_with_buffer_font_size: true,
             show_active_line_background: true,
-            sized_by_content: false,
+            sizing_behavior: SizingBehavior::Default,
         }
     }
 
@@ -1832,9 +1846,15 @@ impl Editor {
                     project::Event::RefreshCodeLens => {
                         // we always query lens with actions, without storing them, always refreshing them
                     }
-                    project::Event::RefreshInlayHints(server_id) => {
+                    project::Event::RefreshInlayHints {
+                        server_id,
+                        request_id,
+                    } => {
                         editor.refresh_inlay_hints(
-                            InlayHintRefreshReason::RefreshRequested(*server_id),
+                            InlayHintRefreshReason::RefreshRequested {
+                                server_id: *server_id,
+                                request_id: *request_id,
+                            },
                             cx,
                         );
                     }
@@ -7852,7 +7872,7 @@ impl Editor {
                         let inlay = Inlay::edit_prediction(
                             post_inc(&mut self.next_inlay_id),
                             range.start,
-                            Rope::from_str_small(new_text.as_str()),
+                            new_text.as_str(),
                         );
                         inlay_ids.push(inlay.id);
                         inlays.push(inlay);
  
  
  
    
    @@ -55,6 +55,7 @@ pub struct EditorSettings {
     pub drag_and_drop_selection: DragAndDropSelection,
     pub lsp_document_colors: DocumentColorsRenderMode,
     pub minimum_contrast_for_highlights: f32,
+    pub completion_menu_scrollbar: ShowScrollbar,
 }
 #[derive(Debug, Clone)]
 pub struct Jupyter {
@@ -159,6 +160,7 @@ pub struct SearchSettings {
     pub case_sensitive: bool,
     pub include_ignored: bool,
     pub regex: bool,
+    pub center_on_match: bool,
 }
 
 impl EditorSettings {
@@ -249,6 +251,7 @@ impl Settings for EditorSettings {
                 case_sensitive: search.case_sensitive.unwrap(),
                 include_ignored: search.include_ignored.unwrap(),
                 regex: search.regex.unwrap(),
+                center_on_match: search.center_on_match.unwrap(),
             },
             auto_signature_help: editor.auto_signature_help.unwrap(),
             show_signature_help_after_edits: editor.show_signature_help_after_edits.unwrap(),
@@ -266,6 +269,7 @@ impl Settings for EditorSettings {
             },
             lsp_document_colors: editor.lsp_document_colors.unwrap(),
             minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0,
+            completion_menu_scrollbar: editor.completion_menu_scrollbar.map(Into::into).unwrap(),
         }
     }
 }
  
  
  
    
    @@ -14217,7 +14217,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
                     EditorMode::Full {
                         scale_ui_elements_with_buffer_font_size: false,
                         show_active_line_background: false,
-                        sized_by_content: false,
+                        sizing_behavior: SizingBehavior::Default,
                     },
                     multi_buffer.clone(),
                     Some(project.clone()),
  
  
  
    
    @@ -8,8 +8,8 @@ use crate::{
     HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp,
     MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown,
     PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, SelectPhase,
-    SelectedTextHighlight, Selection, SelectionDragState, SoftWrap, StickyHeaderExcerpt, ToPoint,
-    ToggleFold, ToggleFoldAll,
+    SelectedTextHighlight, Selection, SelectionDragState, SizingBehavior, SoftWrap,
+    StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
     code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
     display_map::{
         Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins,
@@ -8441,11 +8441,11 @@ impl Element for EditorElement {
                         window.request_layout(style, None, cx)
                     }
                     EditorMode::Full {
-                        sized_by_content, ..
+                        sizing_behavior, ..
                     } => {
                         let mut style = Style::default();
                         style.size.width = relative(1.).into();
-                        if sized_by_content {
+                        if sizing_behavior == SizingBehavior::SizeByContent {
                             let snapshot = editor.snapshot(window, cx);
                             let line_height =
                                 self.style.text.line_height_in_pixels(window.rem_size());
@@ -8609,7 +8609,8 @@ impl Element for EditorElement {
                         EditorMode::SingleLine
                             | EditorMode::AutoHeight { .. }
                             | EditorMode::Full {
-                                sized_by_content: true,
+                                sizing_behavior: SizingBehavior::ExcludeOverscrollMargin
+                                    | SizingBehavior::SizeByContent,
                                 ..
                             }
                     ) {
  
  
  
    
    @@ -1115,19 +1115,18 @@ mod tests {
 
         let fs = FakeFs::new(cx.executor());
         let buffer_initial_text_len = rng.random_range(5..15);
-        let mut buffer_initial_text = Rope::from_str(
+        let mut buffer_initial_text = Rope::from(
             RandomCharIter::new(&mut rng)
                 .take(buffer_initial_text_len)
                 .collect::<String>()
                 .as_str(),
-            cx.background_executor(),
         );
 
         let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5);
         newline_ixs.sort_unstable();
         for newline_ix in newline_ixs.into_iter().rev() {
             let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right);
-            buffer_initial_text.replace(newline_ix..newline_ix, "\n", cx.background_executor());
+            buffer_initial_text.replace(newline_ix..newline_ix, "\n");
         }
         log::info!("initial buffer text: {:?}", buffer_initial_text);
 
  
  
  
    
    @@ -59,10 +59,10 @@ impl Inlay {
     pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self {
         let mut text = hint.text();
         if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') {
-            text.push_small(" ");
+            text.push(" ");
         }
         if hint.padding_left && text.chars_at(0).next() != Some(' ') {
-            text.push_front_small(" ");
+            text.push_front(" ");
         }
         Self {
             id,
@@ -72,11 +72,11 @@ impl Inlay {
     }
 
     #[cfg(any(test, feature = "test-support"))]
-    pub fn mock_hint(id: usize, position: Anchor, text: Rope) -> Self {
+    pub fn mock_hint(id: usize, position: Anchor, text: impl Into<Rope>) -> Self {
         Self {
             id: InlayId::Hint(id),
             position,
-            content: InlayContent::Text(text),
+            content: InlayContent::Text(text.into()),
         }
     }
 
@@ -88,19 +88,19 @@ impl Inlay {
         }
     }
 
-    pub fn edit_prediction(id: usize, position: Anchor, text: Rope) -> Self {
+    pub fn edit_prediction<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
         Self {
             id: InlayId::EditPrediction(id),
             position,
-            content: InlayContent::Text(text),
+            content: InlayContent::Text(text.into()),
         }
     }
 
-    pub fn debugger(id: usize, position: Anchor, text: Rope) -> Self {
+    pub fn debugger<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
         Self {
             id: InlayId::DebuggerValue(id),
             position,
-            content: InlayContent::Text(text),
+            content: InlayContent::Text(text.into()),
         }
     }
 
@@ -108,7 +108,7 @@ impl Inlay {
         static COLOR_TEXT: OnceLock<Rope> = OnceLock::new();
         match &self.content {
             InlayContent::Text(text) => text,
-            InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from_str_small("βΌ")),
+            InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("βΌ")),
         }
     }
 
  
  
  
    
    @@ -1,5 +1,4 @@
 use std::{
-    collections::hash_map,
     ops::{ControlFlow, Range},
     time::Duration,
 };
@@ -49,8 +48,8 @@ pub struct LspInlayHintData {
     allowed_hint_kinds: HashSet<Option<InlayHintKind>>,
     invalidate_debounce: Option<Duration>,
     append_debounce: Option<Duration>,
-    hint_refresh_tasks: HashMap<BufferId, HashMap<Vec<Range<BufferRow>>, Vec<Task<()>>>>,
-    hint_chunk_fetched: HashMap<BufferId, (Global, HashSet<Range<BufferRow>>)>,
+    hint_refresh_tasks: HashMap<BufferId, Vec<Task<()>>>,
+    hint_chunk_fetching: HashMap<BufferId, (Global, HashSet<Range<BufferRow>>)>,
     invalidate_hints_for_buffers: HashSet<BufferId>,
     pub added_hints: HashMap<InlayId, Option<InlayHintKind>>,
 }
@@ -63,7 +62,7 @@ impl LspInlayHintData {
             enabled_in_settings: settings.enabled,
             hint_refresh_tasks: HashMap::default(),
             added_hints: HashMap::default(),
-            hint_chunk_fetched: HashMap::default(),
+            hint_chunk_fetching: HashMap::default(),
             invalidate_hints_for_buffers: HashSet::default(),
             invalidate_debounce: debounce_value(settings.edit_debounce_ms),
             append_debounce: debounce_value(settings.scroll_debounce_ms),
@@ -99,9 +98,8 @@ impl LspInlayHintData {
 
     pub fn clear(&mut self) {
         self.hint_refresh_tasks.clear();
-        self.hint_chunk_fetched.clear();
+        self.hint_chunk_fetching.clear();
         self.added_hints.clear();
-        self.invalidate_hints_for_buffers.clear();
     }
 
     /// Checks inlay hint settings for enabled hint kinds and general enabled state.
@@ -199,7 +197,7 @@ impl LspInlayHintData {
     ) {
         for buffer_id in removed_buffer_ids {
             self.hint_refresh_tasks.remove(buffer_id);
-            self.hint_chunk_fetched.remove(buffer_id);
+            self.hint_chunk_fetching.remove(buffer_id);
         }
     }
 }
@@ -211,7 +209,10 @@ pub enum InlayHintRefreshReason {
     SettingsChange(InlayHintSettings),
     NewLinesShown,
     BufferEdited(BufferId),
-    RefreshRequested(LanguageServerId),
+    RefreshRequested {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     ExcerptsRemoved(Vec<ExcerptId>),
 }
 
@@ -296,7 +297,7 @@ impl Editor {
             | InlayHintRefreshReason::Toggle(_)
             | InlayHintRefreshReason::SettingsChange(_) => true,
             InlayHintRefreshReason::NewLinesShown
-            | InlayHintRefreshReason::RefreshRequested(_)
+            | InlayHintRefreshReason::RefreshRequested { .. }
             | InlayHintRefreshReason::ExcerptsRemoved(_) => false,
             InlayHintRefreshReason::BufferEdited(buffer_id) => {
                 let Some(affected_language) = self
@@ -370,48 +371,45 @@ impl Editor {
             let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else {
                 continue;
             };
-            let fetched_tasks = inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default();
+
+            let (fetched_for_version, fetched_chunks) = inlay_hints
+                .hint_chunk_fetching
+                .entry(buffer_id)
+                .or_default();
             if visible_excerpts
                 .buffer_version
-                .changed_since(&fetched_tasks.0)
+                .changed_since(fetched_for_version)
             {
-                fetched_tasks.1.clear();
-                fetched_tasks.0 = visible_excerpts.buffer_version.clone();
+                *fetched_for_version = visible_excerpts.buffer_version.clone();
+                fetched_chunks.clear();
                 inlay_hints.hint_refresh_tasks.remove(&buffer_id);
             }
 
-            let applicable_chunks =
-                semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx);
+            let known_chunks = if ignore_previous_fetches {
+                None
+            } else {
+                Some((fetched_for_version.clone(), fetched_chunks.clone()))
+            };
 
-            match inlay_hints
+            let mut applicable_chunks =
+                semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx);
+            applicable_chunks.retain(|chunk| fetched_chunks.insert(chunk.clone()));
+            if applicable_chunks.is_empty() && !ignore_previous_fetches {
+                continue;
+            }
+            inlay_hints
                 .hint_refresh_tasks
                 .entry(buffer_id)
                 .or_default()
-                .entry(applicable_chunks)
-            {
-                hash_map::Entry::Occupied(mut o) => {
-                    if invalidate_cache.should_invalidate() || ignore_previous_fetches {
-                        o.get_mut().push(spawn_editor_hints_refresh(
-                            buffer_id,
-                            invalidate_cache,
-                            ignore_previous_fetches,
-                            debounce,
-                            visible_excerpts,
-                            cx,
-                        ));
-                    }
-                }
-                hash_map::Entry::Vacant(v) => {
-                    v.insert(Vec::new()).push(spawn_editor_hints_refresh(
-                        buffer_id,
-                        invalidate_cache,
-                        ignore_previous_fetches,
-                        debounce,
-                        visible_excerpts,
-                        cx,
-                    ));
-                }
-            }
+                .push(spawn_editor_hints_refresh(
+                    buffer_id,
+                    invalidate_cache,
+                    debounce,
+                    visible_excerpts,
+                    known_chunks,
+                    applicable_chunks,
+                    cx,
+                ));
         }
     }
 
@@ -506,9 +504,13 @@ impl Editor {
             }
             InlayHintRefreshReason::NewLinesShown => InvalidationStrategy::None,
             InlayHintRefreshReason::BufferEdited(_) => InvalidationStrategy::BufferEdited,
-            InlayHintRefreshReason::RefreshRequested(server_id) => {
-                InvalidationStrategy::RefreshRequested(*server_id)
-            }
+            InlayHintRefreshReason::RefreshRequested {
+                server_id,
+                request_id,
+            } => InvalidationStrategy::RefreshRequested {
+                server_id: *server_id,
+                request_id: *request_id,
+            },
         };
 
         match &mut self.inlay_hints {
@@ -718,44 +720,29 @@ impl Editor {
     fn inlay_hints_for_buffer(
         &mut self,
         invalidate_cache: InvalidationStrategy,
-        ignore_previous_fetches: bool,
         buffer_excerpts: VisibleExcerpts,
+        known_chunks: Option<(Global, HashSet<Range<BufferRow>>)>,
         cx: &mut Context<Self>,
     ) -> Option<Vec<Task<(Range<BufferRow>, anyhow::Result<CacheInlayHints>)>>> {
         let semantics_provider = self.semantics_provider()?;
-        let inlay_hints = self.inlay_hints.as_mut()?;
-        let buffer_id = buffer_excerpts.buffer.read(cx).remote_id();
 
         let new_hint_tasks = semantics_provider
             .inlay_hints(
                 invalidate_cache,
                 buffer_excerpts.buffer,
                 buffer_excerpts.ranges,
-                inlay_hints
-                    .hint_chunk_fetched
-                    .get(&buffer_id)
-                    .filter(|_| !ignore_previous_fetches && !invalidate_cache.should_invalidate())
-                    .cloned(),
+                known_chunks,
                 cx,
             )
             .unwrap_or_default();
 
-        let (known_version, known_chunks) =
-            inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default();
-        if buffer_excerpts.buffer_version.changed_since(known_version) {
-            known_chunks.clear();
-            *known_version = buffer_excerpts.buffer_version;
-        }
-
-        let mut hint_tasks = Vec::new();
+        let mut hint_tasks = None;
         for (row_range, new_hints_task) in new_hint_tasks {
-            let inserted = known_chunks.insert(row_range.clone());
-            if inserted || ignore_previous_fetches || invalidate_cache.should_invalidate() {
-                hint_tasks.push(cx.spawn(async move |_, _| (row_range, new_hints_task.await)));
-            }
+            hint_tasks
+                .get_or_insert_with(Vec::new)
+                .push(cx.spawn(async move |_, _| (row_range, new_hints_task.await)));
         }
-
-        Some(hint_tasks)
+        hint_tasks
     }
 
     fn apply_fetched_hints(
@@ -793,20 +780,28 @@ impl Editor {
         let excerpts = self.buffer.read(cx).excerpt_ids();
         let hints_to_insert = new_hints
             .into_iter()
-            .filter_map(|(chunk_range, hints_result)| match hints_result {
-                Ok(new_hints) => Some(new_hints),
-                Err(e) => {
-                    log::error!(
-                        "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}"
-                    );
-                    if let Some((for_version, chunks_fetched)) =
-                        inlay_hints.hint_chunk_fetched.get_mut(&buffer_id)
-                    {
-                        if for_version == &query_version {
-                            chunks_fetched.remove(&chunk_range);
+            .filter_map(|(chunk_range, hints_result)| {
+                let chunks_fetched = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id);
+                match hints_result {
+                    Ok(new_hints) => {
+                        if new_hints.is_empty() {
+                            if let Some((_, chunks_fetched)) = chunks_fetched {
+                                chunks_fetched.remove(&chunk_range);
+                            }
                         }
+                        Some(new_hints)
+                    }
+                    Err(e) => {
+                        log::error!(
+                            "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}"
+                        );
+                        if let Some((for_version, chunks_fetched)) = chunks_fetched {
+                            if for_version == &query_version {
+                                chunks_fetched.remove(&chunk_range);
+                            }
+                        }
+                        None
                     }
-                    None
                 }
             })
             .flat_map(|hints| hints.into_values())
@@ -856,9 +851,10 @@ struct VisibleExcerpts {
 fn spawn_editor_hints_refresh(
     buffer_id: BufferId,
     invalidate_cache: InvalidationStrategy,
-    ignore_previous_fetches: bool,
     debounce: Option<Duration>,
     buffer_excerpts: VisibleExcerpts,
+    known_chunks: Option<(Global, HashSet<Range<BufferRow>>)>,
+    applicable_chunks: Vec<Range<BufferRow>>,
     cx: &mut Context<'_, Editor>,
 ) -> Task<()> {
     cx.spawn(async move |editor, cx| {
@@ -869,12 +865,7 @@ fn spawn_editor_hints_refresh(
         let query_version = buffer_excerpts.buffer_version.clone();
         let Some(hint_tasks) = editor
             .update(cx, |editor, cx| {
-                editor.inlay_hints_for_buffer(
-                    invalidate_cache,
-                    ignore_previous_fetches,
-                    buffer_excerpts,
-                    cx,
-                )
+                editor.inlay_hints_for_buffer(invalidate_cache, buffer_excerpts, known_chunks, cx)
             })
             .ok()
         else {
@@ -882,6 +873,19 @@ fn spawn_editor_hints_refresh(
         };
         let hint_tasks = hint_tasks.unwrap_or_default();
         if hint_tasks.is_empty() {
+            editor
+                .update(cx, |editor, _| {
+                    if let Some((_, hint_chunk_fetching)) = editor
+                        .inlay_hints
+                        .as_mut()
+                        .and_then(|inlay_hints| inlay_hints.hint_chunk_fetching.get_mut(&buffer_id))
+                    {
+                        for applicable_chunks in &applicable_chunks {
+                            hint_chunk_fetching.remove(applicable_chunks);
+                        }
+                    }
+                })
+                .ok();
             return;
         }
         let new_hints = join_all(hint_tasks).await;
@@ -1102,7 +1106,10 @@ pub mod tests {
         editor
             .update(cx, |editor, _window, cx| {
                 editor.refresh_inlay_hints(
-                    InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()),
+                    InlayHintRefreshReason::RefreshRequested {
+                        server_id: fake_server.server.server_id(),
+                        request_id: Some(1),
+                    },
                     cx,
                 );
             })
@@ -1958,15 +1965,8 @@ pub mod tests {
     async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) {
         init_test(cx, |settings| {
             settings.defaults.inlay_hints = Some(InlayHintSettingsContent {
-                show_value_hints: Some(true),
                 enabled: Some(true),
-                edit_debounce_ms: Some(0),
-                scroll_debounce_ms: Some(0),
-                show_type_hints: Some(true),
-                show_parameter_hints: Some(true),
-                show_other_hints: Some(true),
-                show_background: Some(false),
-                toggle_on_modifiers_press: None,
+                ..InlayHintSettingsContent::default()
             })
         });
 
@@ -2044,6 +2044,7 @@ pub mod tests {
             cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx));
         cx.executor().run_until_parked();
         let _fake_server = fake_servers.next().await.unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
         cx.executor().run_until_parked();
 
         let ranges = lsp_request_ranges
@@ -2129,6 +2130,7 @@ pub mod tests {
                 );
             })
             .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
         cx.executor().run_until_parked();
         editor.update(cx, |_, _, _| {
             let ranges = lsp_request_ranges
@@ -2145,6 +2147,7 @@ pub mod tests {
                 editor.handle_input("++++more text++++", window, cx);
             })
             .unwrap();
+        cx.executor().advance_clock(Duration::from_secs(1));
         cx.executor().run_until_parked();
         editor.update(cx, |editor, _window, cx| {
             let mut ranges = lsp_request_ranges.lock().drain(..).collect::<Vec<_>>();
@@ -3887,7 +3890,10 @@ let c = 3;"#
         editor
             .update(cx, |editor, _, cx| {
                 editor.refresh_inlay_hints(
-                    InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()),
+                    InlayHintRefreshReason::RefreshRequested {
+                        server_id: fake_server.server.server_id(),
+                        request_id: Some(1),
+                    },
                     cx,
                 );
             })
@@ -4022,7 +4028,7 @@ let c = 3;"#
         let mut all_fetched_hints = Vec::new();
         for buffer in editor.buffer.read(cx).all_buffers() {
             lsp_store.update(cx, |lsp_store, cx| {
-                let hints = &lsp_store.latest_lsp_data(&buffer, cx).inlay_hints();
+                let hints = lsp_store.latest_lsp_data(&buffer, cx).inlay_hints();
                 all_cached_labels.extend(hints.all_cached_hints().into_iter().map(|hint| {
                     let mut label = hint.text().to_string();
                     if hint.padding_left {
  
  
  
    
    @@ -1593,7 +1593,12 @@ impl SearchableItem for Editor {
     ) {
         self.unfold_ranges(&[matches[index].clone()], false, true, cx);
         let range = self.range_for_match(&matches[index], collapse);
-        self.change_selections(Default::default(), window, cx, |s| {
+        let autoscroll = if EditorSettings::get_global(cx).search.center_on_match {
+            Autoscroll::center()
+        } else {
+            Autoscroll::fit()
+        };
+        self.change_selections(SelectionEffects::scroll(autoscroll), window, cx, |s| {
             s.select_ranges([range]);
         })
     }
  
  
  
    
    @@ -60,8 +60,10 @@ async fn lsp_task_context(
     buffer: &Entity<Buffer>,
     cx: &mut AsyncApp,
 ) -> Option<TaskContext> {
-    let worktree_store = project
-        .read_with(cx, |project, _| project.worktree_store())
+    let (worktree_store, environment) = project
+        .read_with(cx, |project, _| {
+            (project.worktree_store(), project.environment().clone())
+        })
         .ok()?;
 
     let worktree_abs_path = cx
@@ -74,9 +76,9 @@ async fn lsp_task_context(
         })
         .ok()?;
 
-    let project_env = project
-        .update(cx, |project, cx| {
-            project.buffer_environment(buffer, &worktree_store, cx)
+    let project_env = environment
+        .update(cx, |environment, cx| {
+            environment.buffer_environment(buffer, &worktree_store, cx)
         })
         .ok()?
         .await;
  
  
  
    
    @@ -878,7 +878,6 @@ mod tests {
     use gpui::{AppContext as _, font, px};
     use language::Capability;
     use project::{Project, project_settings::DiagnosticSeverity};
-    use rope::Rope;
     use settings::SettingsStore;
     use util::post_inc;
 
@@ -1025,22 +1024,22 @@ mod tests {
                     Inlay::edit_prediction(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_before(offset),
-                        Rope::from_str_small("test"),
+                        "test",
                     ),
                     Inlay::edit_prediction(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_after(offset),
-                        Rope::from_str_small("test"),
+                        "test",
                     ),
                     Inlay::mock_hint(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_before(offset),
-                        Rope::from_str_small("test"),
+                        "test",
                     ),
                     Inlay::mock_hint(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_after(offset),
-                        Rope::from_str_small("test"),
+                        "test",
                     ),
                 ]
             })
  
  
  
    
    @@ -193,7 +193,7 @@ impl Editor {
 
                         if let Some(language) = language {
                             for signature in &mut signature_help.signatures {
-                                let text = Rope::from_str_small(signature.label.as_ref());
+                                let text = Rope::from(signature.label.as_ref());
                                 let highlights = language
                                     .highlight_text(&text, 0..signature.label.len())
                                     .into_iter()
  
  
  
    
    @@ -1468,7 +1468,6 @@ impl ExtensionStore {
         let extensions_dir = self.installed_dir.clone();
         let index_path = self.index_path.clone();
         let proxy = self.proxy.clone();
-        let executor = cx.background_executor().clone();
         cx.background_spawn(async move {
             let start_time = Instant::now();
             let mut index = ExtensionIndex::default();
@@ -1502,14 +1501,10 @@ impl ExtensionStore {
             }
 
             if let Ok(index_json) = serde_json::to_string_pretty(&index) {
-                fs.save(
-                    &index_path,
-                    &Rope::from_str(&index_json, &executor),
-                    Default::default(),
-                )
-                .await
-                .context("failed to save extension index")
-                .log_err();
+                fs.save(&index_path, &index_json.as_str().into(), Default::default())
+                    .await
+                    .context("failed to save extension index")
+                    .log_err();
             }
 
             log::info!("rebuilt extension index in {:?}", start_time.elapsed());
@@ -1676,7 +1671,7 @@ impl ExtensionStore {
                 let manifest_toml = toml::to_string(&loaded_extension.manifest)?;
                 fs.save(
                     &tmp_dir.join(EXTENSION_TOML),
-                    &Rope::from_str_small(&manifest_toml),
+                    &Rope::from(manifest_toml),
                     language::LineEnding::Unix,
                 )
                 .await?;
  
  
  
    
    @@ -805,25 +805,22 @@ impl ExtensionsPage {
             )
             .child(
                 h_flex()
-                    .gap_2()
+                    .gap_1()
                     .justify_between()
                     .child(
-                        h_flex()
-                            .gap_1()
-                            .child(
-                                Icon::new(IconName::Person)
-                                    .size(IconSize::XSmall)
-                                    .color(Color::Muted),
-                            )
-                            .child(
-                                Label::new(extension.manifest.authors.join(", "))
-                                    .size(LabelSize::Small)
-                                    .color(Color::Muted)
-                                    .truncate(),
-                            ),
+                        Icon::new(IconName::Person)
+                            .size(IconSize::XSmall)
+                            .color(Color::Muted),
+                    )
+                    .child(
+                        Label::new(extension.manifest.authors.join(", "))
+                            .size(LabelSize::Small)
+                            .color(Color::Muted)
+                            .truncate(),
                     )
                     .child(
                         h_flex()
+                            .ml_auto()
                             .gap_1()
                             .child(
                                 IconButton::new(
  
  
  
    
    @@ -711,7 +711,9 @@ impl PickerDelegate for OpenPathDelegate {
 
         match &self.directory_state {
             DirectoryState::List { parent_path, .. } => {
-                let (label, indices) = if *parent_path == self.prompt_root {
+                let (label, indices) = if is_current_dir_candidate {
+                    ("open this directory".to_string(), vec![])
+                } else if *parent_path == self.prompt_root {
                     match_positions.iter_mut().for_each(|position| {
                         *position += self.prompt_root.len();
                     });
@@ -719,8 +721,6 @@ impl PickerDelegate for OpenPathDelegate {
                         format!("{}{}", self.prompt_root, candidate.path.string),
                         match_positions,
                     )
-                } else if is_current_dir_candidate {
-                    ("open this directory".to_string(), vec![])
                 } else {
                     (candidate.path.string, match_positions)
                 };
  
  
  
    
    @@ -377,7 +377,7 @@ impl Fs for RealFs {
 
         #[cfg(windows)]
         if smol::fs::metadata(&target).await?.is_dir() {
-            let status = smol::process::Command::new("cmd")
+            let status = new_smol_command("cmd")
                 .args(["/C", "mklink", "/J"])
                 .args([path, target.as_path()])
                 .status()
  
  
  
    
    @@ -23,6 +23,7 @@ serde.workspace = true
 serde_json.workspace = true
 settings.workspace = true
 url.workspace = true
+urlencoding.workspace = true
 util.workspace = true
 
 [dev-dependencies]
  
  
  
    
    @@ -1,5 +1,11 @@
-use std::str::FromStr;
-
+use std::{str::FromStr, sync::Arc};
+
+use anyhow::{Context as _, Result, bail};
+use async_trait::async_trait;
+use futures::AsyncReadExt;
+use gpui::SharedString;
+use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
+use serde::Deserialize;
 use url::Url;
 
 use git::{
@@ -9,6 +15,55 @@ use git::{
 
 pub struct Gitee;
 
+#[derive(Debug, Deserialize)]
+struct CommitDetails {
+    author: Option<Author>,
+}
+
+#[derive(Debug, Deserialize)]
+struct Author {
+    avatar_url: String,
+}
+
+impl Gitee {
+    async fn fetch_gitee_commit_author(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: &str,
+        client: &Arc<dyn HttpClient>,
+    ) -> Result<Option<Author>> {
+        let url = format!("https://gitee.com/api/v5/repos/{repo_owner}/{repo}/commits/{commit}");
+
+        let request = Request::get(&url)
+            .header("Content-Type", "application/json")
+            .follow_redirects(http_client::RedirectPolicy::FollowAll);
+
+        let mut response = client
+            .send(request.body(AsyncBody::default())?)
+            .await
+            .with_context(|| format!("error fetching Gitee commit details at {:?}", url))?;
+
+        let mut body = Vec::new();
+        response.body_mut().read_to_end(&mut body).await?;
+
+        if response.status().is_client_error() {
+            let text = String::from_utf8_lossy(body.as_slice());
+            bail!(
+                "status error {}, response: {text:?}",
+                response.status().as_u16()
+            );
+        }
+
+        let body_str = std::str::from_utf8(&body)?;
+
+        serde_json::from_str::<CommitDetails>(body_str)
+            .map(|commit| commit.author)
+            .context("failed to deserialize Gitee commit details")
+    }
+}
+
+#[async_trait]
 impl GitHostingProvider for Gitee {
     fn name(&self) -> String {
         "Gitee".to_string()
@@ -19,7 +74,7 @@ impl GitHostingProvider for Gitee {
     }
 
     fn supports_avatars(&self) -> bool {
-        false
+        true
     }
 
     fn format_line_number(&self, line: u32) -> String {
@@ -80,6 +135,26 @@ impl GitHostingProvider for Gitee {
         );
         permalink
     }
+
+    async fn commit_author_avatar_url(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: SharedString,
+        http_client: Arc<dyn HttpClient>,
+    ) -> Result<Option<Url>> {
+        let commit = commit.to_string();
+        let avatar_url = self
+            .fetch_gitee_commit_author(repo_owner, repo, &commit, &http_client)
+            .await?
+            .map(|author| -> Result<Url, url::ParseError> {
+                let mut url = Url::parse(&author.avatar_url)?;
+                url.set_query(Some("width=128"));
+                Ok(url)
+            })
+            .transpose()?;
+        Ok(avatar_url)
+    }
 }
 
 #[cfg(test)]
  
  
  
    
    @@ -1,6 +1,11 @@
-use std::str::FromStr;
-
-use anyhow::{Result, bail};
+use std::{str::FromStr, sync::Arc};
+
+use anyhow::{Context as _, Result, bail};
+use async_trait::async_trait;
+use futures::AsyncReadExt;
+use gpui::SharedString;
+use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
+use serde::Deserialize;
 use url::Url;
 
 use git::{
@@ -10,6 +15,16 @@ use git::{
 
 use crate::get_host_from_git_remote_url;
 
+#[derive(Debug, Deserialize)]
+struct CommitDetails {
+    author_email: String,
+}
+
+#[derive(Debug, Deserialize)]
+struct AvatarInfo {
+    avatar_url: String,
+}
+
 #[derive(Debug)]
 pub struct Gitlab {
     name: String,
@@ -46,8 +61,79 @@ impl Gitlab {
             Url::parse(&format!("https://{}", host))?,
         ))
     }
+
+    async fn fetch_gitlab_commit_author(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: &str,
+        client: &Arc<dyn HttpClient>,
+    ) -> Result<Option<AvatarInfo>> {
+        let Some(host) = self.base_url.host_str() else {
+            bail!("failed to get host from gitlab base url");
+        };
+        let project_path = format!("{}/{}", repo_owner, repo);
+        let project_path_encoded = urlencoding::encode(&project_path);
+        let url = format!(
+            "https://{host}/api/v4/projects/{project_path_encoded}/repository/commits/{commit}"
+        );
+
+        let request = Request::get(&url)
+            .header("Content-Type", "application/json")
+            .follow_redirects(http_client::RedirectPolicy::FollowAll);
+
+        let mut response = client
+            .send(request.body(AsyncBody::default())?)
+            .await
+            .with_context(|| format!("error fetching GitLab commit details at {:?}", url))?;
+
+        let mut body = Vec::new();
+        response.body_mut().read_to_end(&mut body).await?;
+
+        if response.status().is_client_error() {
+            let text = String::from_utf8_lossy(body.as_slice());
+            bail!(
+                "status error {}, response: {text:?}",
+                response.status().as_u16()
+            );
+        }
+
+        let body_str = std::str::from_utf8(&body)?;
+
+        let author_email = serde_json::from_str::<CommitDetails>(body_str)
+            .map(|commit| commit.author_email)
+            .context("failed to deserialize GitLab commit details")?;
+
+        let avatar_info_url = format!("https://{host}/api/v4/avatar?email={author_email}");
+
+        let request = Request::get(&avatar_info_url)
+            .header("Content-Type", "application/json")
+            .follow_redirects(http_client::RedirectPolicy::FollowAll);
+
+        let mut response = client
+            .send(request.body(AsyncBody::default())?)
+            .await
+            .with_context(|| format!("error fetching GitLab avatar info at {:?}", url))?;
+
+        let mut body = Vec::new();
+        response.body_mut().read_to_end(&mut body).await?;
+
+        if response.status().is_client_error() {
+            let text = String::from_utf8_lossy(body.as_slice());
+            bail!(
+                "status error {}, response: {text:?}",
+                response.status().as_u16()
+            );
+        }
+
+        let body_str = std::str::from_utf8(&body)?;
+
+        serde_json::from_str::<Option<AvatarInfo>>(body_str)
+            .context("failed to deserialize GitLab avatar info")
+    }
 }
 
+#[async_trait]
 impl GitHostingProvider for Gitlab {
     fn name(&self) -> String {
         self.name.clone()
@@ -58,7 +144,7 @@ impl GitHostingProvider for Gitlab {
     }
 
     fn supports_avatars(&self) -> bool {
-        false
+        true
     }
 
     fn format_line_number(&self, line: u32) -> String {
@@ -122,6 +208,39 @@ impl GitHostingProvider for Gitlab {
         );
         permalink
     }
+
+    async fn commit_author_avatar_url(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: SharedString,
+        http_client: Arc<dyn HttpClient>,
+    ) -> Result<Option<Url>> {
+        let commit = commit.to_string();
+        let avatar_url = self
+            .fetch_gitlab_commit_author(repo_owner, repo, &commit, &http_client)
+            .await?
+            .map(|author| -> Result<Url, url::ParseError> {
+                let mut url = Url::parse(&author.avatar_url)?;
+                if let Some(host) = url.host_str() {
+                    let size_query = if host.contains("gravatar") || host.contains("libravatar") {
+                        Some("s=128")
+                    } else if self
+                        .base_url
+                        .host_str()
+                        .is_some_and(|base_host| host.contains(base_host))
+                    {
+                        Some("width=128")
+                    } else {
+                        None
+                    };
+                    url.set_query(size_query);
+                }
+                Ok(url)
+            })
+            .transpose()?;
+        Ok(avatar_url)
+    }
 }
 
 #[cfg(test)]
@@ -134,8 +253,8 @@ mod tests {
     #[test]
     fn test_invalid_self_hosted_remote_url() {
         let remote_url = "https://gitlab.com/zed-industries/zed.git";
-        let github = Gitlab::from_remote_url(remote_url);
-        assert!(github.is_err());
+        let gitlab = Gitlab::from_remote_url(remote_url);
+        assert!(gitlab.is_err());
     }
 
     #[test]
  
  
  
    
    @@ -170,10 +170,7 @@ impl CommitView {
                     ReplicaId::LOCAL,
                     cx.entity_id().as_non_zero_u64().into(),
                     LineEnding::default(),
-                    Rope::from_str(
-                        &format_commit(&commit, stash.is_some()),
-                        cx.background_executor(),
-                    ),
+                    format_commit(&commit, stash.is_some()).into(),
                 );
                 metadata_buffer_id = Some(buffer.remote_id());
                 Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite)
@@ -339,7 +336,7 @@ async fn build_buffer(
 ) -> Result<Entity<Buffer>> {
     let line_ending = LineEnding::detect(&text);
     LineEnding::normalize(&mut text);
-    let text = Rope::from_str(&text, cx.background_executor());
+    let text = Rope::from(text);
     let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?;
     let language = if let Some(language) = language {
         language_registry
@@ -379,7 +376,7 @@ async fn build_buffer_diff(
     let base_buffer = cx
         .update(|cx| {
             Buffer::build_snapshot(
-                Rope::from_str(old_text.as_deref().unwrap_or(""), cx.background_executor()),
+                old_text.as_deref().unwrap_or("").into(),
                 buffer.language().cloned(),
                 Some(language_registry.clone()),
                 cx,
  
  
  
    
    @@ -359,7 +359,6 @@ mod tests {
     use super::*;
     use editor::test::editor_test_context::assert_state_with_diff;
     use gpui::TestAppContext;
-    use language::Rope;
     use project::{FakeFs, Fs, Project};
     use settings::SettingsStore;
     use std::path::PathBuf;
@@ -430,7 +429,7 @@ mod tests {
         // Modify the new file on disk
         fs.save(
             path!("/test/new_file.txt").as_ref(),
-            &Rope::from_str_small(&unindent(
+            &unindent(
                 "
                 new line 1
                 line 2
@@ -438,7 +437,8 @@ mod tests {
                 line 4
                 new line 5
                 ",
-            )),
+            )
+            .into(),
             Default::default(),
         )
         .await
@@ -465,14 +465,15 @@ mod tests {
         // Modify the old file on disk
         fs.save(
             path!("/test/old_file.txt").as_ref(),
-            &Rope::from_str_small(&unindent(
+            &unindent(
                 "
                 new line 1
                 line 2
                 old line 3
                 line 4
                 ",
-            )),
+            )
+            .into(),
             Default::default(),
         )
         .await
  
  
  
    
    @@ -260,19 +260,6 @@ impl AsyncApp {
     }
 }
 
-impl sum_tree::BackgroundSpawn for BackgroundExecutor {
-    type Task<R>
-        = Task<R>
-    where
-        R: Send + Sync;
-    fn background_spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Self::Task<R>
-    where
-        R: Send + Sync + 'static,
-    {
-        self.spawn(future)
-    }
-}
-
 /// A cloneable, owned handle to the application context,
 /// composed with the window associated with the current task.
 #[derive(Clone, Deref, DerefMut)]
  
  
  
    
    @@ -393,11 +393,6 @@ impl TestAppContext {
         }
     }
 
-    /// Returns the background executor for this context.
-    pub fn background_executor(&self) -> &BackgroundExecutor {
-        &self.background_executor
-    }
-
     /// Wait until there are no more pending tasks.
     pub fn run_until_parked(&mut self) {
         self.background_executor.run_until_parked()
  
  
  
    
    @@ -251,8 +251,6 @@ impl Element for UniformList {
         None
     }
 
-    // self.max_found_width = 0.0
-    //
     fn request_layout(
         &mut self,
         global_id: Option<&GlobalElementId>,
  
  
  
    
    @@ -342,7 +342,7 @@ impl BackgroundExecutor {
     /// for all of them to complete before returning.
     pub async fn scoped<'scope, F>(&self, scheduler: F)
     where
-        F: for<'a> FnOnce(&'a mut Scope<'scope>),
+        F: FnOnce(&mut Scope<'scope>),
     {
         let mut scope = Scope::new(self.clone());
         (scheduler)(&mut scope);
  
  
  
    
    @@ -80,27 +80,15 @@ impl PlatformDispatcher for WindowsDispatcher {
     }
 
     fn dispatch_on_main_thread(&self, runnable: Runnable) {
-        let was_empty = self.main_sender.is_empty();
         match self.main_sender.send(runnable) {
             Ok(_) => unsafe {
-                // Only send a `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` to the
-                // queue if we have no runnables queued up yet, otherwise we
-                // risk filling the message queue with gpui messages causing us
-                // to starve the message loop of system messages, resulting in a
-                // process hang.
-                //
-                // When the message loop receives a
-                // `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` message we drain the
-                // runnable queue entirely.
-                if was_empty {
-                    PostMessageW(
-                        Some(self.platform_window_handle.as_raw()),
-                        WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
-                        WPARAM(self.validation_number),
-                        LPARAM(0),
-                    )
-                    .log_err();
-                }
+                PostMessageW(
+                    Some(self.platform_window_handle.as_raw()),
+                    WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
+                    WPARAM(self.validation_number),
+                    LPARAM(0),
+                )
+                .log_err();
             },
             Err(runnable) => {
                 // NOTE: Runnable may wrap a Future that is !Send.
  
  
  
    
    @@ -9,7 +9,6 @@ use windows::Win32::UI::{
     },
     WindowsAndMessaging::KL_NAMELENGTH,
 };
-use windows_core::HSTRING;
 
 use crate::{
     KeybindingKeystroke, Keystroke, Modifiers, PlatformKeyboardLayout, PlatformKeyboardMapper,
@@ -93,14 +92,13 @@ impl PlatformKeyboardMapper for WindowsKeyboardMapper {
 
 impl WindowsKeyboardLayout {
     pub(crate) fn new() -> Result<Self> {
-        let mut buffer = [0u16; KL_NAMELENGTH as usize];
+        let mut buffer = [0u16; KL_NAMELENGTH as usize]; // KL_NAMELENGTH includes the null terminator
         unsafe { GetKeyboardLayoutNameW(&mut buffer)? };
-        let id = HSTRING::from_wide(&buffer).to_string();
+        let id = String::from_utf16_lossy(&buffer[..buffer.len() - 1]); // Remove the null terminator
         let entry = windows_registry::LOCAL_MACHINE.open(format!(
-            "System\\CurrentControlSet\\Control\\Keyboard Layouts\\{}",
-            id
+            "System\\CurrentControlSet\\Control\\Keyboard Layouts\\{id}"
         ))?;
-        let name = entry.get_hstring("Layout Text")?.to_string();
+        let name = entry.get_string("Layout Text")?;
         Ok(Self { id, name })
     }
 
@@ -135,6 +133,7 @@ impl WindowsKeyboardLayout {
                 b"0405" | // Czech
                 b"040E" | // Hungarian
                 b"0424" | // Slovenian
+                b"041A" | // Croatian
                 b"041B" | // Slovak
                 b"0418" // Romanian
             )
  
  
  
    
    @@ -53,6 +53,7 @@ pub enum IconName {
     Check,
     CheckDouble,
     ChevronDown,
+    ChevronDownUp,
     ChevronLeft,
     ChevronRight,
     ChevronUp,
  
  
  
    
    @@ -11,7 +11,7 @@ mod ui_components;
 
 use anyhow::{Context as _, anyhow};
 use collections::{HashMap, HashSet};
-use editor::{CompletionProvider, Editor, EditorEvent};
+use editor::{CompletionProvider, Editor, EditorEvent, EditorMode, SizingBehavior};
 use fs::Fs;
 use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{
@@ -22,7 +22,7 @@ use gpui::{
     ScrollWheelEvent, Stateful, StyledText, Subscription, Task, TextStyleRefinement, WeakEntity,
     actions, anchored, deferred, div,
 };
-use language::{Language, LanguageConfig, Rope, ToOffset as _};
+use language::{Language, LanguageConfig, ToOffset as _};
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::{CompletionDisplayOptions, Project};
 use settings::{
@@ -2119,7 +2119,7 @@ impl RenderOnce for SyntaxHighlightedText {
 
         let highlights = self
             .language
-            .highlight_text(&Rope::from_str_small(text.as_ref()), 0..text.len());
+            .highlight_text(&text.as_ref().into(), 0..text.len());
         let mut runs = Vec::with_capacity(highlights.len());
         let mut offset = 0;
 
@@ -2788,10 +2788,10 @@ impl ActionArgumentsEditor {
                 let editor = cx.new_window_entity(|window, cx| {
                     let multi_buffer = cx.new(|cx| editor::MultiBuffer::singleton(buffer, cx));
                     let mut editor = Editor::new(
-                        editor::EditorMode::Full {
+                        EditorMode::Full {
                             scale_ui_elements_with_buffer_font_size: true,
                             show_active_line_background: false,
-                            sized_by_content: true,
+                            sizing_behavior: SizingBehavior::SizeByContent,
                         },
                         multi_buffer,
                         project.upgrade(),
  
  
  
    
    @@ -24,8 +24,8 @@ use collections::HashMap;
 use fs::MTime;
 use futures::channel::oneshot;
 use gpui::{
-    App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
-    SharedString, StyledText, Task, TaskLabel, TextStyle,
+    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
+    Task, TaskLabel, TextStyle,
 };
 
 use lsp::{LanguageServerId, NumberOrString};
@@ -832,7 +832,6 @@ impl Buffer {
                 ReplicaId::LOCAL,
                 cx.entity_id().as_non_zero_u64().into(),
                 base_text.into(),
-                &cx.background_executor(),
             ),
             None,
             Capability::ReadWrite,
@@ -863,10 +862,9 @@ impl Buffer {
         replica_id: ReplicaId,
         capability: Capability,
         base_text: impl Into<String>,
-        cx: &BackgroundExecutor,
     ) -> Self {
         Self::build(
-            TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
+            TextBuffer::new(replica_id, remote_id, base_text.into()),
             None,
             capability,
         )
@@ -879,10 +877,9 @@ impl Buffer {
         capability: Capability,
         message: proto::BufferState,
         file: Option<Arc<dyn File>>,
-        cx: &BackgroundExecutor,
     ) -> Result<Self> {
         let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
-        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
+        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
         let mut this = Self::build(buffer, file, capability);
         this.text.set_line_ending(proto::deserialize_line_ending(
             rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
@@ -1141,14 +1138,13 @@ impl Buffer {
         let old_snapshot = self.text.snapshot();
         let mut branch_buffer = self.text.branch();
         let mut syntax_snapshot = self.syntax_map.lock().snapshot();
-        let executor = cx.background_executor().clone();
         cx.background_spawn(async move {
             if !edits.is_empty() {
                 if let Some(language) = language.clone() {
                     syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                 }
 
-                branch_buffer.edit(edits.iter().cloned(), &executor);
+                branch_buffer.edit(edits.iter().cloned());
                 let snapshot = branch_buffer.snapshot();
                 syntax_snapshot.interpolate(&snapshot);
 
@@ -2365,9 +2361,7 @@ impl Buffer {
         let autoindent_request = autoindent_mode
             .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
 
-        let edit_operation = self
-            .text
-            .edit(edits.iter().cloned(), cx.background_executor());
+        let edit_operation = self.text.edit(edits.iter().cloned());
         let edit_id = edit_operation.timestamp();
 
         if let Some((before_edit, mode)) = autoindent_request {
@@ -2598,8 +2592,7 @@ impl Buffer {
         for operation in buffer_ops.iter() {
             self.send_operation(Operation::Buffer(operation.clone()), false, cx);
         }
-        self.text
-            .apply_ops(buffer_ops, Some(cx.background_executor()));
+        self.text.apply_ops(buffer_ops);
         self.deferred_ops.insert(deferred_ops);
         self.flush_deferred_ops(cx);
         self.did_edit(&old_version, was_dirty, cx);
  
  
  
    
    @@ -75,7 +75,6 @@ fn test_set_line_ending(cx: &mut TestAppContext) {
             Capability::ReadWrite,
             base.read(cx).to_proto(cx),
             None,
-            cx.background_executor(),
         )
         .unwrap()
     });
@@ -256,18 +255,14 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) {
             .is_none()
     );
     assert!(
-        cx.read(|cx| languages.language_for_file(
-            &file("the/script"),
-            Some(&Rope::from_str("nothing", cx.background_executor())),
-            cx
-        ))
-        .is_none()
+        cx.read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx))
+            .is_none()
     );
 
     assert_eq!(
         cx.read(|cx| languages.language_for_file(
             &file("the/script"),
-            Some(&Rope::from_str("#!/bin/env node", cx.background_executor())),
+            Some(&"#!/bin/env node".into()),
             cx
         ))
         .unwrap()
@@ -411,7 +406,6 @@ fn test_edit_events(cx: &mut gpui::App) {
             ReplicaId::new(1),
             Capability::ReadWrite,
             "abcdef",
-            cx.background_executor(),
         )
     });
     let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
@@ -2787,14 +2781,8 @@ fn test_serialization(cx: &mut gpui::App) {
         .background_executor()
         .block(buffer1.read(cx).serialize_ops(None, cx));
     let buffer2 = cx.new(|cx| {
-        let mut buffer = Buffer::from_proto(
-            ReplicaId::new(1),
-            Capability::ReadWrite,
-            state,
-            None,
-            cx.background_executor(),
-        )
-        .unwrap();
+        let mut buffer =
+            Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap();
         buffer.apply_ops(
             ops.into_iter()
                 .map(|op| proto::deserialize_operation(op).unwrap()),
@@ -2818,7 +2806,6 @@ fn test_branch_and_merge(cx: &mut TestAppContext) {
             Capability::ReadWrite,
             base.read(cx).to_proto(cx),
             None,
-            cx.background_executor(),
         )
         .unwrap()
     });
@@ -3133,14 +3120,9 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
             let ops = cx
                 .background_executor()
                 .block(base_buffer.read(cx).serialize_ops(None, cx));
-            let mut buffer = Buffer::from_proto(
-                ReplicaId::new(i as u16),
-                Capability::ReadWrite,
-                state,
-                None,
-                cx.background_executor(),
-            )
-            .unwrap();
+            let mut buffer =
+                Buffer::from_proto(ReplicaId::new(i as u16), Capability::ReadWrite, state, None)
+                    .unwrap();
             buffer.apply_ops(
                 ops.into_iter()
                     .map(|op| proto::deserialize_operation(op).unwrap()),
@@ -3269,7 +3251,6 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
                         Capability::ReadWrite,
                         old_buffer_state,
                         None,
-                        cx.background_executor(),
                     )
                     .unwrap();
                     new_buffer.apply_ops(
@@ -3433,7 +3414,7 @@ fn test_contiguous_ranges() {
 }
 
 #[gpui::test(iterations = 500)]
-fn test_trailing_whitespace_ranges(mut rng: StdRng, cx: &mut TestAppContext) {
+fn test_trailing_whitespace_ranges(mut rng: StdRng) {
     // Generate a random multi-line string containing
     // some lines with trailing whitespace.
     let mut text = String::new();
@@ -3457,7 +3438,7 @@ fn test_trailing_whitespace_ranges(mut rng: StdRng, cx: &mut TestAppContext) {
         _ => {}
     }
 
-    let rope = Rope::from_str(text.as_str(), cx.background_executor());
+    let rope = Rope::from(text.as_str());
     let actual_ranges = trailing_whitespace_ranges(&rope);
     let expected_ranges = TRAILING_WHITESPACE_REGEX
         .find_iter(&text)
  
  
  
    
    @@ -100,7 +100,6 @@ fn test_syntax_map_layers_for_range(cx: &mut App) {
             }
         "#
         .unindent(),
-        cx.background_executor(),
     );
 
     let mut syntax_map = SyntaxMap::new(&buffer);
@@ -148,7 +147,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) {
 
     // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
     let macro_name_range = range_for_text(&buffer, "vec!");
-    buffer.edit([(macro_name_range, "&")], cx.background_executor());
+    buffer.edit([(macro_name_range, "&")]);
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(language.clone(), &buffer);
 
@@ -200,7 +199,6 @@ fn test_dynamic_language_injection(cx: &mut App) {
             ```
         "#
         .unindent(),
-        cx.background_executor(),
     );
 
     let mut syntax_map = SyntaxMap::new(&buffer);
@@ -220,10 +218,7 @@ fn test_dynamic_language_injection(cx: &mut App) {
 
     // Replace `rs` with a path to ending in `.rb` in code block.
     let macro_name_range = range_for_text(&buffer, "rs");
-    buffer.edit(
-        [(macro_name_range, "foo/bar/baz.rb")],
-        cx.background_executor(),
-    );
+    buffer.edit([(macro_name_range, "foo/bar/baz.rb")]);
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(markdown.clone(), &buffer);
     syntax_map.reparse(markdown_inline.clone(), &buffer);
@@ -240,7 +235,7 @@ fn test_dynamic_language_injection(cx: &mut App) {
 
     // Replace Ruby with a language that hasn't been loaded yet.
     let macro_name_range = range_for_text(&buffer, "foo/bar/baz.rb");
-    buffer.edit([(macro_name_range, "html")], cx.background_executor());
+    buffer.edit([(macro_name_range, "html")]);
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(markdown.clone(), &buffer);
     syntax_map.reparse(markdown_inline.clone(), &buffer);
@@ -816,12 +811,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) {
     .unindent();
 
     let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        text,
-        cx.background_executor(),
-    );
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text);
 
     let mut syntax_map = SyntaxMap::new(&buffer);
     syntax_map.set_language_registry(registry.clone());
@@ -869,7 +859,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) {
     .unindent();
 
     log::info!("editing");
-    buffer.edit_via_marked_text(&text, cx.background_executor());
+    buffer.edit_via_marked_text(&text);
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(language, &buffer);
 
@@ -913,7 +903,7 @@ fn test_random_syntax_map_edits_rust_macros(rng: StdRng, cx: &mut App) {
     let language = Arc::new(rust_lang());
     registry.add(language.clone());
 
-    test_random_edits(text, registry, language, rng, cx);
+    test_random_edits(text, registry, language, rng);
 }
 
 #[gpui::test(iterations = 50)]
@@ -942,7 +932,7 @@ fn test_random_syntax_map_edits_with_erb(rng: StdRng, cx: &mut App) {
     registry.add(Arc::new(ruby_lang()));
     registry.add(Arc::new(html_lang()));
 
-    test_random_edits(text, registry, language, rng, cx);
+    test_random_edits(text, registry, language, rng);
 }
 
 #[gpui::test(iterations = 50)]
@@ -975,7 +965,7 @@ fn test_random_syntax_map_edits_with_heex(rng: StdRng, cx: &mut App) {
     registry.add(Arc::new(heex_lang()));
     registry.add(Arc::new(html_lang()));
 
-    test_random_edits(text, registry, language, rng, cx);
+    test_random_edits(text, registry, language, rng);
 }
 
 fn test_random_edits(
@@ -983,18 +973,12 @@ fn test_random_edits(
     registry: Arc<LanguageRegistry>,
     language: Arc<Language>,
     mut rng: StdRng,
-    cx: &mut App,
 ) {
     let operations = env::var("OPERATIONS")
         .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
         .unwrap_or(10);
 
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        text,
-        cx.background_executor(),
-    );
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text);
 
     let mut syntax_map = SyntaxMap::new(&buffer);
     syntax_map.set_language_registry(registry.clone());
@@ -1009,7 +993,7 @@ fn test_random_edits(
         let prev_buffer = buffer.snapshot();
         let prev_syntax_map = syntax_map.snapshot();
 
-        buffer.randomly_edit(&mut rng, 3, cx.background_executor());
+        buffer.randomly_edit(&mut rng, 3);
         log::info!("text:\n{}", buffer.text());
 
         syntax_map.interpolate(&buffer);
@@ -1175,12 +1159,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf
         .now_or_never()
         .unwrap()
         .unwrap();
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "",
-        cx.background_executor(),
-    );
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
 
     let mut mutated_syntax_map = SyntaxMap::new(&buffer);
     mutated_syntax_map.set_language_registry(registry.clone());
@@ -1189,7 +1168,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf
     for (i, marked_string) in steps.iter().enumerate() {
         let marked_string = marked_string.unindent();
         log::info!("incremental parse {i}: {marked_string:?}");
-        buffer.edit_via_marked_text(&marked_string, cx.background_executor());
+        buffer.edit_via_marked_text(&marked_string);
 
         // Reparse the syntax map
         mutated_syntax_map.interpolate(&buffer);
  
  
  
    
    @@ -11,7 +11,7 @@ use futures::{Future, FutureExt, future::join_all};
 use gpui::{App, AppContext, AsyncApp, Task};
 use language::{
     BinaryStatus, CodeLabel, DynLspInstaller, HighlightId, Language, LanguageName, LspAdapter,
-    LspAdapterDelegate, Rope, Toolchain,
+    LspAdapterDelegate, Toolchain,
 };
 use lsp::{
     CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName,
@@ -403,10 +403,7 @@ fn labels_from_extension(
             let runs = if label.code.is_empty() {
                 Vec::new()
             } else {
-                language.highlight_text(
-                    &Rope::from_str_small(label.code.as_str()),
-                    0..label.code.len(),
-                )
+                language.highlight_text(&label.code.as_str().into(), 0..label.code.len())
             };
             build_code_label(&label, &runs, language)
         })
  
  
  
    
    @@ -189,7 +189,7 @@ impl super::LspAdapter for CLspAdapter {
             Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => {
                 let detail = completion.detail.as_ref().unwrap();
                 let text = format!("{} {}", detail, label);
-                let source = Rope::from_str_small(format!("struct S {{ {} }}", text).as_str());
+                let source = Rope::from(format!("struct S {{ {} }}", text).as_str());
                 let runs = language.highlight_text(&source, 11..11 + text.len());
                 let filter_range = completion
                     .filter_text
@@ -206,8 +206,7 @@ impl super::LspAdapter for CLspAdapter {
             {
                 let detail = completion.detail.as_ref().unwrap();
                 let text = format!("{} {}", detail, label);
-                let runs =
-                    language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len());
+                let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len());
                 let filter_range = completion
                     .filter_text
                     .as_deref()
@@ -223,8 +222,7 @@ impl super::LspAdapter for CLspAdapter {
             {
                 let detail = completion.detail.as_ref().unwrap();
                 let text = format!("{} {}", detail, label);
-                let runs =
-                    language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len());
+                let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len());
                 let filter_range = completion
                     .filter_text
                     .as_deref()
@@ -328,7 +326,7 @@ impl super::LspAdapter for CLspAdapter {
         Some(CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
+            language.highlight_text(&text.as_str().into(), display_range),
         ))
     }
 
  
  
  
    
    @@ -221,7 +221,7 @@ impl LspAdapter for GoLspAdapter {
         match completion.kind.zip(completion.detail.as_ref()) {
             Some((lsp::CompletionItemKind::MODULE, detail)) => {
                 let text = format!("{label} {detail}");
-                let source = Rope::from_str_small(format!("import {text}").as_str());
+                let source = Rope::from(format!("import {text}").as_str());
                 let runs = language.highlight_text(&source, 7..7 + text[name_offset..].len());
                 let filter_range = completion
                     .filter_text
@@ -238,9 +238,8 @@ impl LspAdapter for GoLspAdapter {
                 detail,
             )) => {
                 let text = format!("{label} {detail}");
-                let source = Rope::from_str_small(
-                    format!("var {} {}", &text[name_offset..], detail).as_str(),
-                );
+                let source =
+                    Rope::from(format!("var {} {}", &text[name_offset..], detail).as_str());
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 4..4 + text[name_offset..].len()),
@@ -257,8 +256,7 @@ impl LspAdapter for GoLspAdapter {
             }
             Some((lsp::CompletionItemKind::STRUCT, _)) => {
                 let text = format!("{label} struct {{}}");
-                let source =
-                    Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str());
+                let source = Rope::from(format!("type {}", &text[name_offset..]).as_str());
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 5..5 + text[name_offset..].len()),
@@ -275,8 +273,7 @@ impl LspAdapter for GoLspAdapter {
             }
             Some((lsp::CompletionItemKind::INTERFACE, _)) => {
                 let text = format!("{label} interface {{}}");
-                let source =
-                    Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str());
+                let source = Rope::from(format!("type {}", &text[name_offset..]).as_str());
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 5..5 + text[name_offset..].len()),
@@ -293,9 +290,8 @@ impl LspAdapter for GoLspAdapter {
             }
             Some((lsp::CompletionItemKind::FIELD, detail)) => {
                 let text = format!("{label} {detail}");
-                let source = Rope::from_str_small(
-                    format!("type T struct {{ {} }}", &text[name_offset..]).as_str(),
-                );
+                let source =
+                    Rope::from(format!("type T struct {{ {} }}", &text[name_offset..]).as_str());
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 16..16 + text[name_offset..].len()),
@@ -313,9 +309,7 @@ impl LspAdapter for GoLspAdapter {
             Some((lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD, detail)) => {
                 if let Some(signature) = detail.strip_prefix("func") {
                     let text = format!("{label}{signature}");
-                    let source = Rope::from_str_small(
-                        format!("func {} {{}}", &text[name_offset..]).as_str(),
-                    );
+                    let source = Rope::from(format!("func {} {{}}", &text[name_offset..]).as_str());
                     let runs = adjust_runs(
                         name_offset,
                         language.highlight_text(&source, 5..5 + text[name_offset..].len()),
@@ -391,7 +385,7 @@ impl LspAdapter for GoLspAdapter {
         Some(CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
+            language.highlight_text(&text.as_str().into(), display_range),
         ))
     }
 
  
  
  
    
    @@ -7,6 +7,7 @@
   "exclude"
   "retract"
   "module"
+  "ignore"
 ] @keyword
 
 "=>" @operator
  
  
  
    
    @@ -27,3 +27,9 @@
   ("(") @structure.open
   (")") @structure.close
 )
+
+(ignore_directive
+  "ignore" @structure.anchor
+  ("(") @structure.open
+  (")") @structure.close
+)
  
  
  
    
    @@ -19,7 +19,6 @@ use pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind};
 use pet_virtualenv::is_virtualenv_dir;
 use project::Fs;
 use project::lsp_store::language_server_settings;
-use rope::Rope;
 use serde::{Deserialize, Serialize};
 use serde_json::{Value, json};
 use smol::lock::OnceCell;
@@ -467,7 +466,7 @@ impl LspAdapter for PyrightLspAdapter {
         Some(language::CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
+            language.highlight_text(&text.as_str().into(), display_range),
         ))
     }
 
@@ -1211,7 +1210,7 @@ impl ToolchainLister for PythonToolchainProvider {
                 activation_script.extend(match shell {
                     ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")),
                     ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")),
-                    ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")),
+                    ShellKind::Nushell => Some(format!("^\"{pyenv}\" shell - nu {version}")),
                     ShellKind::PowerShell => None,
                     ShellKind::Csh => None,
                     ShellKind::Tcsh => None,
@@ -1512,7 +1511,7 @@ impl LspAdapter for PyLspAdapter {
         Some(language::CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
+            language.highlight_text(&text.as_str().into(), display_range),
         ))
     }
 
@@ -1801,7 +1800,7 @@ impl LspAdapter for BasedPyrightLspAdapter {
         Some(language::CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
+            language.highlight_text(&text.as_str().into(), display_range),
         ))
     }
 
  
  
  
    
    @@ -252,7 +252,7 @@ impl LspAdapter for RustLspAdapter {
                 let name = &completion.label;
                 let text = format!("{name}: {signature}");
                 let prefix = "struct S { ";
-                let source = Rope::from_iter_small([prefix, &text, " }"]);
+                let source = Rope::from_iter([prefix, &text, " }"]);
                 let runs =
                     language.highlight_text(&source, prefix.len()..prefix.len() + text.len());
                 mk_label(text, &|| 0..completion.label.len(), runs)
@@ -264,7 +264,7 @@ impl LspAdapter for RustLspAdapter {
                 let name = &completion.label;
                 let text = format!("{name}: {signature}",);
                 let prefix = "let ";
-                let source = Rope::from_iter_small([prefix, &text, " = ();"]);
+                let source = Rope::from_iter([prefix, &text, " = ();"]);
                 let runs =
                     language.highlight_text(&source, prefix.len()..prefix.len() + text.len());
                 mk_label(text, &|| 0..completion.label.len(), runs)
@@ -302,7 +302,7 @@ impl LspAdapter for RustLspAdapter {
                     .filter(|it| it.contains(&label))
                     .and_then(|it| Some((it, FULL_SIGNATURE_REGEX.find(it)?)))
                 {
-                    let source = Rope::from_str_small(function_signature);
+                    let source = Rope::from(function_signature);
                     let runs = language.highlight_text(&source, 0..function_signature.len());
                     mk_label(
                         function_signature.to_owned(),
@@ -311,7 +311,7 @@ impl LspAdapter for RustLspAdapter {
                     )
                 } else if let Some((prefix, suffix)) = fn_prefixed {
                     let text = format!("{label}{suffix}");
-                    let source = Rope::from_iter_small([prefix, " ", &text, " {}"]);
+                    let source = Rope::from_iter([prefix, " ", &text, " {}"]);
                     let run_start = prefix.len() + 1;
                     let runs = language.highlight_text(&source, run_start..run_start + text.len());
                     mk_label(text, &|| 0..label.len(), runs)
@@ -322,7 +322,7 @@ impl LspAdapter for RustLspAdapter {
                 {
                     let text = completion.label.clone();
                     let len = text.len();
-                    let source = Rope::from_str_small(text.as_str());
+                    let source = Rope::from(text.as_str());
                     let runs = language.highlight_text(&source, 0..len);
                     mk_label(text, &|| 0..completion.label.len(), runs)
                 } else if detail_left.is_none() {
@@ -399,10 +399,7 @@ impl LspAdapter for RustLspAdapter {
         Some(CodeLabel::new(
             format!("{prefix}{name}"),
             filter_range,
-            language.highlight_text(
-                &Rope::from_iter_small([prefix, name, suffix]),
-                display_range,
-            ),
+            language.highlight_text(&Rope::from_iter([prefix, name, suffix]), display_range),
         ))
     }
 
  
  
  
    
    @@ -121,6 +121,15 @@
 
 ; Tokens
 
+[
+  ";"
+  "?."
+  "."
+  ","
+  ":"
+  "?"
+] @punctuation.delimiter
+
 [
   "..."
   "-"
@@ -179,15 +188,6 @@
   ] @operator
 )
 
-[
-  ";"
-  "?."
-  "."
-  ","
-  ":"
-  "?"
-] @punctuation.delimiter
-
 [
   "("
   ")"
  
  
  
    
    @@ -1558,9 +1558,7 @@ impl MarkdownElementBuilder {
 
         if let Some(Some(language)) = self.code_block_stack.last() {
             let mut offset = 0;
-            for (range, highlight_id) in
-                language.highlight_text(&Rope::from_str_small(text), 0..text.len())
-            {
+            for (range, highlight_id) in language.highlight_text(&Rope::from(text), 0..text.len()) {
                 if range.start > offset {
                     self.pending_line
                         .runs
  
  
  
    
    @@ -779,7 +779,7 @@ impl<'a> MarkdownParser<'a> {
 
         let highlights = if let Some(language) = &language {
             if let Some(registry) = &self.language_registry {
-                let rope = language::Rope::from_str_small(code.as_str());
+                let rope: language::Rope = code.as_str().into();
                 registry
                     .language_for_name_or_extension(language)
                     .await
  
  
  
    
    @@ -1,6 +1,6 @@
 use super::*;
 use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
-use gpui::{App, BackgroundExecutor, TestAppContext};
+use gpui::{App, TestAppContext};
 use indoc::indoc;
 use language::{Buffer, Rope};
 use parking_lot::RwLock;
@@ -79,14 +79,9 @@ fn test_remote(cx: &mut App) {
         let ops = cx
             .background_executor()
             .block(host_buffer.read(cx).serialize_ops(None, cx));
-        let mut buffer = Buffer::from_proto(
-            ReplicaId::REMOTE_SERVER,
-            Capability::ReadWrite,
-            state,
-            None,
-            cx.background_executor(),
-        )
-        .unwrap();
+        let mut buffer =
+            Buffer::from_proto(ReplicaId::REMOTE_SERVER, Capability::ReadWrite, state, None)
+                .unwrap();
         buffer.apply_ops(
             ops.into_iter()
                 .map(|op| language::proto::deserialize_operation(op).unwrap()),
@@ -1229,7 +1224,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot, cx.background_executor());
+    assert_line_indents(&snapshot);
 
     multibuffer.update(cx, |multibuffer, cx| {
         multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx)
@@ -1253,7 +1248,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot, cx.background_executor());
+    assert_line_indents(&snapshot);
 
     // Expand the first diff hunk
     multibuffer.update(cx, |multibuffer, cx| {
@@ -1305,7 +1300,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot, cx.background_executor());
+    assert_line_indents(&snapshot);
 
     // Edit the buffer before the first hunk
     buffer.update(cx, |buffer, cx| {
@@ -1347,7 +1342,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot, cx.background_executor());
+    assert_line_indents(&snapshot);
 
     // Recalculate the diff, changing the first diff hunk.
     diff.update(cx, |diff, cx| {
@@ -2072,7 +2067,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
     }
 
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot, cx.background_executor());
+    assert_line_indents(&snapshot);
 
     assert_eq!(
         snapshot
@@ -2123,7 +2118,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
         ),
     );
 
-    assert_line_indents(&snapshot, cx.background_executor());
+    assert_line_indents(&snapshot);
 }
 
 /// A naive implementation of a multi-buffer that does not maintain
@@ -2893,7 +2888,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
             );
         }
 
-        let text_rope = Rope::from_str(expected_text.as_str(), cx.background_executor());
+        let text_rope = Rope::from(expected_text.as_str());
         for _ in 0..10 {
             let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right);
             let start_ix = text_rope.clip_offset(rng.random_range(0..=end_ix), Bias::Left);
@@ -3517,7 +3512,7 @@ fn assert_consistent_line_numbers(snapshot: &MultiBufferSnapshot) {
 
 #[track_caller]
 fn assert_position_translation(snapshot: &MultiBufferSnapshot) {
-    let text = Rope::from_str_small(&snapshot.text());
+    let text = Rope::from(snapshot.text());
 
     let mut left_anchors = Vec::new();
     let mut right_anchors = Vec::new();
@@ -3641,10 +3636,10 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) {
     }
 }
 
-fn assert_line_indents(snapshot: &MultiBufferSnapshot, executor: &BackgroundExecutor) {
+fn assert_line_indents(snapshot: &MultiBufferSnapshot) {
     let max_row = snapshot.max_point().row;
     let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id();
-    let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text(), executor);
+    let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text());
     let mut line_indents = text
         .line_indents_in_row_range(0..max_row + 1)
         .collect::<Vec<_>>();
  
  
  
    
    @@ -5,7 +5,7 @@ use gpui::{App, AppContext, Context, Entity};
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot};
 use rope::Point;
-use text::{Bias, OffsetRangeExt, locator::Locator};
+use text::{Bias, BufferId, OffsetRangeExt, locator::Locator};
 use util::{post_inc, rel_path::RelPath};
 
 use crate::{
@@ -152,6 +152,15 @@ impl MultiBuffer {
         }
     }
 
+    pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context<Self>) {
+        self.remove_excerpts(
+            self.excerpts_for_buffer(buffer, cx)
+                .into_iter()
+                .map(|(excerpt, _)| excerpt),
+            cx,
+        );
+    }
+
     pub(super) fn expand_excerpts_with_paths(
         &mut self,
         ids: impl IntoIterator<Item = ExcerptId>,
  
  
  
    
    @@ -465,14 +465,8 @@ impl SearchData {
         let match_offset_range = match_range.to_offset(multi_buffer_snapshot);
 
         let mut search_match_indices = vec![
-            multi_buffer_snapshot.clip_offset(
-                match_offset_range.start - context_offset_range.start,
-                Bias::Left,
-            )
-                ..multi_buffer_snapshot.clip_offset(
-                    match_offset_range.end - context_offset_range.start,
-                    Bias::Right,
-                ),
+            match_offset_range.start - context_offset_range.start
+                ..match_offset_range.end - context_offset_range.start,
         ];
 
         let entire_context_text = multi_buffer_snapshot
@@ -509,14 +503,8 @@ impl SearchData {
                 .next()
                 .is_some_and(|c| !c.is_whitespace());
         search_match_indices.iter_mut().for_each(|range| {
-            range.start = multi_buffer_snapshot.clip_offset(
-                range.start.saturating_sub(left_whitespaces_offset),
-                Bias::Left,
-            );
-            range.end = multi_buffer_snapshot.clip_offset(
-                range.end.saturating_sub(left_whitespaces_offset),
-                Bias::Right,
-            );
+            range.start = range.start.saturating_sub(left_whitespaces_offset);
+            range.end = range.end.saturating_sub(left_whitespaces_offset);
         });
 
         let trimmed_row_offset_range =
@@ -5256,10 +5244,13 @@ mod tests {
     use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
     use pretty_assertions::assert_eq;
     use project::FakeFs;
-    use search::project_search::{self, perform_project_search};
+    use search::{
+        buffer_search,
+        project_search::{self, perform_project_search},
+    };
     use serde_json::json;
     use util::path;
-    use workspace::{OpenOptions, OpenVisible};
+    use workspace::{OpenOptions, OpenVisible, ToolbarItemView};
 
     use super::*;
 
@@ -5322,25 +5313,28 @@ mod tests {
     ide/src/
       inlay_hints/
         fn_lifetime_fn.rs
-          search: match config.param_names_for_lifetime_elision_hints {
-          search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
-          search: Some(it) if config.param_names_for_lifetime_elision_hints => {
-          search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+          search: match config.Β«param_names_for_lifetime_elision_hintsΒ» {
+          search: allocated_lifetimes.push(if config.Β«param_names_for_lifetime_elision_hintsΒ» {
+          search: Some(it) if config.Β«param_names_for_lifetime_elision_hintsΒ» => {
+          search: InlayHintsConfig { Β«param_names_for_lifetime_elision_hintsΒ»: true, ..TEST_CONFIG },
       inlay_hints.rs
-        search: pub param_names_for_lifetime_elision_hints: bool,
-        search: param_names_for_lifetime_elision_hints: self
+        search: pub Β«param_names_for_lifetime_elision_hintsΒ»: bool,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self
       static_index.rs
-        search: param_names_for_lifetime_elision_hints: false,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: false,
     rust-analyzer/src/
       cli/
         analysis_stats.rs
-          search: param_names_for_lifetime_elision_hints: true,
+          search: Β«param_names_for_lifetime_elision_hintsΒ»: true,
       config.rs
-        search: param_names_for_lifetime_elision_hints: self"#
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self"#
             .to_string();
 
         let select_first_in_all_matches = |line_to_select: &str| {
-            assert!(all_matches.contains(line_to_select));
+            assert!(
+                all_matches.contains(line_to_select),
+                "`{line_to_select}` was not found in all matches `{all_matches}`"
+            );
             all_matches.replacen(
                 line_to_select,
                 &format!("{line_to_select}{SELECTED_MARKER}"),
@@ -5361,7 +5355,7 @@ mod tests {
                     cx,
                 ),
                 select_first_in_all_matches(
-                    "search: match config.param_names_for_lifetime_elision_hints {"
+                    "search: match config.Β«param_names_for_lifetime_elision_hintsΒ» {"
                 )
             );
         });
@@ -5401,16 +5395,16 @@ mod tests {
       inlay_hints/
         fn_lifetime_fn.rs{SELECTED_MARKER}
       inlay_hints.rs
-        search: pub param_names_for_lifetime_elision_hints: bool,
-        search: param_names_for_lifetime_elision_hints: self
+        search: pub Β«param_names_for_lifetime_elision_hintsΒ»: bool,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self
       static_index.rs
-        search: param_names_for_lifetime_elision_hints: false,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: false,
     rust-analyzer/src/
       cli/
         analysis_stats.rs
-          search: param_names_for_lifetime_elision_hints: true,
+          search: Β«param_names_for_lifetime_elision_hintsΒ»: true,
       config.rs
-        search: param_names_for_lifetime_elision_hints: self"#,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self"#,
                 )
             );
         });
@@ -5471,9 +5465,9 @@ mod tests {
     rust-analyzer/src/
       cli/
         analysis_stats.rs
-          search: param_names_for_lifetime_elision_hints: true,
+          search: Β«param_names_for_lifetime_elision_hintsΒ»: true,
       config.rs
-        search: param_names_for_lifetime_elision_hints: self"#,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self"#,
                 )
             );
         });
@@ -5553,21 +5547,21 @@ mod tests {
     ide/src/
       inlay_hints/
         fn_lifetime_fn.rs
-          search: match config.param_names_for_lifetime_elision_hints {
-          search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
-          search: Some(it) if config.param_names_for_lifetime_elision_hints => {
-          search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+          search: match config.Β«param_names_for_lifetime_elision_hintsΒ» {
+          search: allocated_lifetimes.push(if config.Β«param_names_for_lifetime_elision_hintsΒ» {
+          search: Some(it) if config.Β«param_names_for_lifetime_elision_hintsΒ» => {
+          search: InlayHintsConfig { Β«param_names_for_lifetime_elision_hintsΒ»: true, ..TEST_CONFIG },
       inlay_hints.rs
-        search: pub param_names_for_lifetime_elision_hints: bool,
-        search: param_names_for_lifetime_elision_hints: self
+        search: pub Β«param_names_for_lifetime_elision_hintsΒ»: bool,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self
       static_index.rs
-        search: param_names_for_lifetime_elision_hints: false,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: false,
     rust-analyzer/src/
       cli/
         analysis_stats.rs
-          search: param_names_for_lifetime_elision_hints: true,
+          search: Β«param_names_for_lifetime_elision_hintsΒ»: true,
       config.rs
-        search: param_names_for_lifetime_elision_hints: self"#
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self"#
             .to_string();
 
         cx.executor()
@@ -5692,30 +5686,40 @@ mod tests {
     ide/src/
       inlay_hints/
         fn_lifetime_fn.rs
-          search: match config.param_names_for_lifetime_elision_hints {
-          search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
-          search: Some(it) if config.param_names_for_lifetime_elision_hints => {
-          search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+          search: match config.Β«param_names_for_lifetime_elision_hintsΒ» {
+          search: allocated_lifetimes.push(if config.Β«param_names_for_lifetime_elision_hintsΒ» {
+          search: Some(it) if config.Β«param_names_for_lifetime_elision_hintsΒ» => {
+          search: InlayHintsConfig { Β«param_names_for_lifetime_elision_hintsΒ»: true, ..TEST_CONFIG },
       inlay_hints.rs
-        search: pub param_names_for_lifetime_elision_hints: bool,
-        search: param_names_for_lifetime_elision_hints: self
+        search: pub Β«param_names_for_lifetime_elision_hintsΒ»: bool,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self
       static_index.rs
-        search: param_names_for_lifetime_elision_hints: false,
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: false,
     rust-analyzer/src/
       cli/
         analysis_stats.rs
-          search: param_names_for_lifetime_elision_hints: true,
+          search: Β«param_names_for_lifetime_elision_hintsΒ»: true,
       config.rs
-        search: param_names_for_lifetime_elision_hints: self"#
+        search: Β«param_names_for_lifetime_elision_hintsΒ»: self"#
             .to_string();
         let select_first_in_all_matches = |line_to_select: &str| {
-            assert!(all_matches.contains(line_to_select));
+            assert!(
+                all_matches.contains(line_to_select),
+                "`{line_to_select}` was not found in all matches `{all_matches}`"
+            );
             all_matches.replacen(
                 line_to_select,
                 &format!("{line_to_select}{SELECTED_MARKER}"),
                 1,
             )
         };
+        let clear_outline_metadata = |input: &str| {
+            input
+                .replace("search: ", "")
+                .replace("Β«", "")
+                .replace("Β»", "")
+        };
+
         cx.executor()
             .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
         cx.run_until_parked();
@@ -5726,7 +5730,7 @@ mod tests {
                 .expect("should have an active editor open")
         });
         let initial_outline_selection =
-            "search: match config.param_names_for_lifetime_elision_hints {";
+            "search: match config.Β«param_names_for_lifetime_elision_hintsΒ» {";
         outline_panel.update_in(cx, |outline_panel, window, cx| {
             assert_eq!(
                 display_entries(
@@ -5740,7 +5744,7 @@ mod tests {
             );
             assert_eq!(
                 selected_row_text(&active_editor, cx),
-                initial_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes
+                clear_outline_metadata(initial_outline_selection),
                 "Should place the initial editor selection on the corresponding search result"
             );
 
@@ -5749,7 +5753,7 @@ mod tests {
         });
 
         let navigated_outline_selection =
-            "search: Some(it) if config.param_names_for_lifetime_elision_hints => {";
+            "search: Some(it) if config.Β«param_names_for_lifetime_elision_hintsΒ» => {";
         outline_panel.update(cx, |outline_panel, cx| {
             assert_eq!(
                 display_entries(
@@ -5767,7 +5771,7 @@ mod tests {
         outline_panel.update(cx, |_, cx| {
             assert_eq!(
                 selected_row_text(&active_editor, cx),
-                navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes
+                clear_outline_metadata(navigated_outline_selection),
                 "Should still have the initial caret position after SelectNext calls"
             );
         });
@@ -5778,7 +5782,7 @@ mod tests {
         outline_panel.update(cx, |_outline_panel, cx| {
             assert_eq!(
                 selected_row_text(&active_editor, cx),
-                navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes
+                clear_outline_metadata(navigated_outline_selection),
                 "After opening, should move the caret to the opened outline entry's position"
             );
         });
@@ -5786,7 +5790,7 @@ mod tests {
         outline_panel.update_in(cx, |outline_panel, window, cx| {
             outline_panel.select_next(&SelectNext, window, cx);
         });
-        let next_navigated_outline_selection = "search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },";
+        let next_navigated_outline_selection = "search: InlayHintsConfig { Β«param_names_for_lifetime_elision_hintsΒ»: true, ..TEST_CONFIG },";
         outline_panel.update(cx, |outline_panel, cx| {
             assert_eq!(
                 display_entries(
@@ -5804,7 +5808,7 @@ mod tests {
         outline_panel.update(cx, |_outline_panel, cx| {
             assert_eq!(
                 selected_row_text(&active_editor, cx),
-                next_navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes
+                clear_outline_metadata(next_navigated_outline_selection),
                 "Should again preserve the selection after another SelectNext call"
             );
         });
@@ -5837,7 +5841,7 @@ mod tests {
             );
             assert_eq!(
                 selected_row_text(&new_active_editor, cx),
-                next_navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes
+                clear_outline_metadata(next_navigated_outline_selection),
                 "When opening the excerpt, should navigate to the place corresponding the outline entry"
             );
         });
@@ -5939,11 +5943,11 @@ mod tests {
                 format!(
                     r#"one/
   a.txt
-    search: aaa aaa  <==== selected
-    search: aaa aaa
+    search: Β«aaaΒ» aaa  <==== selected
+    search: aaa Β«aaaΒ»
 two/
   b.txt
-    search: a aaa"#,
+    search: a Β«aaaΒ»"#,
                 ),
             );
         });
@@ -5969,7 +5973,7 @@ two/
   a.txt  <==== selected
 two/
   b.txt
-    search: a aaa"#,
+    search: a Β«aaaΒ»"#,
                 ),
             );
         });
@@ -6018,7 +6022,7 @@ two/  <==== selected"#,
   a.txt
 two/  <==== selected
   b.txt
-    search: a aaa"#,
+    search: a Β«aaaΒ»"#,
                 )
             );
         });
@@ -6483,18 +6487,18 @@ outline: struct OutlineEntryExcerpt
                     r#"frontend-project/
   public/lottie/
     syntax-tree.json
-      search: {{ "something": "static" }}  <==== selected
+      search: {{ "something": "Β«staticΒ»" }}  <==== selected
   src/
     app/(site)/
       (about)/jobs/[slug]/
         page.tsx
-          search: static
+          search: Β«staticΒ»
       (blog)/post/[slug]/
         page.tsx
-          search: static
+          search: Β«staticΒ»
     components/
       ErrorBoundary.tsx
-        search: static"#
+        search: Β«staticΒ»"#
                 )
             );
         });
@@ -6522,12 +6526,12 @@ outline: struct OutlineEntryExcerpt
                     r#"frontend-project/
   public/lottie/
     syntax-tree.json
-      search: {{ "something": "static" }}
+      search: {{ "something": "Β«staticΒ»" }}
   src/
     app/(site)/  <==== selected
     components/
       ErrorBoundary.tsx
-        search: static"#
+        search: Β«staticΒ»"#
                 )
             );
         });
@@ -6552,12 +6556,12 @@ outline: struct OutlineEntryExcerpt
                     r#"frontend-project/
   public/lottie/
     syntax-tree.json
-      search: {{ "something": "static" }}
+      search: {{ "something": "Β«staticΒ»" }}
   src/
     app/(site)/
     components/
       ErrorBoundary.tsx
-        search: static  <==== selected"#
+        search: Β«staticΒ»  <==== selected"#
                 )
             );
         });
@@ -6586,7 +6590,7 @@ outline: struct OutlineEntryExcerpt
                     r#"frontend-project/
   public/lottie/
     syntax-tree.json
-      search: {{ "something": "static" }}
+      search: {{ "something": "Β«staticΒ»" }}
   src/
     app/(site)/
     components/
@@ -6619,12 +6623,12 @@ outline: struct OutlineEntryExcerpt
                     r#"frontend-project/
   public/lottie/
     syntax-tree.json
-      search: {{ "something": "static" }}
+      search: {{ "something": "Β«staticΒ»" }}
   src/
     app/(site)/
     components/
       ErrorBoundary.tsx  <==== selected
-        search: static"#
+        search: Β«staticΒ»"#
                 )
             );
         });
@@ -6667,18 +6671,18 @@ outline: struct OutlineEntryExcerpt
                     r#"frontend-project/
   public/lottie/
     syntax-tree.json
-      search: {{ "something": "static" }}
+      search: {{ "something": "Β«staticΒ»" }}
   src/
     app/(site)/
       (about)/jobs/[slug]/
         page.tsx
-          search: static
+          search: Β«staticΒ»
       (blog)/post/[slug]/
         page.tsx
-          search: static
+          search: Β«staticΒ»
     components/
       ErrorBoundary.tsx  <==== selected
-        search: static"#
+        search: Β«staticΒ»"#
                 )
             );
         });
@@ -6784,16 +6788,21 @@ outline: struct OutlineEntryExcerpt
                     }
                 },
                 PanelEntry::Search(search_entry) => {
-                    format!(
-                        "search: {}",
-                        search_entry
-                            .render_data
-                            .get_or_init(|| SearchData::new(
-                                &search_entry.match_range,
-                                multi_buffer_snapshot
-                            ))
-                            .context_text
-                    )
+                    let search_data = search_entry.render_data.get_or_init(|| {
+                        SearchData::new(&search_entry.match_range, multi_buffer_snapshot)
+                    });
+                    let mut search_result = String::new();
+                    let mut last_end = 0;
+                    for range in &search_data.search_match_indices {
+                        search_result.push_str(&search_data.context_text[last_end..range.start]);
+                        search_result.push('Β«');
+                        search_result.push_str(&search_data.context_text[range.start..range.end]);
+                        search_result.push('Β»');
+                        last_end = range.end;
+                    }
+                    search_result.push_str(&search_data.context_text[last_end..]);
+
+                    format!("search: {search_result}")
                 }
             };
 
@@ -6816,6 +6825,7 @@ outline: struct OutlineEntryExcerpt
             workspace::init_settings(cx);
             Project::init_settings(cx);
             project_search::init(cx);
+            buffer_search::init(cx);
             super::init(cx);
         });
     }
@@ -7827,4 +7837,102 @@ outline: fn main()"
             };
         });
     }
+
+    #[gpui::test]
+    async fn test_buffer_search(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            "/test",
+            json!({
+                "foo.txt": r#"<_constitution>
+
+</_constitution>
+
+
+
+## π Output
+
+| Field          | Meaning                |
+"#
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+        let workspace = add_outline_panel(&project, cx).await;
+        let cx = &mut VisualTestContext::from_window(*workspace, cx);
+
+        let editor = workspace
+            .update(cx, |workspace, window, cx| {
+                workspace.open_abs_path(
+                    PathBuf::from("/test/foo.txt"),
+                    OpenOptions {
+                        visible: Some(OpenVisible::All),
+                        ..OpenOptions::default()
+                    },
+                    window,
+                    cx,
+                )
+            })
+            .unwrap()
+            .await
+            .unwrap()
+            .downcast::<Editor>()
+            .unwrap();
+
+        let search_bar = workspace
+            .update(cx, |_, window, cx| {
+                cx.new(|cx| {
+                    let mut search_bar = BufferSearchBar::new(None, window, cx);
+                    search_bar.set_active_pane_item(Some(&editor), window, cx);
+                    search_bar.show(window, cx);
+                    search_bar
+                })
+            })
+            .unwrap();
+
+        let outline_panel = outline_panel(&workspace, cx);
+
+        outline_panel.update_in(cx, |outline_panel, window, cx| {
+            outline_panel.set_active(true, window, cx)
+        });
+
+        search_bar
+            .update_in(cx, |search_bar, window, cx| {
+                search_bar.search("  ", None, true, window, cx)
+            })
+            .await
+            .unwrap();
+
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(500));
+        cx.run_until_parked();
+
+        outline_panel.update(cx, |outline_panel, cx| {
+            assert_eq!(
+                display_entries(
+                    &project,
+                    &snapshot(outline_panel, cx),
+                    &outline_panel.cached_entries,
+                    outline_panel.selected_entry(),
+                    cx,
+                ),
+                "search: | FieldΒ«  Β»        | Meaning                |  <==== selected
+search: | Field  Β«  Β»      | Meaning                |
+search: | Field    Β«  Β»    | Meaning                |
+search: | Field      Β«  Β»  | Meaning                |
+search: | Field        Β«  Β»| Meaning                |
+search: | Field          | MeaningΒ«  Β»              |
+search: | Field          | Meaning  Β«  Β»            |
+search: | Field          | Meaning    Β«  Β»          |
+search: | Field          | Meaning      Β«  Β»        |
+search: | Field          | Meaning        Β«  Β»      |
+search: | Field          | Meaning          Β«  Β»    |
+search: | Field          | Meaning            Β«  Β»  |
+search: | Field          | Meaning              Β«  Β»|"
+            );
+        });
+    }
 }
  
  
  
    
    @@ -1,7 +1,6 @@
 use std::{
     any::Any,
     borrow::Borrow,
-    collections::HashSet,
     path::{Path, PathBuf},
     str::FromStr as _,
     sync::Arc,
@@ -137,7 +136,7 @@ impl EventEmitter<AgentServersUpdated> for AgentServerStore {}
 #[cfg(test)]
 mod ext_agent_tests {
     use super::*;
-    use std::fmt::Write as _;
+    use std::{collections::HashSet, fmt::Write as _};
 
     // Helper to build a store in Collab mode so we can mutate internal maps without
     // needing to spin up a full project environment.
@@ -244,25 +243,18 @@ impl AgentServerStore {
         // Collect manifests first so we can iterate twice
         let manifests: Vec<_> = manifests.into_iter().collect();
 
-        // Remove existing extension-provided agents by tracking which ones we're about to add
-        let extension_agent_names: HashSet<_> = manifests
-            .iter()
-            .flat_map(|(_, manifest)| manifest.agent_servers.keys().map(|k| k.to_string()))
-            .collect();
-
-        let keys_to_remove: Vec<_> = self
-            .external_agents
-            .keys()
-            .filter(|name| {
-                // Remove if it matches an extension agent name from any extension
-                extension_agent_names.contains(name.0.as_ref())
-            })
-            .cloned()
-            .collect();
-        for key in &keys_to_remove {
-            self.external_agents.remove(key);
-            self.agent_icons.remove(key);
-        }
+        // Remove all extension-provided agents
+        // (They will be re-added below if they're in the currently installed extensions)
+        self.external_agents.retain(|name, agent| {
+            if agent.downcast_mut::<LocalExtensionArchiveAgent>().is_some() {
+                self.agent_icons.remove(name);
+                false
+            } else {
+                // Keep the hardcoded external agents that don't come from extensions
+                // (In the future we may move these over to being extensions too.)
+                true
+            }
+        });
 
         // Insert agent servers from extension manifests
         match &self.state {
@@ -1037,7 +1029,7 @@ impl ExternalAgentServer for LocalGemini {
         cx.spawn(async move |cx| {
             let mut env = project_environment
                 .update(cx, |project_environment, cx| {
-                    project_environment.get_local_directory_environment(
+                    project_environment.local_directory_environment(
                         &Shell::System,
                         root_dir.clone(),
                         cx,
@@ -1133,7 +1125,7 @@ impl ExternalAgentServer for LocalClaudeCode {
         cx.spawn(async move |cx| {
             let mut env = project_environment
                 .update(cx, |project_environment, cx| {
-                    project_environment.get_local_directory_environment(
+                    project_environment.local_directory_environment(
                         &Shell::System,
                         root_dir.clone(),
                         cx,
@@ -1227,7 +1219,7 @@ impl ExternalAgentServer for LocalCodex {
         cx.spawn(async move |cx| {
             let mut env = project_environment
                 .update(cx, |project_environment, cx| {
-                    project_environment.get_local_directory_environment(
+                    project_environment.local_directory_environment(
                         &Shell::System,
                         root_dir.clone(),
                         cx,
@@ -1402,7 +1394,7 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent {
             // Get project environment
             let mut env = project_environment
                 .update(cx, |project_environment, cx| {
-                    project_environment.get_local_directory_environment(
+                    project_environment.local_directory_environment(
                         &Shell::System,
                         root_dir.clone(),
                         cx,
@@ -1585,7 +1577,7 @@ impl ExternalAgentServer for LocalCustomAgent {
         cx.spawn(async move |cx| {
             let mut env = project_environment
                 .update(cx, |project_environment, cx| {
-                    project_environment.get_local_directory_environment(
+                    project_environment.local_directory_environment(
                         &Shell::System,
                         root_dir.clone(),
                         cx,
@@ -1702,6 +1694,8 @@ impl settings::Settings for AllAgentServersSettings {
 
 #[cfg(test)]
 mod extension_agent_tests {
+    use crate::worktree_store::WorktreeStore;
+
     use super::*;
     use gpui::TestAppContext;
     use std::sync::Arc;
@@ -1826,7 +1820,9 @@ mod extension_agent_tests {
     async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) {
         let fs = fs::FakeFs::new(cx.background_executor.clone());
         let http_client = http_client::FakeHttpClient::with_404_response();
-        let project_environment = cx.new(|cx| crate::ProjectEnvironment::new(None, cx));
+        let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
+        let project_environment =
+            cx.new(|cx| crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, cx));
 
         let agent = LocalExtensionArchiveAgent {
             fs,
  
  
  
    
    @@ -180,13 +180,7 @@ impl RemoteBufferStore {
                         buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?)
                             as Arc<dyn language::File>);
                     }
-                    Buffer::from_proto(
-                        replica_id,
-                        capability,
-                        state,
-                        buffer_file,
-                        cx.background_executor(),
-                    )
+                    Buffer::from_proto(replica_id, capability, state, buffer_file)
                 });
 
                 match buffer_result {
@@ -634,10 +628,9 @@ impl LocalBufferStore {
                 Ok(loaded) => {
                     let reservation = cx.reserve_entity::<Buffer>()?;
                     let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
-                    let executor = cx.background_executor().clone();
                     let text_buffer = cx
                         .background_spawn(async move {
-                            text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text, &executor)
+                            text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text)
                         })
                         .await;
                     cx.insert_entity(reservation, |_| {
@@ -646,12 +639,7 @@ impl LocalBufferStore {
                 }
                 Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                     let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
-                    let text_buffer = text::Buffer::new(
-                        ReplicaId::LOCAL,
-                        buffer_id,
-                        "",
-                        cx.background_executor(),
-                    );
+                    let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, "");
                     Buffer::build(
                         text_buffer,
                         Some(Arc::new(File {
  
  
  
    
    @@ -49,7 +49,7 @@ use std::{
     path::{Path, PathBuf},
     sync::{Arc, Once},
 };
-use task::{DebugScenario, Shell, SpawnInTerminal, TaskContext, TaskTemplate};
+use task::{DebugScenario, SpawnInTerminal, TaskContext, TaskTemplate};
 use util::{ResultExt as _, rel_path::RelPath};
 use worktree::Worktree;
 
@@ -267,8 +267,8 @@ impl DapStore {
                 let user_env = dap_settings.map(|s| s.env.clone());
 
                 let delegate = self.delegate(worktree, console, cx);
-                let cwd: Arc<Path> = worktree.read(cx).abs_path().as_ref().into();
 
+                let worktree = worktree.clone();
                 cx.spawn(async move |this, cx| {
                     let mut binary = adapter
                         .get_binary(
@@ -287,11 +287,7 @@ impl DapStore {
                                 .unwrap()
                                 .environment
                                 .update(cx, |environment, cx| {
-                                    environment.get_local_directory_environment(
-                                        &Shell::System,
-                                        cwd,
-                                        cx,
-                                    )
+                                    environment.worktree_environment(worktree, cx)
                                 })
                         })?
                         .await;
@@ -607,9 +603,9 @@ impl DapStore {
             local_store.node_runtime.clone(),
             local_store.http_client.clone(),
             local_store.toolchain_store.clone(),
-            local_store.environment.update(cx, |env, cx| {
-                env.get_worktree_environment(worktree.clone(), cx)
-            }),
+            local_store
+                .environment
+                .update(cx, |env, cx| env.worktree_environment(worktree.clone(), cx)),
             local_store.is_headless,
         ))
     }
  
  
  
    
    @@ -5,11 +5,12 @@ use remote::RemoteClient;
 use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID};
 use std::{collections::VecDeque, path::Path, sync::Arc};
 use task::{Shell, shell_to_proto};
-use util::ResultExt;
+use terminal::terminal_settings::TerminalSettings;
+use util::{ResultExt, rel_path::RelPath};
 use worktree::Worktree;
 
 use collections::HashMap;
-use gpui::{AppContext as _, Context, Entity, EventEmitter, Task};
+use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task, WeakEntity};
 use settings::Settings as _;
 
 use crate::{
@@ -23,6 +24,8 @@ pub struct ProjectEnvironment {
     remote_environments: HashMap<(Shell, Arc<Path>), Shared<Task<Option<HashMap<String, String>>>>>,
     environment_error_messages: VecDeque<String>,
     environment_error_messages_tx: mpsc::UnboundedSender<String>,
+    worktree_store: WeakEntity<WorktreeStore>,
+    remote_client: Option<WeakEntity<RemoteClient>>,
     _tasks: Vec<Task<()>>,
 }
 
@@ -33,7 +36,12 @@ pub enum ProjectEnvironmentEvent {
 impl EventEmitter<ProjectEnvironmentEvent> for ProjectEnvironment {}
 
 impl ProjectEnvironment {
-    pub fn new(cli_environment: Option<HashMap<String, String>>, cx: &mut Context<Self>) -> Self {
+    pub fn new(
+        cli_environment: Option<HashMap<String, String>>,
+        worktree_store: WeakEntity<WorktreeStore>,
+        remote_client: Option<WeakEntity<RemoteClient>>,
+        cx: &mut Context<Self>,
+    ) -> Self {
         let (tx, mut rx) = mpsc::unbounded();
         let task = cx.spawn(async move |this, cx| {
             while let Some(message) = rx.next().await {
@@ -50,12 +58,17 @@ impl ProjectEnvironment {
             remote_environments: Default::default(),
             environment_error_messages: Default::default(),
             environment_error_messages_tx: tx,
+            worktree_store,
+            remote_client,
             _tasks: vec![task],
         }
     }
 
     /// Returns the inherited CLI environment, if this project was opened from the Zed CLI.
     pub(crate) fn get_cli_environment(&self) -> Option<HashMap<String, String>> {
+        if cfg!(any(test, feature = "test-support")) {
+            return Some(HashMap::default());
+        }
         if let Some(mut env) = self.cli_environment.clone() {
             set_origin_marker(&mut env, EnvironmentOrigin::Cli);
             Some(env)
@@ -64,16 +77,12 @@ impl ProjectEnvironment {
         }
     }
 
-    pub(crate) fn get_buffer_environment(
+    pub fn buffer_environment(
         &mut self,
         buffer: &Entity<Buffer>,
         worktree_store: &Entity<WorktreeStore>,
         cx: &mut Context<Self>,
     ) -> Shared<Task<Option<HashMap<String, String>>>> {
-        if cfg!(any(test, feature = "test-support")) {
-            return Task::ready(Some(HashMap::default())).shared();
-        }
-
         if let Some(cli_environment) = self.get_cli_environment() {
             log::debug!("using project environment variables from CLI");
             return Task::ready(Some(cli_environment)).shared();
@@ -87,54 +96,105 @@ impl ProjectEnvironment {
         else {
             return Task::ready(None).shared();
         };
-
-        self.get_worktree_environment(worktree, cx)
+        self.worktree_environment(worktree, cx)
     }
 
-    pub fn get_worktree_environment(
+    pub fn worktree_environment(
         &mut self,
         worktree: Entity<Worktree>,
-        cx: &mut Context<Self>,
+        cx: &mut App,
     ) -> Shared<Task<Option<HashMap<String, String>>>> {
-        if cfg!(any(test, feature = "test-support")) {
-            return Task::ready(Some(HashMap::default())).shared();
-        }
-
         if let Some(cli_environment) = self.get_cli_environment() {
             log::debug!("using project environment variables from CLI");
             return Task::ready(Some(cli_environment)).shared();
         }
 
-        let mut abs_path = worktree.read(cx).abs_path();
-        if !worktree.read(cx).is_local() {
-            log::error!(
-                "attempted to get project environment for a non-local worktree at {abs_path:?}"
-            );
-            return Task::ready(None).shared();
-        } else if worktree.read(cx).is_single_file() {
+        let worktree = worktree.read(cx);
+        let mut abs_path = worktree.abs_path();
+        if worktree.is_single_file() {
             let Some(parent) = abs_path.parent() else {
                 return Task::ready(None).shared();
             };
             abs_path = parent.into();
         }
 
-        self.get_local_directory_environment(&Shell::System, abs_path, cx)
+        let remote_client = self.remote_client.as_ref().and_then(|it| it.upgrade());
+        match remote_client {
+            Some(remote_client) => remote_client.clone().read(cx).shell().map(|shell| {
+                self.remote_directory_environment(
+                    &Shell::Program(shell),
+                    abs_path,
+                    remote_client,
+                    cx,
+                )
+            }),
+            None => Some({
+                let shell = TerminalSettings::get(
+                    Some(settings::SettingsLocation {
+                        worktree_id: worktree.id(),
+                        path: RelPath::empty(),
+                    }),
+                    cx,
+                )
+                .shell
+                .clone();
+
+                self.local_directory_environment(&shell, abs_path, cx)
+            }),
+        }
+        .unwrap_or_else(|| Task::ready(None).shared())
+    }
+
+    pub fn directory_environment(
+        &mut self,
+        abs_path: Arc<Path>,
+        cx: &mut App,
+    ) -> Shared<Task<Option<HashMap<String, String>>>> {
+        let remote_client = self.remote_client.as_ref().and_then(|it| it.upgrade());
+        match remote_client {
+            Some(remote_client) => remote_client.clone().read(cx).shell().map(|shell| {
+                self.remote_directory_environment(
+                    &Shell::Program(shell),
+                    abs_path,
+                    remote_client,
+                    cx,
+                )
+            }),
+            None => self
+                .worktree_store
+                .read_with(cx, |worktree_store, cx| {
+                    worktree_store.find_worktree(&abs_path, cx)
+                })
+                .ok()
+                .map(|worktree| {
+                    let shell = terminal::terminal_settings::TerminalSettings::get(
+                        worktree
+                            .as_ref()
+                            .map(|(worktree, path)| settings::SettingsLocation {
+                                worktree_id: worktree.read(cx).id(),
+                                path: &path,
+                            }),
+                        cx,
+                    )
+                    .shell
+                    .clone();
+
+                    self.local_directory_environment(&shell, abs_path, cx)
+                }),
+        }
+        .unwrap_or_else(|| Task::ready(None).shared())
     }
 
     /// Returns the project environment, if possible.
     /// If the project was opened from the CLI, then the inherited CLI environment is returned.
     /// If it wasn't opened from the CLI, and an absolute path is given, then a shell is spawned in
     /// that directory, to get environment variables as if the user has `cd`'d there.
-    pub fn get_local_directory_environment(
+    pub fn local_directory_environment(
         &mut self,
         shell: &Shell,
         abs_path: Arc<Path>,
-        cx: &mut Context<Self>,
+        cx: &mut App,
     ) -> Shared<Task<Option<HashMap<String, String>>>> {
-        if cfg!(any(test, feature = "test-support")) {
-            return Task::ready(Some(HashMap::default())).shared();
-        }
-
         if let Some(cli_environment) = self.get_cli_environment() {
             log::debug!("using project environment variables from CLI");
             return Task::ready(Some(cli_environment)).shared();
@@ -146,7 +206,7 @@ impl ProjectEnvironment {
                 let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone();
                 let shell = shell.clone();
                 let tx = self.environment_error_messages_tx.clone();
-                cx.spawn(async move |_, cx| {
+                cx.spawn(async move |cx| {
                     let mut shell_env = cx
                         .background_spawn(load_directory_shell_environment(
                             shell,
@@ -178,12 +238,12 @@ impl ProjectEnvironment {
             .clone()
     }
 
-    pub fn get_remote_directory_environment(
+    pub fn remote_directory_environment(
         &mut self,
         shell: &Shell,
         abs_path: Arc<Path>,
         remote_client: Entity<RemoteClient>,
-        cx: &mut Context<Self>,
+        cx: &mut App,
     ) -> Shared<Task<Option<HashMap<String, String>>>> {
         if cfg!(any(test, feature = "test-support")) {
             return Task::ready(Some(HashMap::default())).shared();
@@ -201,7 +261,7 @@ impl ProjectEnvironment {
                             shell: Some(shell_to_proto(shell.clone())),
                             directory: abs_path.to_string_lossy().to_string(),
                         });
-                cx.spawn(async move |_, _| {
+                cx.background_spawn(async move {
                     let environment = response.await.log_err()?;
                     Some(environment.environment.into_iter().collect())
                 })
  
  
  
    
    @@ -4804,7 +4804,7 @@ impl Repository {
                 .upgrade()
                 .context("missing project environment")?
                 .update(cx, |project_environment, cx| {
-                    project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
+                    project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
                 })?
                 .await
                 .unwrap_or_else(|| {
  
  
  
    
    @@ -276,8 +276,8 @@ mod tests {
     use util::{path, rel_path::rel_path};
     use worktree::WorktreeSettings;
 
-    #[gpui::test]
-    fn test_parse_conflicts_in_buffer(cx: &mut TestAppContext) {
+    #[test]
+    fn test_parse_conflicts_in_buffer() {
         // Create a buffer with conflict markers
         let test_content = r#"
             This is some text before the conflict.
@@ -299,12 +299,7 @@ mod tests {
         .unindent();
 
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            buffer_id,
-            test_content,
-            cx.background_executor(),
-        );
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
@@ -360,8 +355,8 @@ mod tests {
         assert_eq!(conflicts_in_range.len(), 0);
     }
 
-    #[gpui::test]
-    fn test_nested_conflict_markers(cx: &mut TestAppContext) {
+    #[test]
+    fn test_nested_conflict_markers() {
         // Create a buffer with nested conflict markers
         let test_content = r#"
             This is some text before the conflict.
@@ -379,12 +374,7 @@ mod tests {
         .unindent();
 
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            buffer_id,
-            test_content,
-            cx.background_executor(),
-        );
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
@@ -406,8 +396,8 @@ mod tests {
         assert_eq!(their_text, "This is their version in a nested conflict\n");
     }
 
-    #[gpui::test]
-    fn test_conflict_markers_at_eof(cx: &mut TestAppContext) {
+    #[test]
+    fn test_conflict_markers_at_eof() {
         let test_content = r#"
             <<<<<<< ours
             =======
@@ -415,20 +405,15 @@ mod tests {
             >>>>>>> "#
             .unindent();
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            buffer_id,
-            test_content,
-            cx.background_executor(),
-        );
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
         assert_eq!(conflict_snapshot.conflicts.len(), 1);
     }
 
-    #[gpui::test]
-    fn test_conflicts_in_range(cx: &mut TestAppContext) {
+    #[test]
+    fn test_conflicts_in_range() {
         // Create a buffer with conflict markers
         let test_content = r#"
             one
@@ -462,12 +447,7 @@ mod tests {
         .unindent();
 
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(
-            ReplicaId::LOCAL,
-            buffer_id,
-            test_content.clone(),
-            cx.background_executor(),
-        );
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone());
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
  
  
  
    
    @@ -75,14 +75,14 @@ use language::{
     range_from_lsp, range_to_lsp,
 };
 use lsp::{
-    AdapterServerCapabilities, CodeActionKind, CompletionContext, DiagnosticServerCapabilities,
-    DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit,
-    FileOperationFilter, FileOperationPatternKind, FileOperationRegistrationOptions, FileRename,
-    FileSystemWatcher, LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary,
-    LanguageServerBinaryOptions, LanguageServerId, LanguageServerName, LanguageServerSelector,
-    LspRequestFuture, MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind,
-    TextDocumentSyncSaveOptions, TextEdit, Uri, WillRenameFiles, WorkDoneProgressCancelParams,
-    WorkspaceFolder, notification::DidRenameFiles,
+    AdapterServerCapabilities, CodeActionKind, CompletionContext, CompletionOptions,
+    DiagnosticServerCapabilities, DiagnosticSeverity, DiagnosticTag,
+    DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, FileOperationPatternKind,
+    FileOperationRegistrationOptions, FileRename, FileSystemWatcher, LSP_REQUEST_TIMEOUT,
+    LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId,
+    LanguageServerName, LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType,
+    OneOf, RenameFilesParams, SymbolKind, TextDocumentSyncSaveOptions, TextEdit, Uri,
+    WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles,
 };
 use node_runtime::read_package_installed_version;
 use parking_lot::Mutex;
@@ -853,23 +853,32 @@ impl LocalLspStore {
         language_server
             .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
                 let lsp_store = lsp_store.clone();
+                let request_id = Arc::new(AtomicUsize::new(0));
                 move |(), cx| {
-                    let this = lsp_store.clone();
+                    let lsp_store = lsp_store.clone();
+                    let request_id = request_id.clone();
                     let mut cx = cx.clone();
                     async move {
-                        this.update(&mut cx, |lsp_store, cx| {
-                            cx.emit(LspStoreEvent::RefreshInlayHints(server_id));
-                            lsp_store
-                                .downstream_client
-                                .as_ref()
-                                .map(|(client, project_id)| {
-                                    client.send(proto::RefreshInlayHints {
-                                        project_id: *project_id,
-                                        server_id: server_id.to_proto(),
+                        lsp_store
+                            .update(&mut cx, |lsp_store, cx| {
+                                let request_id =
+                                    Some(request_id.fetch_add(1, atomic::Ordering::AcqRel));
+                                cx.emit(LspStoreEvent::RefreshInlayHints {
+                                    server_id,
+                                    request_id,
+                                });
+                                lsp_store
+                                    .downstream_client
+                                    .as_ref()
+                                    .map(|(client, project_id)| {
+                                        client.send(proto::RefreshInlayHints {
+                                            project_id: *project_id,
+                                            server_id: server_id.to_proto(),
+                                            request_id: request_id.map(|id| id as u64),
+                                        })
                                     })
-                                })
-                        })?
-                        .transpose()?;
+                            })?
+                            .transpose()?;
                         Ok(())
                     }
                 }
@@ -3659,7 +3668,10 @@ pub enum LspStoreEvent {
         new_language: Option<Arc<Language>>,
     },
     Notification(String),
-    RefreshInlayHints(LanguageServerId),
+    RefreshInlayHints {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     RefreshCodeLens,
     DiagnosticsUpdated {
         server_id: LanguageServerId,
@@ -5329,8 +5341,8 @@ impl LspStore {
                 request.to_proto(project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let Some(project) = weak_project.upgrade() else {
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let Some(lsp_store) = weak_lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let Some(responses) = request_task.await? else {
@@ -5339,7 +5351,7 @@ impl LspStore {
                 let actions = join_all(responses.payload.into_iter().map(|response| {
                     GetDefinitions { position }.response_from_proto(
                         response.response,
-                        project.clone(),
+                        lsp_store.clone(),
                         buffer.clone(),
                         cx.clone(),
                     )
@@ -5395,8 +5407,8 @@ impl LspStore {
                 request.to_proto(project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let Some(project) = weak_project.upgrade() else {
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let Some(lsp_store) = weak_lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let Some(responses) = request_task.await? else {
@@ -5405,7 +5417,7 @@ impl LspStore {
                 let actions = join_all(responses.payload.into_iter().map(|response| {
                     GetDeclarations { position }.response_from_proto(
                         response.response,
-                        project.clone(),
+                        lsp_store.clone(),
                         buffer.clone(),
                         cx.clone(),
                     )
@@ -5461,8 +5473,8 @@ impl LspStore {
                 request.to_proto(project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let Some(project) = weak_project.upgrade() else {
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let Some(lsp_store) = weak_lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let Some(responses) = request_task.await? else {
@@ -5471,7 +5483,7 @@ impl LspStore {
                 let actions = join_all(responses.payload.into_iter().map(|response| {
                     GetTypeDefinitions { position }.response_from_proto(
                         response.response,
-                        project.clone(),
+                        lsp_store.clone(),
                         buffer.clone(),
                         cx.clone(),
                     )
@@ -5527,8 +5539,8 @@ impl LspStore {
                 request.to_proto(project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let Some(project) = weak_project.upgrade() else {
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let Some(lsp_store) = weak_lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let Some(responses) = request_task.await? else {
@@ -5537,7 +5549,7 @@ impl LspStore {
                 let actions = join_all(responses.payload.into_iter().map(|response| {
                     GetImplementations { position }.response_from_proto(
                         response.response,
-                        project.clone(),
+                        lsp_store.clone(),
                         buffer.clone(),
                         cx.clone(),
                     )
@@ -5594,8 +5606,8 @@ impl LspStore {
                 request.to_proto(project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let Some(project) = weak_project.upgrade() else {
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let Some(lsp_store) = weak_lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let Some(responses) = request_task.await? else {
@@ -5605,7 +5617,7 @@ impl LspStore {
                 let locations = join_all(responses.payload.into_iter().map(|lsp_response| {
                     GetReferences { position }.response_from_proto(
                         lsp_response.response,
-                        project.clone(),
+                        lsp_store.clone(),
                         buffer.clone(),
                         cx.clone(),
                     )
@@ -5662,8 +5674,8 @@ impl LspStore {
                 request.to_proto(project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let Some(project) = weak_project.upgrade() else {
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let Some(lsp_store) = weak_lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let Some(responses) = request_task.await? else {
@@ -5676,7 +5688,7 @@ impl LspStore {
                     }
                     .response_from_proto(
                         response.response,
-                        project.clone(),
+                        lsp_store.clone(),
                         buffer.clone(),
                         cx.clone(),
                     )
@@ -6636,14 +6648,22 @@ impl LspStore {
         cx: &mut Context<Self>,
     ) -> HashMap<Range<BufferRow>, Task<Result<CacheInlayHints>>> {
         let buffer_snapshot = buffer.read(cx).snapshot();
-        let for_server = if let InvalidationStrategy::RefreshRequested(server_id) = invalidate {
+        let next_hint_id = self.next_hint_id.clone();
+        let lsp_data = self.latest_lsp_data(&buffer, cx);
+        let mut lsp_refresh_requested = false;
+        let for_server = if let InvalidationStrategy::RefreshRequested {
+            server_id,
+            request_id,
+        } = invalidate
+        {
+            let invalidated = lsp_data
+                .inlay_hints
+                .invalidate_for_server_refresh(server_id, request_id);
+            lsp_refresh_requested = invalidated;
             Some(server_id)
         } else {
             None
         };
-        let invalidate_cache = invalidate.should_invalidate();
-        let next_hint_id = self.next_hint_id.clone();
-        let lsp_data = self.latest_lsp_data(&buffer, cx);
         let existing_inlay_hints = &mut lsp_data.inlay_hints;
         let known_chunks = known_chunks
             .filter(|(known_version, _)| !lsp_data.buffer_version.changed_since(known_version))
@@ -6651,8 +6671,8 @@ impl LspStore {
             .unwrap_or_default();
 
         let mut hint_fetch_tasks = Vec::new();
-        let mut cached_inlay_hints = HashMap::default();
-        let mut ranges_to_query = Vec::new();
+        let mut cached_inlay_hints = None;
+        let mut ranges_to_query = None;
         let applicable_chunks = existing_inlay_hints
             .applicable_chunks(ranges.as_slice())
             .filter(|chunk| !known_chunks.contains(&(chunk.start..chunk.end)))
@@ -6667,12 +6687,12 @@ impl LspStore {
             match (
                 existing_inlay_hints
                     .cached_hints(&row_chunk)
-                    .filter(|_| !invalidate_cache)
+                    .filter(|_| !lsp_refresh_requested)
                     .cloned(),
                 existing_inlay_hints
                     .fetched_hints(&row_chunk)
                     .as_ref()
-                    .filter(|_| !invalidate_cache)
+                    .filter(|_| !lsp_refresh_requested)
                     .cloned(),
             ) {
                 (None, None) => {
@@ -6681,19 +6701,18 @@ impl LspStore {
                     } else {
                         Point::new(row_chunk.end, 0)
                     };
-                    ranges_to_query.push((
+                    ranges_to_query.get_or_insert_with(Vec::new).push((
                         row_chunk,
                         buffer_snapshot.anchor_before(Point::new(row_chunk.start, 0))
                             ..buffer_snapshot.anchor_after(end),
                     ));
                 }
-                (None, Some(fetched_hints)) => {
-                    hint_fetch_tasks.push((row_chunk, fetched_hints.clone()))
-                }
+                (None, Some(fetched_hints)) => hint_fetch_tasks.push((row_chunk, fetched_hints)),
                 (Some(cached_hints), None) => {
                     for (server_id, cached_hints) in cached_hints {
                         if for_server.is_none_or(|for_server| for_server == server_id) {
                             cached_inlay_hints
+                                .get_or_insert_with(HashMap::default)
                                 .entry(row_chunk.start..row_chunk.end)
                                 .or_insert_with(HashMap::default)
                                 .entry(server_id)
@@ -6703,10 +6722,11 @@ impl LspStore {
                     }
                 }
                 (Some(cached_hints), Some(fetched_hints)) => {
-                    hint_fetch_tasks.push((row_chunk, fetched_hints.clone()));
+                    hint_fetch_tasks.push((row_chunk, fetched_hints));
                     for (server_id, cached_hints) in cached_hints {
                         if for_server.is_none_or(|for_server| for_server == server_id) {
                             cached_inlay_hints
+                                .get_or_insert_with(HashMap::default)
                                 .entry(row_chunk.start..row_chunk.end)
                                 .or_insert_with(HashMap::default)
                                 .entry(server_id)
@@ -6718,18 +6738,18 @@ impl LspStore {
             }
         }
 
-        let cached_chunk_data = cached_inlay_hints
-            .into_iter()
-            .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints))))
-            .collect();
-        if hint_fetch_tasks.is_empty() && ranges_to_query.is_empty() {
-            cached_chunk_data
+        if hint_fetch_tasks.is_empty()
+            && ranges_to_query
+                .as_ref()
+                .is_none_or(|ranges| ranges.is_empty())
+            && let Some(cached_inlay_hints) = cached_inlay_hints
+        {
+            cached_inlay_hints
+                .into_iter()
+                .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints))))
+                .collect()
         } else {
-            if invalidate_cache {
-                lsp_data.inlay_hints.clear();
-            }
-
-            for (chunk, range_to_query) in ranges_to_query {
+            for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() {
                 let next_hint_id = next_hint_id.clone();
                 let buffer = buffer.clone();
                 let new_inlay_hints = cx
@@ -6745,31 +6765,38 @@ impl LspStore {
                                     let update_cache = !lsp_data
                                         .buffer_version
                                         .changed_since(&buffer.read(cx).version());
-                                    new_hints_by_server
-                                        .into_iter()
-                                        .map(|(server_id, new_hints)| {
-                                            let new_hints = new_hints
-                                                .into_iter()
-                                                .map(|new_hint| {
-                                                    (
-                                                        InlayId::Hint(next_hint_id.fetch_add(
-                                                            1,
-                                                            atomic::Ordering::AcqRel,
-                                                        )),
-                                                        new_hint,
-                                                    )
-                                                })
-                                                .collect::<Vec<_>>();
-                                            if update_cache {
-                                                lsp_data.inlay_hints.insert_new_hints(
-                                                    chunk,
-                                                    server_id,
-                                                    new_hints.clone(),
-                                                );
-                                            }
-                                            (server_id, new_hints)
-                                        })
-                                        .collect()
+                                    if new_hints_by_server.is_empty() {
+                                        if update_cache {
+                                            lsp_data.inlay_hints.invalidate_for_chunk(chunk);
+                                        }
+                                        HashMap::default()
+                                    } else {
+                                        new_hints_by_server
+                                            .into_iter()
+                                            .map(|(server_id, new_hints)| {
+                                                let new_hints = new_hints
+                                                    .into_iter()
+                                                    .map(|new_hint| {
+                                                        (
+                                                            InlayId::Hint(next_hint_id.fetch_add(
+                                                                1,
+                                                                atomic::Ordering::AcqRel,
+                                                            )),
+                                                            new_hint,
+                                                        )
+                                                    })
+                                                    .collect::<Vec<_>>();
+                                                if update_cache {
+                                                    lsp_data.inlay_hints.insert_new_hints(
+                                                        chunk,
+                                                        server_id,
+                                                        new_hints.clone(),
+                                                    );
+                                                }
+                                                (server_id, new_hints)
+                                            })
+                                            .collect()
+                                    }
                                 })
                             })
                             .map_err(Arc::new)
@@ -6781,22 +6808,25 @@ impl LspStore {
                 hint_fetch_tasks.push((chunk, new_inlay_hints));
             }
 
-            let mut combined_data = cached_chunk_data;
-            combined_data.extend(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| {
-                (
-                    chunk.start..chunk.end,
-                    cx.spawn(async move |_, _| {
-                        hints_fetch.await.map_err(|e| {
-                            if e.error_code() != ErrorCode::Internal {
-                                anyhow!(e.error_code())
-                            } else {
-                                anyhow!("{e:#}")
-                            }
-                        })
-                    }),
-                )
-            }));
-            combined_data
+            cached_inlay_hints
+                .unwrap_or_default()
+                .into_iter()
+                .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints))))
+                .chain(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| {
+                    (
+                        chunk.start..chunk.end,
+                        cx.spawn(async move |_, _| {
+                            hints_fetch.await.map_err(|e| {
+                                if e.error_code() != ErrorCode::Internal {
+                                    anyhow!(e.error_code())
+                                } else {
+                                    anyhow!("{e:#}")
+                                }
+                            })
+                        }),
+                    )
+                }))
+                .collect()
         }
     }
 
@@ -7157,7 +7187,7 @@ impl LspStore {
             );
             let buffer = buffer.clone();
             cx.spawn(async move |lsp_store, cx| {
-                let Some(project) = lsp_store.upgrade() else {
+                let Some(lsp_store) = lsp_store.upgrade() else {
                     return Ok(None);
                 };
                 let colors = join_all(
@@ -7171,7 +7201,7 @@ impl LspStore {
                         .map(|color_response| {
                             let response = request.response_from_proto(
                                 color_response.response,
-                                project.clone(),
+                                lsp_store.clone(),
                                 buffer.clone(),
                                 cx.clone(),
                             );
@@ -7235,8 +7265,8 @@ impl LspStore {
                 request.to_proto(upstream_project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let project = weak_project.upgrade()?;
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let lsp_store = weak_lsp_store.upgrade()?;
                 let signatures = join_all(
                     request_task
                         .await
@@ -7248,7 +7278,7 @@ impl LspStore {
                         .map(|response| {
                             let response = GetSignatureHelp { position }.response_from_proto(
                                 response.response,
-                                project.clone(),
+                                lsp_store.clone(),
                                 buffer.clone(),
                                 cx.clone(),
                             );
@@ -7299,8 +7329,8 @@ impl LspStore {
                 request.to_proto(upstream_project_id, buffer.read(cx)),
             );
             let buffer = buffer.clone();
-            cx.spawn(async move |weak_project, cx| {
-                let project = weak_project.upgrade()?;
+            cx.spawn(async move |weak_lsp_store, cx| {
+                let lsp_store = weak_lsp_store.upgrade()?;
                 let hovers = join_all(
                     request_task
                         .await
@@ -7312,7 +7342,7 @@ impl LspStore {
                         .map(|response| {
                             let response = GetHover { position }.response_from_proto(
                                 response.response,
-                                project.clone(),
+                                lsp_store.clone(),
                                 buffer.clone(),
                                 cx.clone(),
                             );
@@ -9604,7 +9634,10 @@ impl LspStore {
             if let Some(work) = status.pending_work.remove(&token)
                 && !work.is_disk_based_diagnostics_progress
             {
-                cx.emit(LspStoreEvent::RefreshInlayHints(language_server_id));
+                cx.emit(LspStoreEvent::RefreshInlayHints {
+                    server_id: language_server_id,
+                    request_id: None,
+                });
             }
             cx.notify();
         }
@@ -9743,9 +9776,10 @@ impl LspStore {
         mut cx: AsyncApp,
     ) -> Result<proto::Ack> {
         lsp_store.update(&mut cx, |_, cx| {
-            cx.emit(LspStoreEvent::RefreshInlayHints(
-                LanguageServerId::from_proto(envelope.payload.server_id),
-            ));
+            cx.emit(LspStoreEvent::RefreshInlayHints {
+                server_id: LanguageServerId::from_proto(envelope.payload.server_id),
+                request_id: envelope.payload.request_id.map(|id| id as usize),
+            });
         })?;
         Ok(proto::Ack {})
     }
@@ -10130,7 +10164,7 @@ impl LspStore {
     ) -> Shared<Task<Option<HashMap<String, String>>>> {
         if let Some(environment) = &self.as_local().map(|local| local.environment.clone()) {
             environment.update(cx, |env, cx| {
-                env.get_buffer_environment(buffer, &self.worktree_store, cx)
+                env.buffer_environment(buffer, &self.worktree_store, cx)
             })
         } else {
             Task::ready(None).shared()
@@ -10972,7 +11006,6 @@ impl LspStore {
             language_server.name(),
             Some(key.worktree_id),
         ));
-        cx.emit(LspStoreEvent::RefreshInlayHints(server_id));
 
         let server_capabilities = language_server.capabilities();
         if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() {
@@ -11898,12 +11931,38 @@ impl LspStore {
                 "textDocument/completion" => {
                     if let Some(caps) = reg
                         .register_options
-                        .map(serde_json::from_value)
+                        .map(serde_json::from_value::<CompletionOptions>)
                         .transpose()?
                     {
                         server.update_capabilities(|capabilities| {
-                            capabilities.completion_provider = Some(caps);
+                            capabilities.completion_provider = Some(caps.clone());
                         });
+
+                        if let Some(local) = self.as_local() {
+                            let mut buffers_with_language_server = Vec::new();
+                            for handle in self.buffer_store.read(cx).buffers() {
+                                let buffer_id = handle.read(cx).remote_id();
+                                if local
+                                    .buffers_opened_in_servers
+                                    .get(&buffer_id)
+                                    .filter(|s| s.contains(&server_id))
+                                    .is_some()
+                                {
+                                    buffers_with_language_server.push(handle);
+                                }
+                            }
+                            let triggers = caps
+                                .trigger_characters
+                                .unwrap_or_default()
+                                .into_iter()
+                                .collect::<BTreeSet<_>>();
+                            for handle in buffers_with_language_server {
+                                let triggers = triggers.clone();
+                                let _ = handle.update(cx, move |buffer, cx| {
+                                    buffer.set_completion_triggers(server_id, triggers, cx);
+                                });
+                            }
+                        }
                         notify_server_capabilities_updated(&server, cx);
                     }
                 }
@@ -12890,7 +12949,7 @@ impl LanguageServerWatchedPathsBuilder {
         language_server_id: LanguageServerId,
         cx: &mut Context<LspStore>,
     ) -> LanguageServerWatchedPaths {
-        let project = cx.weak_entity();
+        let lsp_store = cx.weak_entity();
 
         const LSP_ABS_PATH_OBSERVE: Duration = Duration::from_millis(100);
         let abs_paths = self
@@ -12901,7 +12960,7 @@ impl LanguageServerWatchedPathsBuilder {
                     let abs_path = abs_path.clone();
                     let fs = fs.clone();
 
-                    let lsp_store = project.clone();
+                    let lsp_store = lsp_store.clone();
                     async move |_, cx| {
                         maybe!(async move {
                             let mut push_updates = fs.watch(&abs_path, LSP_ABS_PATH_OBSERVE).await;
@@ -13369,9 +13428,8 @@ impl LocalLspAdapterDelegate {
         fs: Arc<dyn Fs>,
         cx: &mut App,
     ) -> Arc<Self> {
-        let load_shell_env_task = environment.update(cx, |env, cx| {
-            env.get_worktree_environment(worktree.clone(), cx)
-        });
+        let load_shell_env_task =
+            environment.update(cx, |env, cx| env.worktree_environment(worktree.clone(), cx));
 
         Arc::new(Self {
             lsp_store,
  
  
  
    
    @@ -19,7 +19,10 @@ pub enum InvalidationStrategy {
     /// Demands to re-query all inlay hints needed and invalidate all cached entries, but does not require instant update with invalidation.
     ///
     /// Despite nothing forbids language server from sending this request on every edit, it is expected to be sent only when certain internal server state update, invisible for the editor otherwise.
-    RefreshRequested(LanguageServerId),
+    RefreshRequested {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited at least on one place.
     /// Neither editor nor LSP is able to tell which open file hints' are not affected, so all of them have to be invalidated, re-queried and do that fast enough to avoid being slow, but also debounce to avoid loading hints on every fast keystroke sequence.
     BufferEdited,
@@ -36,7 +39,7 @@ impl InvalidationStrategy {
     pub fn should_invalidate(&self) -> bool {
         matches!(
             self,
-            InvalidationStrategy::RefreshRequested(_) | InvalidationStrategy::BufferEdited
+            InvalidationStrategy::RefreshRequested { .. } | InvalidationStrategy::BufferEdited
         )
     }
 }
@@ -47,6 +50,7 @@ pub struct BufferInlayHints {
     hints_by_chunks: Vec<Option<CacheInlayHints>>,
     fetches_by_chunks: Vec<Option<CacheInlayHintsTask>>,
     hints_by_id: HashMap<InlayId, HintForId>,
+    latest_invalidation_requests: HashMap<LanguageServerId, Option<usize>>,
     pub(super) hint_resolves: HashMap<InlayId, Shared<Task<()>>>,
 }
 
@@ -104,6 +108,7 @@ impl BufferInlayHints {
         Self {
             hints_by_chunks: vec![None; buffer_chunks.len()],
             fetches_by_chunks: vec![None; buffer_chunks.len()],
+            latest_invalidation_requests: HashMap::default(),
             hints_by_id: HashMap::default(),
             hint_resolves: HashMap::default(),
             snapshot,
@@ -176,6 +181,7 @@ impl BufferInlayHints {
         self.fetches_by_chunks = vec![None; self.buffer_chunks.len()];
         self.hints_by_id.clear();
         self.hint_resolves.clear();
+        self.latest_invalidation_requests.clear();
     }
 
     pub fn insert_new_hints(
@@ -222,4 +228,48 @@ impl BufferInlayHints {
     pub fn buffer_chunks_len(&self) -> usize {
         self.buffer_chunks.len()
     }
+
+    pub(crate) fn invalidate_for_server_refresh(
+        &mut self,
+        for_server: LanguageServerId,
+        request_id: Option<usize>,
+    ) -> bool {
+        match self.latest_invalidation_requests.entry(for_server) {
+            hash_map::Entry::Occupied(mut o) => {
+                if request_id > *o.get() {
+                    o.insert(request_id);
+                } else {
+                    return false;
+                }
+            }
+            hash_map::Entry::Vacant(v) => {
+                v.insert(request_id);
+            }
+        }
+
+        for (chunk_id, chunk_data) in self.hints_by_chunks.iter_mut().enumerate() {
+            if let Some(removed_hints) = chunk_data
+                .as_mut()
+                .and_then(|chunk_data| chunk_data.remove(&for_server))
+            {
+                for (id, _) in removed_hints {
+                    self.hints_by_id.remove(&id);
+                    self.hint_resolves.remove(&id);
+                }
+                self.fetches_by_chunks[chunk_id] = None;
+            }
+        }
+
+        true
+    }
+
+    pub(crate) fn invalidate_for_chunk(&mut self, chunk: BufferChunk) {
+        self.fetches_by_chunks[chunk.id] = None;
+        if let Some(hints_by_server) = self.hints_by_chunks[chunk.id].take() {
+            for (hint_id, _) in hints_by_server.into_values().flatten() {
+                self.hints_by_id.remove(&hint_id);
+                self.hint_resolves.remove(&hint_id);
+            }
+        }
+    }
 }
  
  
  
    
    @@ -13,9 +13,7 @@ use futures::{
     future::{self, Shared},
     stream::FuturesUnordered,
 };
-use gpui::{
-    AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, WeakEntity,
-};
+use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity};
 use language::{
     Buffer, LanguageRegistry, LocalFile,
     language_settings::{Formatter, LanguageSettings},
@@ -560,137 +558,99 @@ impl PrettierStore {
         let plugins_to_install = new_plugins.clone();
         let fs = Arc::clone(&self.fs);
         let new_installation_task = cx
-            .spawn(async move |prettier_store, cx| {
-                cx.background_executor()
-                    .timer(Duration::from_millis(30))
-                    .await;
+            .spawn(async move  |prettier_store, cx| {
+                cx.background_executor().timer(Duration::from_millis(30)).await;
                 let location_data = prettier_store.update(cx, |prettier_store, cx| {
-                    worktree
-                        .and_then(|worktree_id| {
-                            prettier_store
-                                .worktree_store
-                                .read(cx)
-                                .worktree_for_id(worktree_id, cx)
-                                .map(|worktree| worktree.read(cx).abs_path())
-                        })
-                        .map(|locate_from| {
-                            let installed_prettiers =
-                                prettier_store.prettier_instances.keys().cloned().collect();
-                            (locate_from, installed_prettiers)
-                        })
+                    worktree.and_then(|worktree_id| {
+                        prettier_store.worktree_store
+                            .read(cx)
+                            .worktree_for_id(worktree_id, cx)
+                            .map(|worktree| worktree.read(cx).abs_path())
+                    }).map(|locate_from| {
+                        let installed_prettiers = prettier_store.prettier_instances.keys().cloned().collect();
+                        (locate_from, installed_prettiers)
+                    })
                 })?;
                 let locate_prettier_installation = match location_data {
-                    Some((locate_from, installed_prettiers)) => {
-                        Prettier::locate_prettier_installation(
-                            fs.as_ref(),
-                            &installed_prettiers,
-                            locate_from.as_ref(),
-                        )
-                        .await
-                        .context("locate prettier installation")
-                        .map_err(Arc::new)?
-                    }
+                    Some((locate_from, installed_prettiers)) => Prettier::locate_prettier_installation(
+                        fs.as_ref(),
+                        &installed_prettiers,
+                        locate_from.as_ref(),
+                    )
+                    .await
+                    .context("locate prettier installation").map_err(Arc::new)?,
                     None => ControlFlow::Continue(None),
                 };
 
-                match locate_prettier_installation {
+                match locate_prettier_installation
+                {
                     ControlFlow::Break(()) => return Ok(()),
                     ControlFlow::Continue(prettier_path) => {
                         if prettier_path.is_some() {
                             new_plugins.clear();
                         }
-                        let mut needs_install =
-                            should_write_prettier_server_file(fs.as_ref()).await;
+                        let mut needs_install = should_write_prettier_server_file(fs.as_ref()).await;
                         if let Some(previous_installation_task) = previous_installation_task
-                            && let Err(e) = previous_installation_task.await
-                        {
-                            log::error!("Failed to install default prettier: {e:#}");
-                            prettier_store.update(cx, |prettier_store, _| {
-                                if let PrettierInstallation::NotInstalled {
-                                    attempts,
-                                    not_installed_plugins,
-                                    ..
-                                } = &mut prettier_store.default_prettier.prettier
-                                {
-                                    *attempts += 1;
-                                    new_plugins.extend(not_installed_plugins.iter().cloned());
-                                    installation_attempt = *attempts;
-                                    needs_install = true;
-                                };
-                            })?;
-                        };
+                            && let Err(e) = previous_installation_task.await {
+                                log::error!("Failed to install default prettier: {e:#}");
+                                prettier_store.update(cx, |prettier_store, _| {
+                                    if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier {
+                                        *attempts += 1;
+                                        new_plugins.extend(not_installed_plugins.iter().cloned());
+                                        installation_attempt = *attempts;
+                                        needs_install = true;
+                                    };
+                                })?;
+                            };
                         if installation_attempt > prettier::FAIL_THRESHOLD {
                             prettier_store.update(cx, |prettier_store, _| {
-                                if let PrettierInstallation::NotInstalled {
-                                    installation_task,
-                                    ..
-                                } = &mut prettier_store.default_prettier.prettier
-                                {
+                                if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut prettier_store.default_prettier.prettier {
                                     *installation_task = None;
                                 };
                             })?;
                             log::warn!(
-                                "Default prettier installation had failed {installation_attempt} \
-                                times, not attempting again",
+                                "Default prettier installation had failed {installation_attempt} times, not attempting again",
                             );
                             return Ok(());
                         }
                         prettier_store.update(cx, |prettier_store, _| {
                             new_plugins.retain(|plugin| {
-                                !prettier_store
-                                    .default_prettier
-                                    .installed_plugins
-                                    .contains(plugin)
+                                !prettier_store.default_prettier.installed_plugins.contains(plugin)
                             });
-                            if let PrettierInstallation::NotInstalled {
-                                not_installed_plugins,
-                                ..
-                            } = &mut prettier_store.default_prettier.prettier
-                            {
+                            if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier {
                                 not_installed_plugins.retain(|plugin| {
-                                    !prettier_store
-                                        .default_prettier
-                                        .installed_plugins
-                                        .contains(plugin)
+                                    !prettier_store.default_prettier.installed_plugins.contains(plugin)
                                 });
                                 not_installed_plugins.extend(new_plugins.iter().cloned());
                             }
                             needs_install |= !new_plugins.is_empty();
                         })?;
                         if needs_install {
-                            log::info!(
-                                "Initializing default prettier with plugins {new_plugins:?}"
-                            );
+                            log::info!("Initializing default prettier with plugins {new_plugins:?}");
                             let installed_plugins = new_plugins.clone();
-                            let executor = cx.background_executor().clone();
                             cx.background_spawn(async move {
                                 install_prettier_packages(fs.as_ref(), new_plugins, node).await?;
                                 // Save the server file last, so the reinstall need could be determined by the absence of the file.
-                                save_prettier_server_file(fs.as_ref(), &executor).await?;
+                                save_prettier_server_file(fs.as_ref()).await?;
                                 anyhow::Ok(())
                             })
-                            .await
-                            .context("prettier & plugins install")
-                            .map_err(Arc::new)?;
-                            log::info!(
-                                "Initialized default prettier with plugins: {installed_plugins:?}"
-                            );
+                                .await
+                                .context("prettier & plugins install")
+                                .map_err(Arc::new)?;
+                            log::info!("Initialized default prettier with plugins: {installed_plugins:?}");
                             prettier_store.update(cx, |prettier_store, _| {
                                 prettier_store.default_prettier.prettier =
                                     PrettierInstallation::Installed(PrettierInstance {
                                         attempt: 0,
                                         prettier: None,
                                     });
-                                prettier_store
-                                    .default_prettier
+                                prettier_store.default_prettier
                                     .installed_plugins
                                     .extend(installed_plugins);
                             })?;
                         } else {
                             prettier_store.update(cx, |prettier_store, _| {
-                                if let PrettierInstallation::NotInstalled { .. } =
-                                    &mut prettier_store.default_prettier.prettier
-                                {
+                                if let PrettierInstallation::NotInstalled { .. } = &mut prettier_store.default_prettier.prettier {
                                     prettier_store.default_prettier.prettier =
                                         PrettierInstallation::Installed(PrettierInstance {
                                             attempt: 0,
@@ -976,14 +936,11 @@ async fn install_prettier_packages(
     anyhow::Ok(())
 }
 
-async fn save_prettier_server_file(
-    fs: &dyn Fs,
-    executor: &BackgroundExecutor,
-) -> anyhow::Result<()> {
+async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> {
     let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE);
     fs.save(
         &prettier_wrapper_path,
-        &text::Rope::from_str(prettier::PRETTIER_SERVER_JS, executor),
+        &text::Rope::from(prettier::PRETTIER_SERVER_JS),
         text::LineEnding::Unix,
     )
     .await
  
  
  
    
    @@ -33,7 +33,6 @@ pub mod search_history;
 mod yarn;
 
 use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
-use task::Shell;
 
 use crate::{
     agent_server_store::AllAgentServersSettings,
@@ -68,7 +67,7 @@ use futures::future::join_all;
 use futures::{
     StreamExt,
     channel::mpsc::{self, UnboundedReceiver},
-    future::{Shared, try_join_all},
+    future::try_join_all,
 };
 pub use image_store::{ImageItem, ImageStore};
 use image_store::{ImageItemEvent, ImageStoreEvent};
@@ -337,7 +336,10 @@ pub enum Event {
     HostReshared,
     Reshared,
     Rejoined,
-    RefreshInlayHints(LanguageServerId),
+    RefreshInlayHints {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     RefreshCodeLens,
     RevealInProjectPanel(ProjectEntryId),
     SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
@@ -712,10 +714,8 @@ pub enum ResolveState {
 impl InlayHint {
     pub fn text(&self) -> Rope {
         match &self.label {
-            InlayHintLabel::String(s) => Rope::from_str_small(s),
-            InlayHintLabel::LabelParts(parts) => {
-                Rope::from_iter_small(parts.iter().map(|part| &*part.value))
-            }
+            InlayHintLabel::String(s) => Rope::from(s),
+            InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &*part.value).collect(),
         }
     }
 }
@@ -1070,9 +1070,10 @@ impl Project {
 
             let weak_self = cx.weak_entity();
             let context_server_store =
-                cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self, cx));
+                cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self.clone(), cx));
 
-            let environment = cx.new(|cx| ProjectEnvironment::new(env, cx));
+            let environment =
+                cx.new(|cx| ProjectEnvironment::new(env, worktree_store.downgrade(), None, cx));
             let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
             let toolchain_store = cx.new(|cx| {
                 ToolchainStore::local(
@@ -1261,7 +1262,7 @@ impl Project {
 
             let weak_self = cx.weak_entity();
             let context_server_store =
-                cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self, cx));
+                cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self.clone(), cx));
 
             let buffer_store = cx.new(|cx| {
                 BufferStore::remote(
@@ -1307,7 +1308,14 @@ impl Project {
             cx.subscribe(&settings_observer, Self::on_settings_observer_event)
                 .detach();
 
-            let environment = cx.new(|cx| ProjectEnvironment::new(None, cx));
+            let environment = cx.new(|cx| {
+                ProjectEnvironment::new(
+                    None,
+                    worktree_store.downgrade(),
+                    Some(remote.downgrade()),
+                    cx,
+                )
+            });
 
             let lsp_store = cx.new(|cx| {
                 LspStore::new_remote(
@@ -1520,8 +1528,8 @@ impl Project {
             ImageStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx)
         })?;
 
-        let environment = cx.new(|cx| ProjectEnvironment::new(None, cx))?;
-
+        let environment =
+            cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, cx))?;
         let breakpoint_store =
             cx.new(|_| BreakpointStore::remote(remote_id, client.clone().into()))?;
         let dap_store = cx.new(|cx| {
@@ -1925,32 +1933,6 @@ impl Project {
         self.environment.read(cx).get_cli_environment()
     }
 
-    pub fn buffer_environment<'a>(
-        &'a self,
-        buffer: &Entity<Buffer>,
-        worktree_store: &Entity<WorktreeStore>,
-        cx: &'a mut App,
-    ) -> Shared<Task<Option<HashMap<String, String>>>> {
-        self.environment.update(cx, |environment, cx| {
-            environment.get_buffer_environment(buffer, worktree_store, cx)
-        })
-    }
-
-    pub fn directory_environment(
-        &self,
-        shell: &Shell,
-        abs_path: Arc<Path>,
-        cx: &mut App,
-    ) -> Shared<Task<Option<HashMap<String, String>>>> {
-        self.environment.update(cx, |environment, cx| {
-            if let Some(remote_client) = self.remote_client.clone() {
-                environment.get_remote_directory_environment(shell, abs_path, remote_client, cx)
-            } else {
-                environment.get_local_directory_environment(shell, abs_path, cx)
-            }
-        })
-    }
-
     #[inline]
     pub fn peek_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a String> {
         self.environment.read(cx).peek_environment_error()
@@ -3076,9 +3058,13 @@ impl Project {
                     return;
                 };
             }
-            LspStoreEvent::RefreshInlayHints(server_id) => {
-                cx.emit(Event::RefreshInlayHints(*server_id))
-            }
+            LspStoreEvent::RefreshInlayHints {
+                server_id,
+                request_id,
+            } => cx.emit(Event::RefreshInlayHints {
+                server_id: *server_id,
+                request_id: *request_id,
+            }),
             LspStoreEvent::RefreshCodeLens => cx.emit(Event::RefreshCodeLens),
             LspStoreEvent::LanguageServerPrompt(prompt) => {
                 cx.emit(Event::LanguageServerPrompt(prompt.clone()))
@@ -5404,12 +5390,7 @@ impl Project {
             worktree
                 .update(cx, |worktree, cx| {
                     let line_ending = text::LineEnding::detect(&new_text);
-                    worktree.write_file(
-                        rel_path.clone(),
-                        Rope::from_str(&new_text, cx.background_executor()),
-                        line_ending,
-                        cx,
-                    )
+                    worktree.write_file(rel_path.clone(), new_text.into(), line_ending, cx)
                 })?
                 .await
                 .context("Failed to write settings file")?;
  
  
  
    
    @@ -1461,21 +1461,21 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
     .unwrap();
     fs.save(
         path!("/the-root/Cargo.lock").as_ref(),
-        &Rope::default(),
+        &"".into(),
         Default::default(),
     )
     .await
     .unwrap();
     fs.save(
         path!("/the-stdlib/LICENSE").as_ref(),
-        &Rope::default(),
+        &"".into(),
         Default::default(),
     )
     .await
     .unwrap();
     fs.save(
         path!("/the/stdlib/src/string.rs").as_ref(),
-        &Rope::default(),
+        &"".into(),
         Default::default(),
     )
     .await
@@ -1815,10 +1815,6 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
     fake_server
         .start_progress(format!("{}/0", progress_token))
         .await;
-    assert_eq!(
-        events.next().await.unwrap(),
-        Event::RefreshInlayHints(fake_server.server.server_id())
-    );
     assert_eq!(
         events.next().await.unwrap(),
         Event::DiskBasedDiagnosticsStarted {
@@ -1957,10 +1953,6 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
             Some(worktree_id)
         )
     );
-    assert_eq!(
-        events.next().await.unwrap(),
-        Event::RefreshInlayHints(fake_server.server.server_id())
-    );
     fake_server.start_progress(progress_token).await;
     assert_eq!(
         events.next().await.unwrap(),
@@ -4072,7 +4064,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
     // to be detected by the worktree, so that the buffer starts reloading.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &Rope::from_str("the first contents", cx.background_executor()),
+        &"the first contents".into(),
         Default::default(),
     )
     .await
@@ -4083,7 +4075,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
     // previous file change may still be in progress.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &Rope::from_str("the second contents", cx.background_executor()),
+        &"the second contents".into(),
         Default::default(),
     )
     .await
@@ -4127,7 +4119,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
     // to be detected by the worktree, so that the buffer starts reloading.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &Rope::from_str("the first contents", cx.background_executor()),
+        &"the first contents".into(),
         Default::default(),
     )
     .await
@@ -4805,7 +4797,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
         marked_text_offsets("oneΛ\nthree ΛFOURΛ five\nsixtyΛ seven\n");
     fs.save(
         path!("/dir/the-file").as_ref(),
-        &Rope::from_str(new_contents.as_str(), cx.background_executor()),
+        &new_contents.as_str().into(),
         LineEnding::Unix,
     )
     .await
@@ -4837,7 +4829,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
     // Change the file on disk again, adding blank lines to the beginning.
     fs.save(
         path!("/dir/the-file").as_ref(),
-        &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
+        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
         LineEnding::Unix,
     )
     .await
@@ -4889,7 +4881,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
     // state updates correctly.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
+        &"aaa\nb\nc\n".into(),
         LineEnding::Windows,
     )
     .await
  
  
  
    
    @@ -317,7 +317,7 @@ fn local_task_context_for_location(
     cx.spawn(async move |cx| {
         let project_env = environment
             .update(cx, |environment, cx| {
-                environment.get_buffer_environment(&location.buffer, &worktree_store, cx)
+                environment.buffer_environment(&location.buffer, &worktree_store, cx)
             })
             .ok()?
             .await;
  
  
  
    
    @@ -8,7 +8,6 @@ use remote::RemoteClient;
 use settings::{Settings, SettingsLocation};
 use smol::channel::bounded;
 use std::{
-    borrow::Cow,
     path::{Path, PathBuf},
     sync::Arc,
 };
@@ -122,6 +121,7 @@ impl Project {
         let lang_registry = self.languages.clone();
         cx.spawn(async move |project, cx| {
             let shell_kind = ShellKind::new(&shell, is_windows);
+
             let activation_script = maybe!(async {
                 for toolchain in toolchains {
                     let Some(toolchain) = toolchain.await else {
@@ -143,14 +143,8 @@ impl Project {
                 .update(cx, move |_, cx| {
                     let format_to_run = || {
                         if let Some(command) = &spawn_task.command {
-                            let mut command: Option<Cow<str>> = shell_kind.try_quote(command);
-                            if let Some(command) = &mut command
-                                && command.starts_with('"')
-                                && let Some(prefix) = shell_kind.command_prefix()
-                            {
-                                *command = Cow::Owned(format!("{prefix}{command}"));
-                            }
-
+                            let command = shell_kind.prepend_command_prefix(command);
+                            let command = shell_kind.try_quote_prefix_aware(&command);
                             let args = spawn_task
                                 .args
                                 .iter()
@@ -172,12 +166,13 @@ impl Project {
                                     let activation_script =
                                         activation_script.join(&format!("{separator} "));
                                     let to_run = format_to_run();
+
+                                    let arg = format!("{activation_script}{separator} {to_run}");
+                                    let args = shell_kind.args_for_shell(false, arg);
                                     let shell = remote_client
                                         .read(cx)
                                         .shell()
                                         .unwrap_or_else(get_default_system_shell);
-                                    let arg = format!("{activation_script}{separator} {to_run}");
-                                    let args = shell_kind.args_for_shell(false, arg);
 
                                     create_remote_shell(
                                         Some((&shell, &args)),
  
  
  
    
    @@ -527,7 +527,7 @@ impl LocalToolchainStore {
 
             let project_env = environment
                 .update(cx, |environment, cx| {
-                    environment.get_local_directory_environment(
+                    environment.local_directory_environment(
                         &Shell::System,
                         abs_path.as_path().into(),
                         cx,
@@ -590,7 +590,7 @@ impl LocalToolchainStore {
 
             let project_env = environment
                 .update(cx, |environment, cx| {
-                    environment.get_local_directory_environment(
+                    environment.local_directory_environment(
                         &Shell::System,
                         path.as_path().into(),
                         cx,
  
  
  
    
    @@ -466,6 +466,7 @@ message ResolveInlayHintResponse {
 message RefreshInlayHints {
     uint64 project_id = 1;
     uint64 server_id = 2;
+    optional uint64 request_id = 3;
 }
 
 message CodeLens {
  
  
  
    
    @@ -574,6 +574,7 @@ pub async fn open_remote_project(
     open_options: workspace::OpenOptions,
     cx: &mut AsyncApp,
 ) -> Result<()> {
+    let created_new_window = open_options.replace_window.is_none();
     let window = if let Some(window) = open_options.replace_window {
         window
     } else {
@@ -648,7 +649,45 @@ pub async fn open_remote_project(
         let Some(delegate) = delegate else { break };
 
         let remote_connection =
-            remote::connect(connection_options.clone(), delegate.clone(), cx).await?;
+            match remote::connect(connection_options.clone(), delegate.clone(), cx).await {
+                Ok(connection) => connection,
+                Err(e) => {
+                    window
+                        .update(cx, |workspace, _, cx| {
+                            if let Some(ui) = workspace.active_modal::<RemoteConnectionModal>(cx) {
+                                ui.update(cx, |modal, cx| modal.finished(cx))
+                            }
+                        })
+                        .ok();
+                    log::error!("Failed to open project: {e:?}");
+                    let response = window
+                        .update(cx, |_, window, cx| {
+                            window.prompt(
+                                PromptLevel::Critical,
+                                match connection_options {
+                                    RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH",
+                                    RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL",
+                                },
+                                Some(&e.to_string()),
+                                &["Retry", "Cancel"],
+                                cx,
+                            )
+                        })?
+                        .await;
+
+                    if response == Ok(0) {
+                        continue;
+                    }
+
+                    if created_new_window {
+                        window
+                            .update(cx, |_, window, _| window.remove_window())
+                            .ok();
+                    }
+                    break;
+                }
+            };
+
         let (paths, paths_with_positions) =
             determine_paths_with_positions(&remote_connection, paths.clone()).await;
 
@@ -686,7 +725,7 @@ pub async fn open_remote_project(
                                 RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL",
                             },
                             Some(&e.to_string()),
-                            &["Retry", "Ok"],
+                            &["Retry", "Cancel"],
                             cx,
                         )
                     })?
@@ -694,7 +733,14 @@ pub async fn open_remote_project(
                 if response == Ok(0) {
                     continue;
                 }
+
+                if created_new_window {
+                    window
+                        .update(cx, |_, window, _| window.remove_window())
+                        .ok();
+                }
             }
+
             Ok(items) => {
                 for (item, path) in items.into_iter().zip(paths_with_positions) {
                     let Some(item) = item else {
  
  
  
    
    @@ -39,6 +39,7 @@ pub(crate) struct SshRemoteConnection {
     ssh_platform: RemotePlatform,
     ssh_path_style: PathStyle,
     ssh_shell: String,
+    ssh_shell_kind: ShellKind,
     ssh_default_system_shell: String,
     _temp_dir: TempDir,
 }
@@ -241,6 +242,7 @@ impl RemoteConnection for SshRemoteConnection {
         let Self {
             ssh_path_style,
             socket,
+            ssh_shell_kind,
             ssh_shell,
             ..
         } = self;
@@ -254,6 +256,7 @@ impl RemoteConnection for SshRemoteConnection {
             env,
             *ssh_path_style,
             ssh_shell,
+            *ssh_shell_kind,
             socket.ssh_args(),
         )
     }
@@ -367,7 +370,7 @@ impl RemoteConnection for SshRemoteConnection {
 
         let ssh_proxy_process = match self
             .socket
-            .ssh_command("env", &proxy_args)
+            .ssh_command(self.ssh_shell_kind, "env", &proxy_args)
             // IMPORTANT: we kill this process when we drop the task that uses it.
             .kill_on_drop(true)
             .spawn()
@@ -490,6 +493,13 @@ impl SshRemoteConnection {
             _ => PathStyle::Posix,
         };
         let ssh_default_system_shell = String::from("/bin/sh");
+        let ssh_shell_kind = ShellKind::new(
+            &ssh_shell,
+            match ssh_platform.os {
+                "windows" => true,
+                _ => false,
+            },
+        );
 
         let mut this = Self {
             socket,
@@ -499,6 +509,7 @@ impl SshRemoteConnection {
             ssh_path_style,
             ssh_platform,
             ssh_shell,
+            ssh_shell_kind,
             ssh_default_system_shell,
         };
 
@@ -563,7 +574,11 @@ impl SshRemoteConnection {
 
         if self
             .socket
-            .run_command(&dst_path.display(self.path_style()), &["version"])
+            .run_command(
+                self.ssh_shell_kind,
+                &dst_path.display(self.path_style()),
+                &["version"],
+            )
             .await
             .is_ok()
         {
@@ -632,7 +647,11 @@ impl SshRemoteConnection {
     ) -> Result<()> {
         if let Some(parent) = tmp_path_gz.parent() {
             self.socket
-                .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()])
+                .run_command(
+                    self.ssh_shell_kind,
+                    "mkdir",
+                    &["-p", parent.display(self.path_style()).as_ref()],
+                )
                 .await?;
         }
 
@@ -641,6 +660,7 @@ impl SshRemoteConnection {
         match self
             .socket
             .run_command(
+                self.ssh_shell_kind,
                 "curl",
                 &[
                     "-f",
@@ -660,13 +680,19 @@ impl SshRemoteConnection {
         {
             Ok(_) => {}
             Err(e) => {
-                if self.socket.run_command("which", &["curl"]).await.is_ok() {
+                if self
+                    .socket
+                    .run_command(self.ssh_shell_kind, "which", &["curl"])
+                    .await
+                    .is_ok()
+                {
                     return Err(e);
                 }
 
                 match self
                     .socket
                     .run_command(
+                        self.ssh_shell_kind,
                         "wget",
                         &[
                             "--header=Content-Type: application/json",
@@ -681,7 +707,12 @@ impl SshRemoteConnection {
                 {
                     Ok(_) => {}
                     Err(e) => {
-                        if self.socket.run_command("which", &["wget"]).await.is_ok() {
+                        if self
+                            .socket
+                            .run_command(self.ssh_shell_kind, "which", &["wget"])
+                            .await
+                            .is_ok()
+                        {
                             return Err(e);
                         } else {
                             anyhow::bail!("Neither curl nor wget is available");
@@ -703,7 +734,11 @@ impl SshRemoteConnection {
     ) -> Result<()> {
         if let Some(parent) = tmp_path_gz.parent() {
             self.socket
-                .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()])
+                .run_command(
+                    self.ssh_shell_kind,
+                    "mkdir",
+                    &["-p", parent.display(self.path_style()).as_ref()],
+                )
                 .await?;
         }
 
@@ -750,7 +785,7 @@ impl SshRemoteConnection {
             format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",)
         };
         let args = shell_kind.args_for_shell(false, script.to_string());
-        self.socket.run_command("sh", &args).await?;
+        self.socket.run_command(shell_kind, "sh", &args).await?;
         Ok(())
     }
 
@@ -894,11 +929,16 @@ impl SshSocket {
     // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing
     // into a machine. You must use `cd` to get back to $HOME.
     // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
-    fn ssh_command(&self, program: &str, args: &[impl AsRef<str>]) -> process::Command {
-        let shell_kind = ShellKind::Posix;
+    fn ssh_command(
+        &self,
+        shell_kind: ShellKind,
+        program: &str,
+        args: &[impl AsRef<str>],
+    ) -> process::Command {
         let mut command = util::command::new_smol_command("ssh");
+        let program = shell_kind.prepend_command_prefix(program);
         let mut to_run = shell_kind
-            .try_quote(program)
+            .try_quote_prefix_aware(&program)
             .expect("shell quoting")
             .into_owned();
         for arg in args {
@@ -920,8 +960,13 @@ impl SshSocket {
         command
     }
 
-    async fn run_command(&self, program: &str, args: &[impl AsRef<str>]) -> Result<String> {
-        let output = self.ssh_command(program, args).output().await?;
+    async fn run_command(
+        &self,
+        shell_kind: ShellKind,
+        program: &str,
+        args: &[impl AsRef<str>],
+    ) -> Result<String> {
+        let output = self.ssh_command(shell_kind, program, args).output().await?;
         anyhow::ensure!(
             output.status.success(),
             "failed to run command: {}",
@@ -994,12 +1039,7 @@ impl SshSocket {
     }
 
     async fn platform(&self, shell: ShellKind) -> Result<RemotePlatform> {
-        let program = if shell == ShellKind::Nushell {
-            "^uname"
-        } else {
-            "uname"
-        };
-        let uname = self.run_command(program, &["-sm"]).await?;
+        let uname = self.run_command(shell, "uname", &["-sm"]).await?;
         let Some((os, arch)) = uname.split_once(" ") else {
             anyhow::bail!("unknown uname: {uname:?}")
         };
@@ -1030,7 +1070,10 @@ impl SshSocket {
     }
 
     async fn shell(&self) -> String {
-        match self.run_command("sh", &["-c", "echo $SHELL"]).await {
+        match self
+            .run_command(ShellKind::Posix, "sh", &["-c", "echo $SHELL"])
+            .await
+        {
             Ok(shell) => shell.trim().to_owned(),
             Err(e) => {
                 log::error!("Failed to get shell: {e}");
@@ -1256,11 +1299,11 @@ fn build_command(
     ssh_env: HashMap<String, String>,
     ssh_path_style: PathStyle,
     ssh_shell: &str,
+    ssh_shell_kind: ShellKind,
     ssh_args: Vec<String>,
 ) -> Result<CommandTemplate> {
     use std::fmt::Write as _;
 
-    let shell_kind = ShellKind::new(ssh_shell, false);
     let mut exec = String::new();
     if let Some(working_dir) = working_dir {
         let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string();
@@ -1270,12 +1313,24 @@ fn build_command(
         const TILDE_PREFIX: &'static str = "~/";
         if working_dir.starts_with(TILDE_PREFIX) {
             let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
-            write!(exec, "cd \"$HOME/{working_dir}\" && ",)?;
+            write!(
+                exec,
+                "cd \"$HOME/{working_dir}\" {} ",
+                ssh_shell_kind.sequential_and_commands_separator()
+            )?;
         } else {
-            write!(exec, "cd \"{working_dir}\" && ",)?;
+            write!(
+                exec,
+                "cd \"{working_dir}\" {} ",
+                ssh_shell_kind.sequential_and_commands_separator()
+            )?;
         }
     } else {
-        write!(exec, "cd && ")?;
+        write!(
+            exec,
+            "cd {} ",
+            ssh_shell_kind.sequential_and_commands_separator()
+        )?;
     };
     write!(exec, "exec env ")?;
 
@@ -1284,7 +1339,7 @@ fn build_command(
             exec,
             "{}={} ",
             k,
-            shell_kind.try_quote(v).context("shell quoting")?
+            ssh_shell_kind.try_quote(v).context("shell quoting")?
         )?;
     }
 
@@ -1292,12 +1347,12 @@ fn build_command(
         write!(
             exec,
             "{}",
-            shell_kind
-                .try_quote(&input_program)
+            ssh_shell_kind
+                .try_quote_prefix_aware(&input_program)
                 .context("shell quoting")?
         )?;
         for arg in input_args {
-            let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
+            let arg = ssh_shell_kind.try_quote(&arg).context("shell quoting")?;
             write!(exec, " {}", &arg)?;
         }
     } else {
@@ -1341,6 +1396,7 @@ mod tests {
             env.clone(),
             PathStyle::Posix,
             "/bin/fish",
+            ShellKind::Fish,
             vec!["-p".to_string(), "2222".to_string()],
         )?;
 
@@ -1370,6 +1426,7 @@ mod tests {
             env.clone(),
             PathStyle::Posix,
             "/bin/fish",
+            ShellKind::Fish,
             vec!["-p".to_string(), "2222".to_string()],
         )?;
 
  
  
  
    
    @@ -44,6 +44,7 @@ pub(crate) struct WslRemoteConnection {
     remote_binary_path: Option<Arc<RelPath>>,
     platform: RemotePlatform,
     shell: String,
+    shell_kind: ShellKind,
     default_system_shell: String,
     connection_options: WslConnectionOptions,
     can_exec: bool,
@@ -73,16 +74,17 @@ impl WslRemoteConnection {
             remote_binary_path: None,
             platform: RemotePlatform { os: "", arch: "" },
             shell: String::new(),
+            shell_kind: ShellKind::Posix,
             default_system_shell: String::from("/bin/sh"),
             can_exec: true,
         };
         delegate.set_status(Some("Detecting WSL environment"), cx);
         this.shell = this.detect_shell().await?;
-        let shell = ShellKind::new(&this.shell, false);
-        this.can_exec = this.detect_can_exec(shell).await?;
-        this.platform = this.detect_platform(shell).await?;
+        this.shell_kind = ShellKind::new(&this.shell, false);
+        this.can_exec = this.detect_can_exec().await?;
+        this.platform = this.detect_platform().await?;
         this.remote_binary_path = Some(
-            this.ensure_server_binary(&delegate, release_channel, version, commit, shell, cx)
+            this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
                 .await?,
         );
         log::debug!("Detected WSL environment: {this:#?}");
@@ -90,20 +92,16 @@ impl WslRemoteConnection {
         Ok(this)
     }
 
-    async fn detect_can_exec(&self, shell: ShellKind) -> Result<bool> {
+    async fn detect_can_exec(&self) -> Result<bool> {
         let options = &self.connection_options;
-        let program = if shell == ShellKind::Nushell {
-            "^uname"
-        } else {
-            "uname"
-        };
+        let program = self.shell_kind.prepend_command_prefix("uname");
         let args = &["-m"];
-        let output = wsl_command_impl(options, program, args, true)
+        let output = wsl_command_impl(options, &program, args, true)
             .output()
             .await?;
 
         if !output.status.success() {
-            let output = wsl_command_impl(options, program, args, false)
+            let output = wsl_command_impl(options, &program, args, false)
                 .output()
                 .await?;
 
@@ -120,14 +118,9 @@ impl WslRemoteConnection {
             Ok(true)
         }
     }
-    async fn detect_platform(&self, shell: ShellKind) -> Result<RemotePlatform> {
-        let arch_str = if shell == ShellKind::Nushell {
-            // https://github.com/nushell/nushell/issues/12570
-            self.run_wsl_command("sh", &["-c", "uname -m"])
-        } else {
-            self.run_wsl_command("uname", &["-m"])
-        }
-        .await?;
+    async fn detect_platform(&self) -> Result<RemotePlatform> {
+        let program = self.shell_kind.prepend_command_prefix("uname");
+        let arch_str = self.run_wsl_command(&program, &["-m"]).await?;
         let arch_str = arch_str.trim().to_string();
         let arch = match arch_str.as_str() {
             "x86_64" => "x86_64",
@@ -163,7 +156,6 @@ impl WslRemoteConnection {
         release_channel: ReleaseChannel,
         version: SemanticVersion,
         commit: Option<AppCommitSha>,
-        shell: ShellKind,
         cx: &mut AsyncApp,
     ) -> Result<Arc<RelPath>> {
         let version_str = match release_channel {
@@ -186,12 +178,9 @@ impl WslRemoteConnection {
 
         if let Some(parent) = dst_path.parent() {
             let parent = parent.display(PathStyle::Posix);
-            if shell == ShellKind::Nushell {
-                self.run_wsl_command("mkdir", &[&parent]).await
-            } else {
-                self.run_wsl_command("mkdir", &["-p", &parent]).await
-            }
-            .map_err(|e| anyhow!("Failed to create directory: {}", e))?;
+            self.run_wsl_command("mkdir", &["-p", &parent])
+                .await
+                .map_err(|e| anyhow!("Failed to create directory: {}", e))?;
         }
 
         #[cfg(debug_assertions)]
@@ -206,7 +195,7 @@ impl WslRemoteConnection {
                 ))
                 .unwrap(),
             );
-            self.upload_file(&remote_server_path, &tmp_path, delegate, &shell, cx)
+            self.upload_file(&remote_server_path, &tmp_path, delegate, cx)
                 .await?;
             self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
                 .await?;
@@ -239,8 +228,7 @@ impl WslRemoteConnection {
         );
         let tmp_path = RelPath::unix(&tmp_path).unwrap();
 
-        self.upload_file(&src_path, &tmp_path, delegate, &shell, cx)
-            .await?;
+        self.upload_file(&src_path, &tmp_path, delegate, cx).await?;
         self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
             .await?;
 
@@ -252,19 +240,15 @@ impl WslRemoteConnection {
         src_path: &Path,
         dst_path: &RelPath,
         delegate: &Arc<dyn RemoteClientDelegate>,
-        shell: &ShellKind,
         cx: &mut AsyncApp,
     ) -> Result<()> {
         delegate.set_status(Some("Uploading remote server to WSL"), cx);
 
         if let Some(parent) = dst_path.parent() {
             let parent = parent.display(PathStyle::Posix);
-            if *shell == ShellKind::Nushell {
-                self.run_wsl_command("mkdir", &[&parent]).await
-            } else {
-                self.run_wsl_command("mkdir", &["-p", &parent]).await
-            }
-            .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?;
+            self.run_wsl_command("mkdir", &["-p", &parent])
+                .await
+                .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?;
         }
 
         let t0 = Instant::now();
@@ -441,7 +425,7 @@ impl RemoteConnection for WslRemoteConnection {
             bail!("WSL shares the network interface with the host system");
         }
 
-        let shell_kind = ShellKind::new(&self.shell, false);
+        let shell_kind = self.shell_kind;
         let working_dir = working_dir
             .map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string())
             .unwrap_or("~".to_string());
@@ -461,7 +445,9 @@ impl RemoteConnection for WslRemoteConnection {
             write!(
                 exec,
                 "{}",
-                shell_kind.try_quote(&program).context("shell quoting")?
+                shell_kind
+                    .try_quote_prefix_aware(&program)
+                    .context("shell quoting")?
             )?;
             for arg in args {
                 let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
  
  
  
    
    @@ -94,7 +94,8 @@ impl HeadlessProject {
             store
         });
 
-        let environment = cx.new(|cx| ProjectEnvironment::new(None, cx));
+        let environment =
+            cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, cx));
         let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
         let toolchain_store = cx.new(|cx| {
             ToolchainStore::local(
@@ -786,7 +787,7 @@ impl HeadlessProject {
         let environment = this
             .update(&mut cx, |this, cx| {
                 this.environment.update(cx, |environment, cx| {
-                    environment.get_local_directory_environment(&shell, directory.into(), cx)
+                    environment.local_directory_environment(&shell, directory.into(), cx)
                 })
             })?
             .await
  
  
  
    
    @@ -13,7 +13,7 @@ use fs::{FakeFs, Fs};
 use gpui::{AppContext as _, Entity, SemanticVersion, SharedString, TestAppContext};
 use http_client::{BlockedHttpClient, FakeHttpClient};
 use language::{
-    Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, Rope,
+    Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding,
     language_settings::{AllLanguageSettings, language_settings},
 };
 use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind, LanguageServerName};
@@ -120,7 +120,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
     // sees the new file.
     fs.save(
         path!("/code/project1/src/main.rs").as_ref(),
-        &Rope::from_str_small("fn main() {}"),
+        &"fn main() {}".into(),
         Default::default(),
     )
     .await
@@ -766,7 +766,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
 
     fs.save(
         &PathBuf::from(path!("/code/project1/src/lib.rs")),
-        &Rope::from_str_small("bangles"),
+        &("bangles".to_string().into()),
         LineEnding::Unix,
     )
     .await
@@ -781,7 +781,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
 
     fs.save(
         &PathBuf::from(path!("/code/project1/src/lib.rs")),
-        &Rope::from_str_small("bloop"),
+        &("bloop".to_string().into()),
         LineEnding::Unix,
     )
     .await
  
  
  
    
    @@ -1,10 +1,9 @@
 use futures::FutureExt;
 use gpui::{
-    AnyElement, AnyView, App, BackgroundExecutor, ElementId, FontStyle, FontWeight, HighlightStyle,
-    InteractiveText, IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle,
-    Window,
+    AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText,
+    IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window,
 };
-use language::{HighlightId, Language, LanguageRegistry, Rope};
+use language::{HighlightId, Language, LanguageRegistry};
 use std::{ops::Range, sync::Arc};
 use theme::ActiveTheme;
 use ui::LinkPreview;
@@ -57,7 +56,6 @@ impl RichText {
         block: String,
         mentions: &[Mention],
         language_registry: &Arc<LanguageRegistry>,
-        executor: &BackgroundExecutor,
     ) -> Self {
         let mut text = String::new();
         let mut highlights = Vec::new();
@@ -72,7 +70,6 @@ impl RichText {
             &mut highlights,
             &mut link_ranges,
             &mut link_urls,
-            executor,
         );
         text.truncate(text.trim_end().len());
 
@@ -187,7 +184,6 @@ pub fn render_markdown_mut(
     highlights: &mut Vec<(Range<usize>, Highlight)>,
     link_ranges: &mut Vec<Range<usize>>,
     link_urls: &mut Vec<String>,
-    executor: &BackgroundExecutor,
 ) {
     use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd};
 
@@ -206,7 +202,7 @@ pub fn render_markdown_mut(
         match event {
             Event::Text(t) => {
                 if let Some(language) = ¤t_language {
-                    render_code(text, highlights, t.as_ref(), language, executor);
+                    render_code(text, highlights, t.as_ref(), language);
                 } else {
                     while let Some(mention) = mentions.first() {
                         if !source_range.contains_inclusive(&mention.range) {
@@ -377,14 +373,11 @@ pub fn render_code(
     highlights: &mut Vec<(Range<usize>, Highlight)>,
     content: &str,
     language: &Arc<Language>,
-    executor: &BackgroundExecutor,
 ) {
     let prev_len = text.len();
     text.push_str(content);
     let mut offset = 0;
-    for (range, highlight_id) in
-        language.highlight_text(&Rope::from_str(content, executor), 0..content.len())
-    {
+    for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
         if range.start > offset {
             highlights.push((prev_len + offset..prev_len + range.start, Highlight::Code));
         }
  
  
  
    
    @@ -14,10 +14,10 @@ path = "src/rope.rs"
 [dependencies]
 arrayvec = "0.7.1"
 log.workspace = true
+rayon.workspace = true
 sum_tree.workspace = true
 unicode-segmentation.workspace = true
 util.workspace = true
-gpui.workspace = true
 
 [dev-dependencies]
 ctor.workspace = true
  
  
  
    
    @@ -3,7 +3,6 @@ use std::ops::Range;
 use criterion::{
     BatchSize, BenchmarkId, Criterion, Throughput, black_box, criterion_group, criterion_main,
 };
-use gpui::{AsyncApp, TestAppContext};
 use rand::prelude::*;
 use rand::rngs::StdRng;
 use rope::{Point, Rope};
@@ -27,10 +26,10 @@ fn generate_random_text(rng: &mut StdRng, len: usize) -> String {
     str
 }
 
-fn generate_random_rope(rng: &mut StdRng, text_len: usize, cx: &AsyncApp) -> Rope {
+fn generate_random_rope(rng: &mut StdRng, text_len: usize) -> Rope {
     let text = generate_random_text(rng, text_len);
     let mut rope = Rope::new();
-    rope.push(&text, cx.background_executor());
+    rope.push(&text);
     rope
 }
 
@@ -83,13 +82,11 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
             let text = generate_random_text(&mut rng, *size);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
 
             b.iter(|| {
                 let mut rope = Rope::new();
                 for _ in 0..10 {
-                    rope.push(&text, cx.background_executor());
+                    rope.push(&text);
                 }
             });
         });
@@ -102,10 +99,8 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
             let mut random_ropes = Vec::new();
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
             for _ in 0..5 {
-                let rope = generate_random_rope(&mut rng, *size, &cx);
+                let rope = generate_random_rope(&mut rng, *size);
                 random_ropes.push(rope);
             }
 
@@ -124,9 +119,7 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
-            let rope = generate_random_rope(&mut rng, *size, &cx);
+            let rope = generate_random_rope(&mut rng, *size);
 
             b.iter_batched(
                 || generate_random_rope_ranges(&mut rng, &rope),
@@ -146,9 +139,7 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
-            let rope = generate_random_rope(&mut rng, *size, &cx);
+            let rope = generate_random_rope(&mut rng, *size);
 
             b.iter_batched(
                 || generate_random_rope_ranges(&mut rng, &rope),
@@ -169,9 +160,7 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
-            let rope = generate_random_rope(&mut rng, *size, &cx);
+            let rope = generate_random_rope(&mut rng, *size);
 
             b.iter(|| {
                 let chars = rope.chars().count();
@@ -186,9 +175,7 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
-            let rope = generate_random_rope(&mut rng, *size, &cx);
+            let rope = generate_random_rope(&mut rng, *size);
 
             b.iter_batched(
                 || generate_random_rope_points(&mut rng, &rope),
@@ -209,9 +196,7 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
-            let rope = generate_random_rope(&mut rng, *size, &cx);
+            let rope = generate_random_rope(&mut rng, *size);
 
             b.iter_batched(
                 || generate_random_rope_points(&mut rng, &rope),
@@ -231,9 +216,7 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let cx = TestAppContext::single();
-            let cx = cx.to_async();
-            let rope = generate_random_rope(&mut rng, *size, &cx);
+            let rope = generate_random_rope(&mut rng, *size);
 
             b.iter_batched(
                 || {
  
  
  
    
    @@ -5,7 +5,7 @@ mod point_utf16;
 mod unclipped;
 
 use arrayvec::ArrayVec;
-use gpui::BackgroundExecutor;
+use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
 use std::{
     cmp, fmt, io, mem,
     ops::{self, AddAssign, Range},
@@ -31,41 +31,6 @@ impl Rope {
         Self::default()
     }
 
-    /// Create a new rope from a string without trying to parallelize the construction for large strings.
-    pub fn from_str_small(text: &str) -> Self {
-        let mut rope = Self::new();
-        rope.push_small(text);
-        rope
-    }
-
-    /// Create a new rope from a string.
-    pub fn from_str(text: &str, executor: &BackgroundExecutor) -> Self {
-        let mut rope = Self::new();
-        rope.push(text, executor);
-        rope
-    }
-
-    /// Create a new rope from a string without trying to parallelize the construction for large strings.
-    pub fn from_iter_small<'a, T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
-        let mut rope = Rope::new();
-        for chunk in iter {
-            rope.push_small(chunk);
-        }
-        rope
-    }
-
-    /// Create a new rope from a string.
-    pub fn from_iter<'a, T: IntoIterator<Item = &'a str>>(
-        iter: T,
-        executor: &BackgroundExecutor,
-    ) -> Self {
-        let mut rope = Rope::new();
-        for chunk in iter {
-            rope.push(chunk, executor);
-        }
-        rope
-    }
-
     /// Checks that `index`-th byte is the first byte in a UTF-8 code point
     /// sequence or the end of the string.
     ///
@@ -180,12 +145,12 @@ impl Rope {
         self.check_invariants();
     }
 
-    pub fn replace(&mut self, range: Range<usize>, text: &str, executor: &BackgroundExecutor) {
+    pub fn replace(&mut self, range: Range<usize>, text: &str) {
         let mut new_rope = Rope::new();
         let mut cursor = self.cursor(0);
         new_rope.append(cursor.slice(range.start));
         cursor.seek_forward(range.end);
-        new_rope.push(text, executor);
+        new_rope.push(text);
         new_rope.append(cursor.suffix());
         *self = new_rope;
     }
@@ -203,12 +168,28 @@ impl Rope {
         self.slice(start..end)
     }
 
-    pub fn push(&mut self, mut text: &str, executor: &BackgroundExecutor) {
-        self.fill_last_chunk(&mut text);
+    pub fn push(&mut self, mut text: &str) {
+        self.chunks.update_last(
+            |last_chunk| {
+                let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE {
+                    text.len()
+                } else {
+                    let mut split_ix = cmp::min(
+                        chunk::MIN_BASE.saturating_sub(last_chunk.text.len()),
+                        text.len(),
+                    );
+                    while !text.is_char_boundary(split_ix) {
+                        split_ix += 1;
+                    }
+                    split_ix
+                };
 
-        if text.is_empty() {
-            return;
-        }
+                let (suffix, remainder) = text.split_at(split_ix);
+                last_chunk.push_str(suffix);
+                text = remainder;
+            },
+            (),
+        );
 
         #[cfg(all(test, not(rust_analyzer)))]
         const NUM_CHUNKS: usize = 16;
@@ -219,8 +200,7 @@ impl Rope {
         // but given the chunk boundary can land within a character
         // we need to accommodate for the worst case where every chunk gets cut short by up to 4 bytes
         if text.len() > NUM_CHUNKS * chunk::MAX_BASE - NUM_CHUNKS * 4 {
-            let future = self.push_large(text, executor.clone());
-            return executor.block(future);
+            return self.push_large(text);
         }
         // 16 is enough as otherwise we will hit the branch above
         let mut new_chunks = ArrayVec::<_, NUM_CHUNKS>::new();
@@ -240,57 +220,8 @@ impl Rope {
         self.check_invariants();
     }
 
-    /// Pushes a string into the rope. Unlike [`push`], this method does not parallelize the construction on large strings.
-    pub fn push_small(&mut self, mut text: &str) {
-        self.fill_last_chunk(&mut text);
-        if text.is_empty() {
-            return;
-        }
-
-        // 16 is enough as otherwise we will hit the branch above
-        let mut new_chunks = Vec::new();
-
-        while !text.is_empty() {
-            let mut split_ix = cmp::min(chunk::MAX_BASE, text.len());
-            while !text.is_char_boundary(split_ix) {
-                split_ix -= 1;
-            }
-            let (chunk, remainder) = text.split_at(split_ix);
-            new_chunks.push(chunk);
-            text = remainder;
-        }
-        self.chunks
-            .extend(new_chunks.into_iter().map(Chunk::new), ());
-
-        self.check_invariants();
-    }
-
-    fn fill_last_chunk(&mut self, text: &mut &str) {
-        self.chunks.update_last(
-            |last_chunk| {
-                let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE {
-                    text.len()
-                } else {
-                    let mut split_ix = cmp::min(
-                        chunk::MIN_BASE.saturating_sub(last_chunk.text.len()),
-                        text.len(),
-                    );
-                    while !text.is_char_boundary(split_ix) {
-                        split_ix += 1;
-                    }
-                    split_ix
-                };
-
-                let (suffix, remainder) = text.split_at(split_ix);
-                last_chunk.push_str(suffix);
-                *text = remainder;
-            },
-            (),
-        );
-    }
-
     /// A copy of `push` specialized for working with large quantities of text.
-    async fn push_large(&mut self, mut text: &str, executor: BackgroundExecutor) {
+    fn push_large(&mut self, mut text: &str) {
         // To avoid frequent reallocs when loading large swaths of file contents,
         // we estimate worst-case `new_chunks` capacity;
         // Chunk is a fixed-capacity buffer. If a character falls on
@@ -323,22 +254,8 @@ impl Rope {
         const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE);
 
         if new_chunks.len() >= PARALLEL_THRESHOLD {
-            let cx2 = executor.clone();
-            executor
-                .scoped(|scope| {
-                    // SAFETY: transmuting to 'static is safe because the future is scoped
-                    // and the underlying string data cannot go out of scope because dropping the scope
-                    // will wait for the task to finish
-                    let new_chunks =
-                        unsafe { std::mem::transmute::<Vec<&str>, Vec<&'static str>>(new_chunks) };
-
-                    let async_extend = self
-                        .chunks
-                        .async_extend(new_chunks.into_iter().map(Chunk::new), cx2);
-
-                    scope.spawn(async_extend);
-                })
-                .await;
+            self.chunks
+                .par_extend(new_chunks.into_par_iter().map(Chunk::new), ());
         } else {
             self.chunks
                 .extend(new_chunks.into_iter().map(Chunk::new), ());
@@ -375,13 +292,8 @@ impl Rope {
         }
     }
 
-    pub fn push_front(&mut self, text: &str, cx: &BackgroundExecutor) {
-        let suffix = mem::replace(self, Rope::from_str(text, cx));
-        self.append(suffix);
-    }
-
-    pub fn push_front_small(&mut self, text: &str) {
-        let suffix = mem::replace(self, Rope::from_str_small(text));
+    pub fn push_front(&mut self, text: &str) {
+        let suffix = mem::replace(self, Rope::from(text));
         self.append(suffix);
     }
 
@@ -665,19 +577,37 @@ impl Rope {
     }
 }
 
-// impl From<String> for Rope {
-//     #[inline(always)]
-//     fn from(text: String) -> Self {
-//         Rope::from(text.as_str())
-//     }
-// }
+impl<'a> From<&'a str> for Rope {
+    fn from(text: &'a str) -> Self {
+        let mut rope = Self::new();
+        rope.push(text);
+        rope
+    }
+}
 
-// impl From<&String> for Rope {
-//     #[inline(always)]
-//     fn from(text: &String) -> Self {
-//         Rope::from(text.as_str())
-//     }
-// }
+impl<'a> FromIterator<&'a str> for Rope {
+    fn from_iter<T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
+        let mut rope = Rope::new();
+        for chunk in iter {
+            rope.push(chunk);
+        }
+        rope
+    }
+}
+
+impl From<String> for Rope {
+    #[inline(always)]
+    fn from(text: String) -> Self {
+        Rope::from(text.as_str())
+    }
+}
+
+impl From<&String> for Rope {
+    #[inline(always)]
+    fn from(text: &String) -> Self {
+        Rope::from(text.as_str())
+    }
+}
 
 impl fmt::Display for Rope {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -1709,7 +1639,6 @@ where
 mod tests {
     use super::*;
     use Bias::{Left, Right};
-    use gpui::TestAppContext;
     use rand::prelude::*;
     use std::{cmp::Ordering, env, io::Read};
     use util::RandomCharIter;
@@ -1719,17 +1648,17 @@ mod tests {
         zlog::init_test();
     }
 
-    #[gpui::test]
-    async fn test_all_4_byte_chars(cx: &mut TestAppContext) {
+    #[test]
+    fn test_all_4_byte_chars() {
         let mut rope = Rope::new();
         let text = "π".repeat(256);
-        rope.push(&text, cx.background_executor());
+        rope.push(&text);
         assert_eq!(rope.text(), text);
     }
 
-    #[gpui::test]
-    fn test_clip(cx: &mut TestAppContext) {
-        let rope = Rope::from_str("π§", cx.background_executor());
+    #[test]
+    fn test_clip() {
+        let rope = Rope::from("π§");
 
         assert_eq!(rope.clip_offset(1, Bias::Left), 0);
         assert_eq!(rope.clip_offset(1, Bias::Right), 4);
@@ -1775,9 +1704,9 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_prev_next_line(cx: &mut TestAppContext) {
-        let rope = Rope::from_str("abc\ndef\nghi\njkl", cx.background_executor());
+    #[test]
+    fn test_prev_next_line() {
+        let rope = Rope::from("abc\ndef\nghi\njkl");
 
         let mut chunks = rope.chunks();
         assert_eq!(chunks.peek().unwrap().chars().next().unwrap(), 'a');
@@ -1819,16 +1748,16 @@ mod tests {
         assert_eq!(chunks.peek(), None);
     }
 
-    #[gpui::test]
-    fn test_lines(cx: &mut TestAppContext) {
-        let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor());
+    #[test]
+    fn test_lines() {
+        let rope = Rope::from("abc\ndefg\nhi");
         let mut lines = rope.chunks().lines();
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), Some("defg"));
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor());
+        let rope = Rope::from("abc\ndefg\nhi\n");
         let mut lines = rope.chunks().lines();
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), Some("defg"));
@@ -1836,14 +1765,14 @@ mod tests {
         assert_eq!(lines.next(), Some(""));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor());
+        let rope = Rope::from("abc\ndefg\nhi");
         let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), Some("defg"));
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor());
+        let rope = Rope::from("abc\ndefg\nhi\n");
         let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
         assert_eq!(lines.next(), Some(""));
         assert_eq!(lines.next(), Some("hi"));
@@ -1851,14 +1780,14 @@ mod tests {
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor());
+        let rope = Rope::from("abc\nlonger line test\nhi");
         let mut lines = rope.chunks().lines();
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), Some("longer line test"));
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor());
+        let rope = Rope::from("abc\nlonger line test\nhi");
         let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), Some("longer line test"));
@@ -1867,7 +1796,7 @@ mod tests {
     }
 
     #[gpui::test(iterations = 100)]
-    async fn test_random_rope(cx: &mut TestAppContext, mut rng: StdRng) {
+    fn test_random_rope(mut rng: StdRng) {
         let operations = env::var("OPERATIONS")
             .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
             .unwrap_or(10);
@@ -1883,7 +1812,7 @@ mod tests {
             let mut new_actual = Rope::new();
             let mut cursor = actual.cursor(0);
             new_actual.append(cursor.slice(start_ix));
-            new_actual.push(&new_text, cx.background_executor());
+            new_actual.push(&new_text);
             cursor.seek_forward(end_ix);
             new_actual.append(cursor.suffix());
             actual = new_actual;
@@ -2183,10 +2112,10 @@ mod tests {
         }
     }
 
-    #[gpui::test]
-    fn test_chunks_equals_str(cx: &mut TestAppContext) {
+    #[test]
+    fn test_chunks_equals_str() {
         let text = "This is a multi-chunk\n& multi-line test string!";
-        let rope = Rope::from_str(text, cx.background_executor());
+        let rope = Rope::from(text);
         for start in 0..text.len() {
             for end in start..text.len() {
                 let range = start..end;
@@ -2229,37 +2158,34 @@ mod tests {
             }
         }
 
-        let rope = Rope::from_str("", cx.background_executor());
+        let rope = Rope::from("");
         assert!(rope.chunks_in_range(0..0).equals_str(""));
         assert!(rope.reversed_chunks_in_range(0..0).equals_str(""));
         assert!(!rope.chunks_in_range(0..0).equals_str("foo"));
         assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo"));
     }
 
-    #[gpui::test]
-    fn test_is_char_boundary(cx: &mut TestAppContext) {
+    #[test]
+    fn test_is_char_boundary() {
         let fixture = "ε°";
-        let rope = Rope::from_str("ε°", cx.background_executor());
+        let rope = Rope::from("ε°");
         for b in 0..=fixture.len() {
             assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b));
         }
         let fixture = "";
-        let rope = Rope::from_str("", cx.background_executor());
+        let rope = Rope::from("");
         for b in 0..=fixture.len() {
             assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b));
         }
         let fixture = "π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©";
-        let rope = Rope::from_str(
-            "π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©",
-            cx.background_executor(),
-        );
+        let rope = Rope::from("π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©");
         for b in 0..=fixture.len() {
             assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b));
         }
     }
 
-    #[gpui::test]
-    fn test_floor_char_boundary(cx: &mut TestAppContext) {
+    #[test]
+    fn test_floor_char_boundary() {
         // polyfill of str::floor_char_boundary
         fn floor_char_boundary(str: &str, index: usize) -> usize {
             if index >= str.len() {
@@ -2275,7 +2201,7 @@ mod tests {
         }
 
         let fixture = "ε°";
-        let rope = Rope::from_str("ε°", cx.background_executor());
+        let rope = Rope::from("ε°");
         for b in 0..=fixture.len() {
             assert_eq!(
                 rope.floor_char_boundary(b),
@@ -2284,7 +2210,7 @@ mod tests {
         }
 
         let fixture = "";
-        let rope = Rope::from_str("", cx.background_executor());
+        let rope = Rope::from("");
         for b in 0..=fixture.len() {
             assert_eq!(
                 rope.floor_char_boundary(b),
@@ -2293,10 +2219,7 @@ mod tests {
         }
 
         let fixture = "π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©";
-        let rope = Rope::from_str(
-            "π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©",
-            cx.background_executor(),
-        );
+        let rope = Rope::from("π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©");
         for b in 0..=fixture.len() {
             assert_eq!(
                 rope.floor_char_boundary(b),
@@ -2305,8 +2228,8 @@ mod tests {
         }
     }
 
-    #[gpui::test]
-    fn test_ceil_char_boundary(cx: &mut TestAppContext) {
+    #[test]
+    fn test_ceil_char_boundary() {
         // polyfill of str::ceil_char_boundary
         fn ceil_char_boundary(str: &str, index: usize) -> usize {
             if index > str.len() {
@@ -2321,22 +2244,19 @@ mod tests {
         }
 
         let fixture = "ε°";
-        let rope = Rope::from_str("ε°", cx.background_executor());
+        let rope = Rope::from("ε°");
         for b in 0..=fixture.len() {
             assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b));
         }
 
         let fixture = "";
-        let rope = Rope::from_str("", cx.background_executor());
+        let rope = Rope::from("");
         for b in 0..=fixture.len() {
             assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b));
         }
 
         let fixture = "π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©";
-        let rope = Rope::from_str(
-            "π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©",
-            cx.background_executor(),
-        );
+        let rope = Rope::from("π΄π π‘π’π΅π£β«οΈβͺοΈπ€\nπ³οΈββ§οΈππ³οΈβππ΄ββ οΈβ³οΈπ¬ππ΄π³οΈπ©");
         for b in 0..=fixture.len() {
             assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b));
         }
  
  
  
    
    @@ -554,7 +554,7 @@ impl RulesLibrary {
 
         let prompt_id = PromptId::new();
         let save = self.store.update(cx, |store, cx| {
-            store.save(prompt_id, None, false, Default::default(), cx)
+            store.save(prompt_id, None, false, "".into(), cx)
         });
         self.picker
             .update(cx, |picker, cx| picker.refresh(window, cx));
@@ -888,13 +888,7 @@ impl RulesLibrary {
             let new_id = PromptId::new();
             let body = rule.body_editor.read(cx).text(cx);
             let save = self.store.update(cx, |store, cx| {
-                store.save(
-                    new_id,
-                    Some(title.into()),
-                    false,
-                    Rope::from_str(&body, cx.background_executor()),
-                    cx,
-                )
+                store.save(new_id, Some(title.into()), false, body.into(), cx)
             });
             self.picker
                 .update(cx, |picker, cx| picker.refresh(window, cx));
  
  
  
    
    @@ -2813,6 +2813,7 @@ mod tests {
                 case_sensitive: false,
                 include_ignored: false,
                 regex: false,
+                center_on_match: false,
             },
             cx,
         );
@@ -2875,6 +2876,7 @@ mod tests {
                 case_sensitive: true,
                 include_ignored: false,
                 regex: false,
+                center_on_match: false,
             },
             cx,
         );
@@ -2912,6 +2914,7 @@ mod tests {
                 case_sensitive: true,
                 include_ignored: false,
                 regex: false,
+                center_on_match: false,
             },
             cx,
         );
@@ -2938,6 +2941,7 @@ mod tests {
                         case_sensitive: Some(search_settings.case_sensitive),
                         include_ignored: Some(search_settings.include_ignored),
                         regex: Some(search_settings.regex),
+                        center_on_match: Some(search_settings.center_on_match),
                     });
                 });
             });
  
  
  
    
    @@ -12,7 +12,9 @@ use editor::{
     SelectionEffects, VimFlavor,
     actions::{Backtab, SelectAll, Tab},
     items::active_match_index,
-    multibuffer_context_lines, vim_flavor,
+    multibuffer_context_lines,
+    scroll::Autoscroll,
+    vim_flavor,
 };
 use futures::{StreamExt, stream::FuturesOrdered};
 use gpui::{
@@ -55,7 +57,9 @@ actions!(
         /// Moves to the next input field.
         NextField,
         /// Toggles the search filters panel.
-        ToggleFilters
+        ToggleFilters,
+        /// Toggles collapse/expand state of all search result excerpts.
+        ToggleAllSearchResults
     ]
 );
 
@@ -118,6 +122,20 @@ pub fn init(cx: &mut App) {
             ProjectSearchView::search_in_new(workspace, action, window, cx)
         });
 
+        register_workspace_action_for_present_search(
+            workspace,
+            |workspace, action: &ToggleAllSearchResults, window, cx| {
+                if let Some(search_view) = workspace
+                    .active_item(cx)
+                    .and_then(|item| item.downcast::<ProjectSearchView>())
+                {
+                    search_view.update(cx, |search_view, cx| {
+                        search_view.toggle_all_search_results(action, window, cx);
+                    });
+                }
+            },
+        );
+
         register_workspace_action_for_present_search(
             workspace,
             |workspace, _: &menu::Cancel, window, cx| {
@@ -217,6 +235,7 @@ pub struct ProjectSearchView {
     replace_enabled: bool,
     included_opened_only: bool,
     regex_language: Option<Arc<Language>>,
+    results_collapsed: bool,
     _subscriptions: Vec<Subscription>,
 }
 
@@ -649,6 +668,44 @@ impl Item for ProjectSearchView {
     fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option<Vec<BreadcrumbText>> {
         self.results_editor.breadcrumbs(theme, cx)
     }
+
+    fn breadcrumb_prefix(
+        &self,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Option<gpui::AnyElement> {
+        if !self.has_matches() {
+            return None;
+        }
+
+        let is_collapsed = self.results_collapsed;
+
+        let (icon, tooltip_label) = if is_collapsed {
+            (IconName::ChevronUpDown, "Expand All Search Results")
+        } else {
+            (IconName::ChevronDownUp, "Collapse All Search Results")
+        };
+
+        let focus_handle = self.query_editor.focus_handle(cx);
+
+        Some(
+            IconButton::new("project-search-collapse-expand", icon)
+                .shape(IconButtonShape::Square)
+                .icon_size(IconSize::Small)
+                .tooltip(move |_, cx| {
+                    Tooltip::for_action_in(
+                        tooltip_label,
+                        &ToggleAllSearchResults,
+                        &focus_handle,
+                        cx,
+                    )
+                })
+                .on_click(cx.listener(|this, _, window, cx| {
+                    this.toggle_all_search_results(&ToggleAllSearchResults, window, cx);
+                }))
+                .into_any_element(),
+        )
+    }
 }
 
 impl ProjectSearchView {
@@ -751,6 +808,34 @@ impl ProjectSearchView {
         });
     }
 
+    fn toggle_all_search_results(
+        &mut self,
+        _: &ToggleAllSearchResults,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.results_collapsed = !self.results_collapsed;
+        self.update_results_visibility(cx);
+    }
+
+    fn update_results_visibility(&mut self, cx: &mut Context<Self>) {
+        self.results_editor.update(cx, |editor, cx| {
+            let multibuffer = editor.buffer().read(cx);
+            let buffer_ids = multibuffer.excerpt_buffer_ids();
+
+            if self.results_collapsed {
+                for buffer_id in buffer_ids {
+                    editor.fold_buffer(buffer_id, cx);
+                }
+            } else {
+                for buffer_id in buffer_ids {
+                    editor.unfold_buffer(buffer_id, cx);
+                }
+            }
+        });
+        cx.notify();
+    }
+
     pub fn new(
         workspace: WeakEntity<Workspace>,
         entity: Entity<ProjectSearch>,
@@ -909,8 +994,10 @@ impl ProjectSearchView {
             replace_enabled: false,
             included_opened_only: false,
             regex_language: None,
+            results_collapsed: false,
             _subscriptions: subscriptions,
         };
+
         this.entity_changed(window, cx);
         this
     }
@@ -1346,8 +1433,13 @@ impl ProjectSearchView {
             self.results_editor.update(cx, |editor, cx| {
                 let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
                 let range_to_select = editor.range_for_match(&range_to_select, collapse);
+                let autoscroll = if EditorSettings::get_global(cx).search.center_on_match {
+                    Autoscroll::center()
+                } else {
+                    Autoscroll::fit()
+                };
                 editor.unfold_ranges(std::slice::from_ref(&range_to_select), false, true, cx);
-                editor.change_selections(Default::default(), window, cx, |s| {
+                editor.change_selections(SelectionEffects::scroll(autoscroll), window, cx, |s| {
                     s.select_ranges([range_to_select])
                 });
             });
@@ -1404,6 +1496,7 @@ impl ProjectSearchView {
 
     fn entity_changed(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         let match_ranges = self.entity.read(cx).match_ranges.clone();
+
         if match_ranges.is_empty() {
             self.active_match_index = None;
             self.results_editor.update(cx, |editor, cx| {
@@ -1961,6 +2054,8 @@ impl Render for ProjectSearchBar {
             })
             .unwrap_or_else(|| "0/0".to_string());
 
+        let query_focus = search.query_editor.focus_handle(cx);
+
         let query_column = input_base_styles(InputPanel::Query)
             .on_action(cx.listener(|this, action, window, cx| this.confirm(action, window, cx)))
             .on_action(cx.listener(|this, action, window, cx| {
@@ -1990,11 +2085,9 @@ impl Render for ProjectSearchBar {
                     )),
             );
 
-        let query_focus = search.query_editor.focus_handle(cx);
-
         let matches_column = h_flex()
-            .pl_2()
-            .ml_2()
+            .ml_1()
+            .pl_1p5()
             .border_l_1()
             .border_color(theme_colors.border_variant)
             .child(render_action_button(
@@ -2346,7 +2439,15 @@ pub fn perform_project_search(
 
 #[cfg(test)]
 pub mod tests {
-    use std::{ops::Deref as _, sync::Arc, time::Duration};
+    use std::{
+        ops::Deref as _,
+        path::PathBuf,
+        sync::{
+            Arc,
+            atomic::{self, AtomicUsize},
+        },
+        time::Duration,
+    };
 
     use super::*;
     use editor::{DisplayPoint, display_map::DisplayRow};
@@ -4247,6 +4348,8 @@ pub mod tests {
         )
         .await;
 
+        let requests_count = Arc::new(AtomicUsize::new(0));
+        let closure_requests_count = requests_count.clone();
         let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
         let language_registry = project.read_with(cx, |project, _| project.languages().clone());
         let language = rust_lang();
@@ -4258,21 +4361,26 @@ pub mod tests {
                     inlay_hint_provider: Some(lsp::OneOf::Left(true)),
                     ..lsp::ServerCapabilities::default()
                 },
-                initializer: Some(Box::new(|fake_server| {
-                    fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>(
-                        move |_, _| async move {
-                            Ok(Some(vec![lsp::InlayHint {
-                                position: lsp::Position::new(0, 17),
-                                label: lsp::InlayHintLabel::String(": i32".to_owned()),
-                                kind: Some(lsp::InlayHintKind::TYPE),
-                                text_edits: None,
-                                tooltip: None,
-                                padding_left: None,
-                                padding_right: None,
-                                data: None,
-                            }]))
-                        },
-                    );
+                initializer: Some(Box::new(move |fake_server| {
+                    let requests_count = closure_requests_count.clone();
+                    fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>({
+                        move |_, _| {
+                            let requests_count = requests_count.clone();
+                            async move {
+                                requests_count.fetch_add(1, atomic::Ordering::Release);
+                                Ok(Some(vec![lsp::InlayHint {
+                                    position: lsp::Position::new(0, 17),
+                                    label: lsp::InlayHintLabel::String(": i32".to_owned()),
+                                    kind: Some(lsp::InlayHintKind::TYPE),
+                                    text_edits: None,
+                                    tooltip: None,
+                                    padding_left: None,
+                                    padding_right: None,
+                                    data: None,
+                                }]))
+                            }
+                        }
+                    });
                 })),
                 ..FakeLspAdapter::default()
             },
@@ -4286,7 +4394,7 @@ pub mod tests {
         });
 
         perform_search(search_view, "let ", cx);
-        let _fake_server = fake_servers.next().await.unwrap();
+        let fake_server = fake_servers.next().await.unwrap();
         cx.executor().advance_clock(Duration::from_secs(1));
         cx.executor().run_until_parked();
         search_view
@@ -4299,11 +4407,127 @@ pub mod tests {
                 );
             })
             .unwrap();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            1,
+            "New hints should have been queried",
+        );
 
         // Can do the 2nd search without any panics
         perform_search(search_view, "let ", cx);
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        search_view
+            .update(cx, |search_view, _, cx| {
+                assert_eq!(
+                    search_view
+                        .results_editor
+                        .update(cx, |editor, cx| editor.display_text(cx)),
+                    "\n\nfn main() { let a: i32 = 2; }\n"
+                );
+            })
+            .unwrap();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            2,
+            "We did drop the previous buffer when cleared the old project search results, hence another query was made",
+        );
+
+        let singleton_editor = window
+            .update(cx, |workspace, window, cx| {
+                workspace.open_abs_path(
+                    PathBuf::from(path!("/dir/main.rs")),
+                    workspace::OpenOptions::default(),
+                    window,
+                    cx,
+                )
+            })
+            .unwrap()
+            .await
+            .unwrap()
+            .downcast::<Editor>()
+            .unwrap();
         cx.executor().advance_clock(Duration::from_millis(100));
         cx.executor().run_until_parked();
+        singleton_editor.update(cx, |editor, cx| {
+            assert_eq!(
+                editor.display_text(cx),
+                "fn main() { let a: i32 = 2; }\n",
+                "Newly opened editor should have the correct text with hints",
+            );
+        });
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            2,
+            "Opening the same buffer again should reuse the cached hints",
+        );
+
+        window
+            .update(cx, |_, window, cx| {
+                singleton_editor.update(cx, |editor, cx| {
+                    editor.handle_input("test", window, cx);
+                });
+            })
+            .unwrap();
+
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        singleton_editor.update(cx, |editor, cx| {
+            assert_eq!(
+                editor.display_text(cx),
+                "testfn main() { l: i32et a = 2; }\n",
+                "Newly opened editor should have the correct text with hints",
+            );
+        });
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            3,
+            "We have edited the buffer and should send a new request",
+        );
+
+        window
+            .update(cx, |_, window, cx| {
+                singleton_editor.update(cx, |editor, cx| {
+                    editor.undo(&editor::actions::Undo, window, cx);
+                });
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            4,
+            "We have edited the buffer again and should send a new request again",
+        );
+        singleton_editor.update(cx, |editor, cx| {
+            assert_eq!(
+                editor.display_text(cx),
+                "fn main() { let a: i32 = 2; }\n",
+                "Newly opened editor should have the correct text with hints",
+            );
+        });
+        project.update(cx, |_, cx| {
+            cx.emit(project::Event::RefreshInlayHints {
+                server_id: fake_server.server.server_id(),
+                request_id: Some(1),
+            });
+        });
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            5,
+            "After a simulated server refresh request, we should have sent another request",
+        );
+
+        perform_search(search_view, "let ", cx);
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            5,
+            "New project search should reuse the cached hints",
+        );
         search_view
             .update(cx, |search_view, _, cx| {
                 assert_eq!(
  
  
  
    
    @@ -46,7 +46,6 @@ pub(crate) fn input_base_styles(border_color: Hsla, map: impl FnOnce(Div) -> Div
         .h_8()
         .pl_2()
         .pr_1()
-        .py_1()
         .border_1()
         .border_color(border_color)
         .rounded_md()
  
  
  
    
    @@ -197,6 +197,19 @@ pub struct EditorSettingsContent {
     ///
     /// Default: [`DocumentColorsRenderMode::Inlay`]
     pub lsp_document_colors: Option<DocumentColorsRenderMode>,
+    /// When to show the scrollbar in the completion menu.
+    /// This setting can take four values:
+    ///
+    /// 1. Show the scrollbar if there's important information or
+    ///    follow the system's configured behavior
+    ///   "auto"
+    /// 2. Match the system's configured behavior:
+    ///    "system"
+    /// 3. Always show the scrollbar:
+    ///    "always"
+    /// 4. Never show the scrollbar:
+    ///    "never" (default)
+    pub completion_menu_scrollbar: Option<ShowScrollbar>,
 }
 
 // Toolbar related settings
@@ -699,6 +712,8 @@ pub struct SearchSettingsContent {
     pub case_sensitive: Option<bool>,
     pub include_ignored: Option<bool>,
     pub regex: Option<bool>,
+    /// Whether to center the cursor on each search match when navigating.
+    pub center_on_match: Option<bool>,
 }
 
 #[skip_serializing_none]
  
  
  
    
    @@ -299,6 +299,7 @@ impl VsCodeSettings {
             toolbar: None,
             use_smartcase_search: self.read_bool("search.smartCase"),
             vertical_scroll_margin: self.read_f32("editor.cursorSurroundingLines"),
+            completion_menu_scrollbar: None,
         }
     }
 
  
  
  
    
    @@ -2450,6 +2450,29 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
                     metadata: None,
                     files: USER,
                 }),
+                SettingsPageItem::SettingItem(SettingItem {
+                    title: "Center on Match",
+                    description: "Whether to center the current match in the editor",
+                    field: Box::new(SettingField {
+                        json_path: Some("editor.search.center_on_match"),
+                        pick: |settings_content| {
+                            settings_content
+                                .editor
+                                .search
+                                .as_ref()
+                                .and_then(|search| search.center_on_match.as_ref())
+                        },
+                        write: |settings_content, value| {
+                            settings_content
+                                .editor
+                                .search
+                                .get_or_insert_default()
+                                .center_on_match = value;
+                        },
+                    }),
+                    metadata: None,
+                    files: USER,
+                }),
                 SettingsPageItem::SettingItem(SettingItem {
                     title: "Seed Search Query From Cursor",
                     description: "When to populate a new search's query based on the text under the cursor.",
@@ -6518,6 +6541,19 @@ fn language_settings_data() -> Vec<SettingsPageItem> {
             metadata: None,
             files: USER | PROJECT,
         }),
+        SettingsPageItem::SettingItem(SettingItem {
+            title: "Completion Menu Scrollbar",
+            description: "When to show the scrollbar in the completion menu.",
+            field: Box::new(SettingField {
+                json_path: Some("editor.completion_menu_scrollbar"),
+                pick: |settings_content| settings_content.editor.completion_menu_scrollbar.as_ref(),
+                write: |settings_content, value| {
+                    settings_content.editor.completion_menu_scrollbar = value;
+                },
+            }),
+            metadata: None,
+            files: USER,
+        }),
         SettingsPageItem::SectionHeader("Inlay Hints"),
         SettingsPageItem::SettingItem(SettingItem {
             title: "Enabled",
  
  
  
    
    @@ -14,7 +14,6 @@ path = "src/streaming_diff.rs"
 [dependencies]
 ordered-float.workspace = true
 rope.workspace = true
-gpui.workspace = true
 
 [dev-dependencies]
 rand.workspace = true
  
  
  
    
    @@ -503,12 +503,11 @@ fn is_line_end(point: Point, text: &Rope) -> bool {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use gpui::BackgroundExecutor;
     use rand::prelude::*;
     use std::env;
 
-    #[gpui::test]
-    fn test_delete_first_of_two_lines(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_delete_first_of_two_lines() {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Delete { bytes: 5 },
@@ -524,18 +523,18 @@ mod tests {
             apply_line_operations(old_text, &new_text, &expected_line_ops)
         );
 
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(line_ops, expected_line_ops);
     }
 
-    #[gpui::test]
-    fn test_delete_second_of_two_lines(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_delete_second_of_two_lines() {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
             CharOperation::Delete { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -551,8 +550,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_add_new_line(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_add_new_line() {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 9 },
@@ -560,7 +559,7 @@ mod tests {
                 text: "\ncccc".into(),
             },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -575,15 +574,15 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_delete_line_in_middle(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_delete_line_in_middle() {
         let old_text = "aaaa\nbbbb\ncccc";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
             CharOperation::Delete { bytes: 5 },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -599,8 +598,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_replace_line(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_replace_line() {
         let old_text = "aaaa\nbbbb\ncccc";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
@@ -610,7 +609,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 5 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -627,8 +626,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_multiple_edits_on_different_lines(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_multiple_edits_on_different_lines() {
         let old_text = "aaaa\nbbbb\ncccc\ndddd";
         let char_ops = vec![
             CharOperation::Insert { text: "A".into() },
@@ -639,7 +638,7 @@ mod tests {
                 text: "\nEEEE".into(),
             },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -657,15 +656,15 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_edit_at_end_of_line(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_edit_at_end_of_line() {
         let old_text = "aaaa\nbbbb\ncccc";
         let char_ops = vec![
             CharOperation::Keep { bytes: 4 },
             CharOperation::Insert { text: "A".into() },
             CharOperation::Keep { bytes: 10 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -681,8 +680,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_insert_newline_character(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_insert_newline_character() {
         let old_text = "aaaabbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 4 },
@@ -690,7 +689,7 @@ mod tests {
             CharOperation::Keep { bytes: 4 },
         ];
         let new_text = apply_char_operations(old_text, &char_ops);
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -704,14 +703,14 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_insert_newline_at_beginning(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_insert_newline_at_beginning() {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Insert { text: "\n".into() },
             CharOperation::Keep { bytes: 9 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -726,15 +725,15 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_delete_newline(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_delete_newline() {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 4 },
             CharOperation::Delete { bytes: 1 },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -750,8 +749,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_insert_multiple_newlines(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_insert_multiple_newlines() {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
@@ -760,7 +759,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -776,15 +775,15 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_delete_multiple_newlines(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_delete_multiple_newlines() {
         let old_text = "aaaa\n\n\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
             CharOperation::Delete { bytes: 2 },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -800,8 +799,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_complex_scenario(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_complex_scenario() {
         let old_text = "line1\nline2\nline3\nline4";
         let char_ops = vec![
             CharOperation::Keep { bytes: 6 },
@@ -815,7 +814,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 6 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -835,8 +834,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_cleaning_up_common_suffix(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_cleaning_up_common_suffix() {
         let old_text = concat!(
             "        for y in 0..size.y() {\n",
             "            let a = 10;\n",
@@ -884,7 +883,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 1 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
         assert_eq!(
             line_ops,
             vec![
@@ -902,8 +901,8 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    fn test_random_diffs(cx: &mut gpui::TestAppContext) {
+    #[test]
+    fn test_random_diffs() {
         random_test(|mut rng| {
             let old_text_len = env::var("OLD_TEXT_LEN")
                 .map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable"))
@@ -923,19 +922,15 @@ mod tests {
             assert_eq!(patched, new);
 
             // Test char_ops_to_line_ops
-            let line_ops = char_ops_to_line_ops(&old, &char_operations, cx.background_executor());
+            let line_ops = char_ops_to_line_ops(&old, &char_operations);
             println!("line operations: {:?}", line_ops);
             let patched = apply_line_operations(&old, &new, &line_ops);
             assert_eq!(patched, new);
         });
     }
 
-    fn char_ops_to_line_ops(
-        old_text: &str,
-        char_ops: &[CharOperation],
-        executor: &BackgroundExecutor,
-    ) -> Vec<LineOperation> {
-        let old_rope = Rope::from_str(old_text, executor);
+    fn char_ops_to_line_ops(old_text: &str, char_ops: &[CharOperation]) -> Vec<LineOperation> {
+        let old_rope = Rope::from(old_text);
         let mut diff = LineDiff::default();
         for op in char_ops {
             diff.push_char_operation(op, &old_rope);
  
  
  
    
    @@ -15,12 +15,10 @@ doctest = false
 
 [dependencies]
 arrayvec = "0.7.1"
+rayon.workspace = true
 log.workspace = true
-futures.workspace = true
-itertools.workspace = true
 
 [dev-dependencies]
 ctor.workspace = true
 rand.workspace = true
 zlog.workspace = true
-pollster = "0.4.0"
  
  
  
    
    @@ -3,8 +3,7 @@ mod tree_map;
 
 use arrayvec::ArrayVec;
 pub use cursor::{Cursor, FilterCursor, Iter};
-use futures::{StreamExt, stream};
-use itertools::Itertools as _;
+use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator as _};
 use std::marker::PhantomData;
 use std::mem;
 use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
@@ -15,18 +14,6 @@ pub const TREE_BASE: usize = 2;
 #[cfg(not(test))]
 pub const TREE_BASE: usize = 6;
 
-pub trait BackgroundSpawn {
-    type Task<R>: Future<Output = R> + Send + Sync
-    where
-        R: Send + Sync;
-    fn background_spawn<R>(
-        &self,
-        future: impl Future<Output = R> + Send + Sync + 'static,
-    ) -> Self::Task<R>
-    where
-        R: Send + Sync + 'static;
-}
-
 /// An item that can be stored in a [`SumTree`]
 ///
 /// Must be summarized by a type that implements [`Summary`]
@@ -311,71 +298,62 @@ impl<T: Item> SumTree<T> {
         }
     }
 
-    pub async fn from_iter_async<I, S>(iter: I, spawn: S) -> Self
+    pub fn from_par_iter<I, Iter>(iter: I, cx: <T::Summary as Summary>::Context<'_>) -> Self
     where
-        T: 'static + Send + Sync,
-        for<'a> T::Summary: Summary<Context<'a> = ()> + Send + Sync,
-        S: BackgroundSpawn,
-        I: IntoIterator<Item = T>,
+        I: IntoParallelIterator<Iter = Iter>,
+        Iter: IndexedParallelIterator<Item = T>,
+        T: Send + Sync,
+        T::Summary: Send + Sync,
+        for<'a> <T::Summary as Summary>::Context<'a>: Sync,
     {
-        let mut futures = vec![];
-        let chunks = iter.into_iter().chunks(2 * TREE_BASE);
-        for chunk in chunks.into_iter() {
-            let items: ArrayVec<T, { 2 * TREE_BASE }> = chunk.into_iter().collect();
-            futures.push(async move {
+        let mut nodes = iter
+            .into_par_iter()
+            .chunks(2 * TREE_BASE)
+            .map(|items| {
+                let items: ArrayVec<T, { 2 * TREE_BASE }> = items.into_iter().collect();
                 let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> =
-                    items.iter().map(|item| item.summary(())).collect();
+                    items.iter().map(|item| item.summary(cx)).collect();
                 let mut summary = item_summaries[0].clone();
                 for item_summary in &item_summaries[1..] {
-                    <T::Summary as Summary>::add_summary(&mut summary, item_summary, ());
+                    <T::Summary as Summary>::add_summary(&mut summary, item_summary, cx);
                 }
                 SumTree(Arc::new(Node::Leaf {
                     summary,
                     items,
                     item_summaries,
                 }))
-            });
-        }
-
-        let mut nodes = futures::stream::iter(futures)
-            .map(|future| spawn.background_spawn(future))
-            .buffered(4)
-            .collect::<Vec<_>>()
-            .await;
+            })
+            .collect::<Vec<_>>();
 
         let mut height = 0;
         while nodes.len() > 1 {
             height += 1;
-            let current_nodes = mem::take(&mut nodes);
-            nodes = stream::iter(current_nodes)
+            nodes = nodes
+                .into_par_iter()
                 .chunks(2 * TREE_BASE)
-                .map(|chunk| {
-                    spawn.background_spawn(async move {
-                        let child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }> =
-                            chunk.into_iter().collect();
-                        let child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> = child_trees
-                            .iter()
-                            .map(|child_tree| child_tree.summary().clone())
-                            .collect();
-                        let mut summary = child_summaries[0].clone();
-                        for child_summary in &child_summaries[1..] {
-                            <T::Summary as Summary>::add_summary(&mut summary, child_summary, ());
-                        }
-                        SumTree(Arc::new(Node::Internal {
-                            height,
-                            summary,
-                            child_summaries,
-                            child_trees,
-                        }))
-                    })
+                .map(|child_nodes| {
+                    let child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }> =
+                        child_nodes.into_iter().collect();
+                    let child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> = child_trees
+                        .iter()
+                        .map(|child_tree| child_tree.summary().clone())
+                        .collect();
+                    let mut summary = child_summaries[0].clone();
+                    for child_summary in &child_summaries[1..] {
+                        <T::Summary as Summary>::add_summary(&mut summary, child_summary, cx);
+                    }
+                    SumTree(Arc::new(Node::Internal {
+                        height,
+                        summary,
+                        child_summaries,
+                        child_trees,
+                    }))
                 })
-                .buffered(4)
-                .collect::<Vec<_>>()
-                .await;
+                .collect::<Vec<_>>();
         }
 
         if nodes.is_empty() {
-            Self::new(())
+            Self::new(cx)
         } else {
             debug_assert_eq!(nodes.len(), 1);
             nodes.pop().unwrap()
@@ -619,15 +597,15 @@ impl<T: Item> SumTree<T> {
         self.append(Self::from_iter(iter, cx), cx);
     }
 
-    pub async fn async_extend<S, I>(&mut self, iter: I, spawn: S)
+    pub fn par_extend<I, Iter>(&mut self, iter: I, cx: <T::Summary as Summary>::Context<'_>)
     where
-        S: BackgroundSpawn,
-        I: IntoIterator<Item = T> + 'static,
-        T: 'static + Send + Sync,
-        for<'b> T::Summary: Summary<Context<'b> = ()> + Send + Sync,
+        I: IntoParallelIterator<Iter = Iter>,
+        Iter: IndexedParallelIterator<Item = T>,
+        T: Send + Sync,
+        T::Summary: Send + Sync,
+        for<'a> <T::Summary as Summary>::Context<'a>: Sync,
     {
-        let other = Self::from_iter_async(iter, spawn);
-        self.append(other.await, ());
+        self.append(Self::from_par_iter(iter, cx), cx);
     }
 
     pub fn push(&mut self, item: T, cx: <T::Summary as Summary>::Context<'_>) {
@@ -1092,23 +1070,6 @@ mod tests {
 
     #[test]
     fn test_random() {
-        struct NoSpawn;
-        impl BackgroundSpawn for NoSpawn {
-            type Task<R>
-                = std::pin::Pin<Box<dyn Future<Output = R> + Sync + Send>>
-            where
-                R: Send + Sync;
-            fn background_spawn<R>(
-                &self,
-                future: impl Future<Output = R> + Send + Sync + 'static,
-            ) -> Self::Task<R>
-            where
-                R: Send + Sync + 'static,
-            {
-                Box::pin(future)
-            }
-        }
-
         let mut starting_seed = 0;
         if let Ok(value) = std::env::var("SEED") {
             starting_seed = value.parse().expect("invalid SEED variable");
@@ -1134,7 +1095,7 @@ mod tests {
                     .sample_iter(StandardUniform)
                     .take(count)
                     .collect::<Vec<_>>();
-                pollster::block_on(tree.async_extend(items, NoSpawn));
+                tree.par_extend(items, ());
             }
 
             for _ in 0..num_operations {
@@ -1156,7 +1117,7 @@ mod tests {
                     if rng.random() {
                         new_tree.extend(new_items, ());
                     } else {
-                        pollster::block_on(new_tree.async_extend(new_items, NoSpawn));
+                        new_tree.par_extend(new_items, ());
                     }
                     cursor.seek(&Count(splice_end), Bias::Right);
                     new_tree.append(cursor.slice(&tree_end, Bias::Right), ());
  
  
  
    
    @@ -448,11 +448,12 @@ impl PickerDelegate for TasksModalDelegate {
         let template = resolved_task.original_task();
         let display_label = resolved_task.display_label();
 
-        let mut tooltip_label_text = if display_label != &template.label {
-            resolved_task.resolved_label.clone()
-        } else {
-            String::new()
-        };
+        let mut tooltip_label_text =
+            if display_label != &template.label || source_kind == &TaskSourceKind::UserInput {
+                resolved_task.resolved_label.clone()
+            } else {
+                String::new()
+            };
 
         if resolved_task.resolved.command_label != resolved_task.resolved_label {
             if !tooltip_label_text.trim().is_empty() {
  
  
  
    
    @@ -28,7 +28,6 @@ rope.workspace = true
 smallvec.workspace = true
 sum_tree.workspace = true
 util.workspace = true
-gpui.workspace = true
 
 [dev-dependencies]
 collections = { workspace = true, features = ["test-support"] }
  
  
  
    
    @@ -14,29 +14,24 @@ fn init_logger() {
     zlog::init_test();
 }
 
-#[gpui::test]
-fn test_edit(cx: &mut gpui::TestAppContext) {
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "abc",
-        cx.background_executor(),
-    );
+#[test]
+fn test_edit() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc");
     assert_eq!(buffer.text(), "abc");
-    buffer.edit([(3..3, "def")], cx.background_executor());
+    buffer.edit([(3..3, "def")]);
     assert_eq!(buffer.text(), "abcdef");
-    buffer.edit([(0..0, "ghi")], cx.background_executor());
+    buffer.edit([(0..0, "ghi")]);
     assert_eq!(buffer.text(), "ghiabcdef");
-    buffer.edit([(5..5, "jkl")], cx.background_executor());
+    buffer.edit([(5..5, "jkl")]);
     assert_eq!(buffer.text(), "ghiabjklcdef");
-    buffer.edit([(6..7, "")], cx.background_executor());
+    buffer.edit([(6..7, "")]);
     assert_eq!(buffer.text(), "ghiabjlcdef");
-    buffer.edit([(4..9, "mno")], cx.background_executor());
+    buffer.edit([(4..9, "mno")]);
     assert_eq!(buffer.text(), "ghiamnoef");
 }
 
 #[gpui::test(iterations = 100)]
-fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
+fn test_random_edits(mut rng: StdRng) {
     let operations = env::var("OPERATIONS")
         .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
         .unwrap_or(10);
@@ -49,7 +44,6 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
         ReplicaId::LOCAL,
         BufferId::new(1).unwrap(),
         reference_string.clone(),
-        cx.background_executor(),
     );
     LineEnding::normalize(&mut reference_string);
 
@@ -62,7 +56,7 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
     );
 
     for _i in 0..operations {
-        let (edits, _) = buffer.randomly_edit(&mut rng, 5, cx.background_executor());
+        let (edits, _) = buffer.randomly_edit(&mut rng, 5);
         for (old_range, new_text) in edits.iter().rev() {
             reference_string.replace_range(old_range.clone(), new_text);
         }
@@ -112,11 +106,7 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
         let mut text = old_buffer.visible_text.clone();
         for edit in edits {
             let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
-            text.replace(
-                edit.new.start..edit.new.start + edit.old.len(),
-                &new_text,
-                cx.background_executor(),
-            );
+            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
         }
         assert_eq!(text.to_string(), buffer.text());
 
@@ -171,18 +161,14 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
         let mut text = old_buffer.visible_text.clone();
         for edit in subscription_edits.into_inner() {
             let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
-            text.replace(
-                edit.new.start..edit.new.start + edit.old.len(),
-                &new_text,
-                cx.background_executor(),
-            );
+            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
         }
         assert_eq!(text.to_string(), buffer.text());
     }
 }
 
-#[gpui::test]
-fn test_line_endings(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_line_endings() {
     assert_eq!(LineEnding::detect(&"πβ
\n".repeat(1000)), LineEnding::Unix);
     assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix);
     assert_eq!(
@@ -198,34 +184,25 @@ fn test_line_endings(cx: &mut gpui::TestAppContext) {
         ReplicaId::LOCAL,
         BufferId::new(1).unwrap(),
         "one\r\ntwo\rthree",
-        cx.background_executor(),
     );
     assert_eq!(buffer.text(), "one\ntwo\nthree");
     assert_eq!(buffer.line_ending(), LineEnding::Windows);
     buffer.check_invariants();
 
-    buffer.edit(
-        [(buffer.len()..buffer.len(), "\r\nfour")],
-        cx.background_executor(),
-    );
-    buffer.edit([(0..0, "zero\r\n")], cx.background_executor());
+    buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]);
+    buffer.edit([(0..0, "zero\r\n")]);
     assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
     assert_eq!(buffer.line_ending(), LineEnding::Windows);
     buffer.check_invariants();
 }
 
-#[gpui::test]
-fn test_line_len(cx: &mut gpui::TestAppContext) {
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "",
-        cx.background_executor(),
-    );
-    buffer.edit([(0..0, "abcd\nefg\nhij")], cx.background_executor());
-    buffer.edit([(12..12, "kl\nmno")], cx.background_executor());
-    buffer.edit([(18..18, "\npqrs\n")], cx.background_executor());
-    buffer.edit([(18..21, "\nPQ")], cx.background_executor());
+#[test]
+fn test_line_len() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
+    buffer.edit([(0..0, "abcd\nefg\nhij")]);
+    buffer.edit([(12..12, "kl\nmno")]);
+    buffer.edit([(18..18, "\npqrs\n")]);
+    buffer.edit([(18..21, "\nPQ")]);
 
     assert_eq!(buffer.line_len(0), 4);
     assert_eq!(buffer.line_len(1), 3);
@@ -235,15 +212,10 @@ fn test_line_len(cx: &mut gpui::TestAppContext) {
     assert_eq!(buffer.line_len(5), 0);
 }
 
-#[gpui::test]
-fn test_common_prefix_at_position(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_common_prefix_at_position() {
     let text = "a = str; b = δα";
-    let buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        text,
-        cx.background_executor(),
-    );
+    let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text);
 
     let offset1 = offset_after(text, "str");
     let offset2 = offset_after(text, "δα");
@@ -289,13 +261,12 @@ fn test_common_prefix_at_position(cx: &mut gpui::TestAppContext) {
     }
 }
 
-#[gpui::test]
-fn test_text_summary_for_range(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_text_summary_for_range() {
     let buffer = Buffer::new(
         ReplicaId::LOCAL,
         BufferId::new(1).unwrap(),
         "ab\nefg\nhklm\nnopqrs\ntuvwxyz",
-        cx.background_executor(),
     );
     assert_eq!(
         buffer.text_summary_for_range::<TextSummary, _>(0..2),
@@ -383,18 +354,13 @@ fn test_text_summary_for_range(cx: &mut gpui::TestAppContext) {
     );
 }
 
-#[gpui::test]
-fn test_chars_at(cx: &mut gpui::TestAppContext) {
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "",
-        cx.background_executor(),
-    );
-    buffer.edit([(0..0, "abcd\nefgh\nij")], cx.background_executor());
-    buffer.edit([(12..12, "kl\nmno")], cx.background_executor());
-    buffer.edit([(18..18, "\npqrs")], cx.background_executor());
-    buffer.edit([(18..21, "\nPQ")], cx.background_executor());
+#[test]
+fn test_chars_at() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
+    buffer.edit([(0..0, "abcd\nefgh\nij")]);
+    buffer.edit([(12..12, "kl\nmno")]);
+    buffer.edit([(18..18, "\npqrs")]);
+    buffer.edit([(18..21, "\nPQ")]);
 
     let chars = buffer.chars_at(Point::new(0, 0));
     assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
@@ -412,53 +378,43 @@ fn test_chars_at(cx: &mut gpui::TestAppContext) {
     assert_eq!(chars.collect::<String>(), "PQrs");
 
     // Regression test:
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "",
-        cx.background_executor(),
-    );
-    buffer.edit([(0..0, "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n")], cx.background_executor());
-    buffer.edit([(60..60, "\n")], cx.background_executor());
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
+    buffer.edit([(0..0, "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n")]);
+    buffer.edit([(60..60, "\n")]);
 
     let chars = buffer.chars_at(Point::new(6, 0));
     assert_eq!(chars.collect::<String>(), "    \"xray_wasm\",\n]\n");
 }
 
-#[gpui::test]
-fn test_anchors(cx: &mut gpui::TestAppContext) {
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "",
-        cx.background_executor(),
-    );
-    buffer.edit([(0..0, "abc")], cx.background_executor());
+#[test]
+fn test_anchors() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
+    buffer.edit([(0..0, "abc")]);
     let left_anchor = buffer.anchor_before(2);
     let right_anchor = buffer.anchor_after(2);
 
-    buffer.edit([(1..1, "def\n")], cx.background_executor());
+    buffer.edit([(1..1, "def\n")]);
     assert_eq!(buffer.text(), "adef\nbc");
     assert_eq!(left_anchor.to_offset(&buffer), 6);
     assert_eq!(right_anchor.to_offset(&buffer), 6);
     assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
     assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
 
-    buffer.edit([(2..3, "")], cx.background_executor());
+    buffer.edit([(2..3, "")]);
     assert_eq!(buffer.text(), "adf\nbc");
     assert_eq!(left_anchor.to_offset(&buffer), 5);
     assert_eq!(right_anchor.to_offset(&buffer), 5);
     assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
     assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
 
-    buffer.edit([(5..5, "ghi\n")], cx.background_executor());
+    buffer.edit([(5..5, "ghi\n")]);
     assert_eq!(buffer.text(), "adf\nbghi\nc");
     assert_eq!(left_anchor.to_offset(&buffer), 5);
     assert_eq!(right_anchor.to_offset(&buffer), 9);
     assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
     assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
 
-    buffer.edit([(7..9, "")], cx.background_executor());
+    buffer.edit([(7..9, "")]);
     assert_eq!(buffer.text(), "adf\nbghc");
     assert_eq!(left_anchor.to_offset(&buffer), 5);
     assert_eq!(right_anchor.to_offset(&buffer), 7);
@@ -548,18 +504,13 @@ fn test_anchors(cx: &mut gpui::TestAppContext) {
     );
 }
 
-#[gpui::test]
-fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) {
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "",
-        cx.background_executor(),
-    );
+#[test]
+fn test_anchors_at_start_and_end() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
     let before_start_anchor = buffer.anchor_before(0);
     let after_end_anchor = buffer.anchor_after(0);
 
-    buffer.edit([(0..0, "abc")], cx.background_executor());
+    buffer.edit([(0..0, "abc")]);
     assert_eq!(buffer.text(), "abc");
     assert_eq!(before_start_anchor.to_offset(&buffer), 0);
     assert_eq!(after_end_anchor.to_offset(&buffer), 3);
@@ -567,8 +518,8 @@ fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) {
     let after_start_anchor = buffer.anchor_after(0);
     let before_end_anchor = buffer.anchor_before(3);
 
-    buffer.edit([(3..3, "def")], cx.background_executor());
-    buffer.edit([(0..0, "ghi")], cx.background_executor());
+    buffer.edit([(3..3, "def")]);
+    buffer.edit([(0..0, "ghi")]);
     assert_eq!(buffer.text(), "ghiabcdef");
     assert_eq!(before_start_anchor.to_offset(&buffer), 0);
     assert_eq!(after_start_anchor.to_offset(&buffer), 3);
@@ -576,20 +527,15 @@ fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) {
     assert_eq!(after_end_anchor.to_offset(&buffer), 9);
 }
 
-#[gpui::test]
-fn test_undo_redo(cx: &mut gpui::TestAppContext) {
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "1234",
-        cx.background_executor(),
-    );
+#[test]
+fn test_undo_redo() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234");
     // Set group interval to zero so as to not group edits in the undo stack.
     buffer.set_group_interval(Duration::from_secs(0));
 
-    buffer.edit([(1..1, "abx")], cx.background_executor());
-    buffer.edit([(3..4, "yzef")], cx.background_executor());
-    buffer.edit([(3..5, "cd")], cx.background_executor());
+    buffer.edit([(1..1, "abx")]);
+    buffer.edit([(3..4, "yzef")]);
+    buffer.edit([(3..5, "cd")]);
     assert_eq!(buffer.text(), "1abcdef234");
 
     let entries = buffer.history.undo_stack.clone();
@@ -617,31 +563,26 @@ fn test_undo_redo(cx: &mut gpui::TestAppContext) {
     assert_eq!(buffer.text(), "1234");
 }
 
-#[gpui::test]
-fn test_history(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_history() {
     let mut now = Instant::now();
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "123456",
-        cx.background_executor(),
-    );
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456");
     buffer.set_group_interval(Duration::from_millis(300));
 
     let transaction_1 = buffer.start_transaction_at(now).unwrap();
-    buffer.edit([(2..4, "cd")], cx.background_executor());
+    buffer.edit([(2..4, "cd")]);
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56");
 
     buffer.start_transaction_at(now);
-    buffer.edit([(4..5, "e")], cx.background_executor());
+    buffer.edit([(4..5, "e")]);
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "12cde6");
 
     now += buffer.transaction_group_interval() + Duration::from_millis(1);
     buffer.start_transaction_at(now);
-    buffer.edit([(0..1, "a")], cx.background_executor());
-    buffer.edit([(1..1, "b")], cx.background_executor());
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "ab2cde6");
 
@@ -668,7 +609,7 @@ fn test_history(cx: &mut gpui::TestAppContext) {
 
     // Redo stack gets cleared after performing an edit.
     buffer.start_transaction_at(now);
-    buffer.edit([(0..0, "X")], cx.background_executor());
+    buffer.edit([(0..0, "X")]);
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "X12cde6");
     buffer.redo();
@@ -689,31 +630,26 @@ fn test_history(cx: &mut gpui::TestAppContext) {
     assert_eq!(buffer.text(), "X12cde6");
 }
 
-#[gpui::test]
-fn test_finalize_last_transaction(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_finalize_last_transaction() {
     let now = Instant::now();
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "123456",
-        cx.background_executor(),
-    );
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456");
     buffer.history.group_interval = Duration::from_millis(1);
 
     buffer.start_transaction_at(now);
-    buffer.edit([(2..4, "cd")], cx.background_executor());
+    buffer.edit([(2..4, "cd")]);
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56");
 
     buffer.finalize_last_transaction();
     buffer.start_transaction_at(now);
-    buffer.edit([(4..5, "e")], cx.background_executor());
+    buffer.edit([(4..5, "e")]);
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "12cde6");
 
     buffer.start_transaction_at(now);
-    buffer.edit([(0..1, "a")], cx.background_executor());
-    buffer.edit([(1..1, "b")], cx.background_executor());
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "ab2cde6");
 
@@ -730,19 +666,14 @@ fn test_finalize_last_transaction(cx: &mut gpui::TestAppContext) {
     assert_eq!(buffer.text(), "ab2cde6");
 }
 
-#[gpui::test]
-fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_edited_ranges_for_transaction() {
     let now = Instant::now();
-    let mut buffer = Buffer::new(
-        ReplicaId::LOCAL,
-        BufferId::new(1).unwrap(),
-        "1234567",
-        cx.background_executor(),
-    );
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234567");
 
     buffer.start_transaction_at(now);
-    buffer.edit([(2..4, "cd")], cx.background_executor());
-    buffer.edit([(6..6, "efg")], cx.background_executor());
+    buffer.edit([(2..4, "cd")]);
+    buffer.edit([(6..6, "efg")]);
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56efg7");
 
@@ -754,7 +685,7 @@ fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) {
         [2..4, 6..9]
     );
 
-    buffer.edit([(5..5, "hijk")], cx.background_executor());
+    buffer.edit([(5..5, "hijk")]);
     assert_eq!(buffer.text(), "12cd5hijk6efg7");
     assert_eq!(
         buffer
@@ -763,7 +694,7 @@ fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) {
         [2..4, 10..13]
     );
 
-    buffer.edit([(4..4, "l")], cx.background_executor());
+    buffer.edit([(4..4, "l")]);
     assert_eq!(buffer.text(), "12cdl5hijk6efg7");
     assert_eq!(
         buffer
@@ -773,42 +704,27 @@ fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) {
     );
 }
 
-#[gpui::test]
-fn test_concurrent_edits(cx: &mut gpui::TestAppContext) {
+#[test]
+fn test_concurrent_edits() {
     let text = "abcdef";
 
-    let mut buffer1 = Buffer::new(
-        ReplicaId::new(1),
-        BufferId::new(1).unwrap(),
-        text,
-        cx.background_executor(),
-    );
-    let mut buffer2 = Buffer::new(
-        ReplicaId::new(2),
-        BufferId::new(1).unwrap(),
-        text,
-        cx.background_executor(),
-    );
-    let mut buffer3 = Buffer::new(
-        ReplicaId::new(3),
-        BufferId::new(1).unwrap(),
-        text,
-        cx.background_executor(),
-    );
+    let mut buffer1 = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), text);
+    let mut buffer2 = Buffer::new(ReplicaId::new(2), BufferId::new(1).unwrap(), text);
+    let mut buffer3 = Buffer::new(ReplicaId::new(3), BufferId::new(1).unwrap(), text);
 
-    let buf1_op = buffer1.edit([(1..2, "12")], cx.background_executor());
+    let buf1_op = buffer1.edit([(1..2, "12")]);
     assert_eq!(buffer1.text(), "a12cdef");
-    let buf2_op = buffer2.edit([(3..4, "34")], cx.background_executor());
+    let buf2_op = buffer2.edit([(3..4, "34")]);
     assert_eq!(buffer2.text(), "abc34ef");
-    let buf3_op = buffer3.edit([(5..6, "56")], cx.background_executor());
+    let buf3_op = buffer3.edit([(5..6, "56")]);
     assert_eq!(buffer3.text(), "abcde56");
 
-    buffer1.apply_op(buf2_op.clone(), Some(cx.background_executor()));
-    buffer1.apply_op(buf3_op.clone(), Some(cx.background_executor()));
-    buffer2.apply_op(buf1_op.clone(), Some(cx.background_executor()));
-    buffer2.apply_op(buf3_op, Some(cx.background_executor()));
-    buffer3.apply_op(buf1_op, Some(cx.background_executor()));
-    buffer3.apply_op(buf2_op, Some(cx.background_executor()));
+    buffer1.apply_op(buf2_op.clone());
+    buffer1.apply_op(buf3_op.clone());
+    buffer2.apply_op(buf1_op.clone());
+    buffer2.apply_op(buf3_op);
+    buffer3.apply_op(buf1_op);
+    buffer3.apply_op(buf2_op);
 
     assert_eq!(buffer1.text(), "a12c34e56");
     assert_eq!(buffer2.text(), "a12c34e56");
@@ -816,7 +732,7 @@ fn test_concurrent_edits(cx: &mut gpui::TestAppContext) {
 }
 
 #[gpui::test(iterations = 100)]
-fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) {
+fn test_random_concurrent_edits(mut rng: StdRng) {
     let peers = env::var("PEERS")
         .map(|i| i.parse().expect("invalid `PEERS` variable"))
         .unwrap_or(5);
@@ -837,7 +753,6 @@ fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext)
             ReplicaId::new(i as u16),
             BufferId::new(1).unwrap(),
             base_text.clone(),
-            cx.background_executor(),
         );
         buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200));
         buffers.push(buffer);
@@ -854,9 +769,7 @@ fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext)
         let buffer = &mut buffers[replica_index];
         match rng.random_range(0..=100) {
             0..=50 if mutation_count != 0 => {
-                let op = buffer
-                    .randomly_edit(&mut rng, 5, cx.background_executor())
-                    .1;
+                let op = buffer.randomly_edit(&mut rng, 5).1;
                 network.broadcast(buffer.replica_id, vec![op]);
                 log::info!("buffer {:?} text: {:?}", buffer.replica_id, buffer.text());
                 mutation_count -= 1;
@@ -874,7 +787,7 @@ fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext)
                         replica_id,
                         ops.len()
                     );
-                    buffer.apply_ops(ops, Some(cx.background_executor()));
+                    buffer.apply_ops(ops);
                 }
             }
             _ => {}
  
  
  
    
    @@ -15,7 +15,6 @@ use anyhow::{Context as _, Result};
 use clock::Lamport;
 pub use clock::ReplicaId;
 use collections::{HashMap, HashSet};
-use gpui::BackgroundExecutor;
 use locator::Locator;
 use operation_queue::OperationQueue;
 pub use patch::Patch;
@@ -710,41 +709,11 @@ impl FromIterator<char> for LineIndent {
 }
 
 impl Buffer {
-    /// Create a new buffer from a string.
-    pub fn new(
-        replica_id: ReplicaId,
-        remote_id: BufferId,
-        base_text: impl Into<String>,
-        executor: &BackgroundExecutor,
-    ) -> Buffer {
+    pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
         let mut base_text = base_text.into();
         let line_ending = LineEnding::detect(&base_text);
         LineEnding::normalize(&mut base_text);
-        Self::new_normalized(
-            replica_id,
-            remote_id,
-            line_ending,
-            Rope::from_str(&base_text, executor),
-        )
-    }
-
-    /// Create a new buffer from a string.
-    ///
-    /// Unlike [`Buffer::new`], this does not construct the backing rope in parallel if it is large enough.
-    pub fn new_slow(
-        replica_id: ReplicaId,
-        remote_id: BufferId,
-        base_text: impl Into<String>,
-    ) -> Buffer {
-        let mut base_text = base_text.into();
-        let line_ending = LineEnding::detect(&base_text);
-        LineEnding::normalize(&mut base_text);
-        Self::new_normalized(
-            replica_id,
-            remote_id,
-            line_ending,
-            Rope::from_str_small(&base_text),
-        )
+        Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
     }
 
     pub fn new_normalized(
@@ -839,7 +808,7 @@ impl Buffer {
         self.history.group_interval
     }
 
-    pub fn edit<R, I, S, T>(&mut self, edits: R, cx: &BackgroundExecutor) -> Operation
+    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
     where
         R: IntoIterator<IntoIter = I>,
         I: ExactSizeIterator<Item = (Range<S>, T)>,
@@ -852,7 +821,7 @@ impl Buffer {
 
         self.start_transaction();
         let timestamp = self.lamport_clock.tick();
-        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp, cx));
+        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
 
         self.history.push(operation.clone());
         self.history.push_undo(operation.timestamp());
@@ -865,7 +834,6 @@ impl Buffer {
         &mut self,
         edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
         timestamp: clock::Lamport,
-        executor: &BackgroundExecutor,
     ) -> EditOperation {
         let mut edits_patch = Patch::default();
         let mut edit_op = EditOperation {
@@ -954,7 +922,7 @@ impl Buffer {
                 });
                 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
                 new_insertions.push(InsertionFragment::insert_new(&fragment));
-                new_ropes.push_str(new_text.as_ref(), executor);
+                new_ropes.push_str(new_text.as_ref());
                 new_fragments.push(fragment, &None);
                 insertion_offset += new_text.len();
             }
@@ -1033,26 +1001,22 @@ impl Buffer {
         self.snapshot.line_ending = line_ending;
     }
 
-    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
-        &mut self,
-        ops: I,
-        executor: Option<&BackgroundExecutor>,
-    ) {
+    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
         let mut deferred_ops = Vec::new();
         for op in ops {
             self.history.push(op.clone());
             if self.can_apply_op(&op) {
-                self.apply_op(op, executor);
+                self.apply_op(op);
             } else {
                 self.deferred_replicas.insert(op.replica_id());
                 deferred_ops.push(op);
             }
         }
         self.deferred_ops.insert(deferred_ops);
-        self.flush_deferred_ops(executor);
+        self.flush_deferred_ops();
     }
 
-    fn apply_op(&mut self, op: Operation, executor: Option<&BackgroundExecutor>) {
+    fn apply_op(&mut self, op: Operation) {
         match op {
             Operation::Edit(edit) => {
                 if !self.version.observed(edit.timestamp) {
@@ -1061,7 +1025,6 @@ impl Buffer {
                         &edit.ranges,
                         &edit.new_text,
                         edit.timestamp,
-                        executor,
                     );
                     self.snapshot.version.observe(edit.timestamp);
                     self.lamport_clock.observe(edit.timestamp);
@@ -1092,7 +1055,6 @@ impl Buffer {
         ranges: &[Range<FullOffset>],
         new_text: &[Arc<str>],
         timestamp: clock::Lamport,
-        executor: Option<&BackgroundExecutor>,
     ) {
         if ranges.is_empty() {
             return;
@@ -1208,10 +1170,7 @@ impl Buffer {
                 });
                 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
                 new_insertions.push(InsertionFragment::insert_new(&fragment));
-                match executor {
-                    Some(executor) => new_ropes.push_str(new_text, executor),
-                    None => new_ropes.push_str_small(new_text),
-                }
+                new_ropes.push_str(new_text);
                 new_fragments.push(fragment, &None);
                 insertion_offset += new_text.len();
             }
@@ -1389,12 +1348,12 @@ impl Buffer {
         self.subscriptions.publish_mut(&edits);
     }
 
-    fn flush_deferred_ops(&mut self, executor: Option<&BackgroundExecutor>) {
+    fn flush_deferred_ops(&mut self) {
         self.deferred_replicas.clear();
         let mut deferred_ops = Vec::new();
         for op in self.deferred_ops.drain().iter().cloned() {
             if self.can_apply_op(&op) {
-                self.apply_op(op, executor);
+                self.apply_op(op);
             } else {
                 self.deferred_replicas.insert(op.replica_id());
                 deferred_ops.push(op);
@@ -1752,9 +1711,9 @@ impl Buffer {
 #[cfg(any(test, feature = "test-support"))]
 impl Buffer {
     #[track_caller]
-    pub fn edit_via_marked_text(&mut self, marked_string: &str, cx: &BackgroundExecutor) {
+    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
         let edits = self.edits_for_marked_text(marked_string);
-        self.edit(edits, cx);
+        self.edit(edits);
     }
 
     #[track_caller]
@@ -1891,7 +1850,6 @@ impl Buffer {
         &mut self,
         rng: &mut T,
         edit_count: usize,
-        executor: &BackgroundExecutor,
     ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
     where
         T: rand::Rng,
@@ -1899,7 +1857,7 @@ impl Buffer {
         let mut edits = self.get_random_edits(rng, edit_count);
         log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
 
-        let op = self.edit(edits.iter().cloned(), executor);
+        let op = self.edit(edits.iter().cloned());
         if let Operation::Edit(edit) = &op {
             assert_eq!(edits.len(), edit.new_text.len());
             for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
@@ -2748,12 +2706,8 @@ impl<'a> RopeBuilder<'a> {
         }
     }
 
-    fn push_str(&mut self, text: &str, cx: &BackgroundExecutor) {
-        self.new_visible.push(text, cx);
-    }
-
-    fn push_str_small(&mut self, text: &str) {
-        self.new_visible.push_small(text);
+    fn push_str(&mut self, text: &str) {
+        self.new_visible.push(text);
     }
 
     fn finish(mut self) -> (Rope, Rope) {
  
  
  
    
    @@ -220,6 +220,8 @@ impl TitleBar {
                                 .on_click({
                                     let peer_id = collaborator.peer_id;
                                     cx.listener(move |this, _, window, cx| {
+                                        cx.stop_propagation();
+
                                         this.workspace
                                             .update(cx, |workspace, cx| {
                                                 if is_following {
  
  
  
    
    @@ -270,11 +270,11 @@ fn show_menu<M: ManagedView>(
     window: &mut Window,
     cx: &mut App,
 ) {
+    let previous_focus_handle = window.focused(cx);
     let Some(new_menu) = (builder)(window, cx) else {
         return;
     };
     let menu2 = menu.clone();
-    let previous_focus_handle = window.focused(cx);
 
     window
         .subscribe(&new_menu, cx, move |modal, _: &DismissEvent, window, cx| {
  
  
  
    
    @@ -392,7 +392,7 @@ pub struct Scrollbars<T: ScrollableHandle = ScrollHandle> {
 
 impl Scrollbars {
     pub fn new(show_along: ScrollAxes) -> Self {
-        Self::new_with_setting(show_along, |_| ShowScrollbar::Always)
+        Self::new_with_setting(show_along, |_| ShowScrollbar::default())
     }
 
     pub fn for_settings<S: ScrollbarVisibility>() -> Scrollbars {
  
  
  
    
    @@ -408,6 +408,15 @@ impl ShellKind {
         }
     }
 
+    pub fn prepend_command_prefix<'a>(&self, command: &'a str) -> Cow<'a, str> {
+        match self.command_prefix() {
+            Some(prefix) if !command.starts_with(prefix) => {
+                Cow::Owned(format!("{prefix}{command}"))
+            }
+            _ => Cow::Borrowed(command),
+        }
+    }
+
     pub const fn sequential_commands_separator(&self) -> char {
         match self {
             ShellKind::Cmd => '&',
@@ -422,6 +431,20 @@ impl ShellKind {
         }
     }
 
+    pub const fn sequential_and_commands_separator(&self) -> &'static str {
+        match self {
+            ShellKind::Cmd
+            | ShellKind::Posix
+            | ShellKind::Csh
+            | ShellKind::Tcsh
+            | ShellKind::Rc
+            | ShellKind::Fish
+            | ShellKind::PowerShell
+            | ShellKind::Xonsh => "&&",
+            ShellKind::Nushell => ";",
+        }
+    }
+
     pub fn try_quote<'a>(&self, arg: &'a str) -> Option<Cow<'a, str>> {
         shlex::try_quote(arg).ok().map(|arg| match self {
             // If we are running in PowerShell, we want to take extra care when escaping strings.
@@ -438,6 +461,42 @@ impl ShellKind {
         })
     }
 
+    /// Quotes the given argument if necessary, taking into account the command prefix.
+    ///
+    /// In other words, this will consider quoting arg without its command prefix to not break the command.
+    /// You should use this over `try_quote` when you want to quote a shell command.
+    pub fn try_quote_prefix_aware<'a>(&self, arg: &'a str) -> Option<Cow<'a, str>> {
+        if let Some(char) = self.command_prefix() {
+            if let Some(arg) = arg.strip_prefix(char) {
+                // we have a command that is prefixed
+                for quote in ['\'', '"'] {
+                    if let Some(arg) = arg
+                        .strip_prefix(quote)
+                        .and_then(|arg| arg.strip_suffix(quote))
+                    {
+                        // and the command itself is wrapped as a literal, that
+                        // means the prefix exists to interpret a literal as a
+                        // command. So strip the quotes, quote the command, and
+                        // re-add the quotes if they are missing after requoting
+                        let quoted = self.try_quote(arg)?;
+                        return Some(if quoted.starts_with(['\'', '"']) {
+                            Cow::Owned(self.prepend_command_prefix("ed).into_owned())
+                        } else {
+                            Cow::Owned(
+                                self.prepend_command_prefix(&format!("{quote}{quoted}{quote}"))
+                                    .into_owned(),
+                            )
+                        });
+                    }
+                }
+                return self
+                    .try_quote(arg)
+                    .map(|quoted| Cow::Owned(self.prepend_command_prefix("ed).into_owned()));
+            }
+        }
+        self.try_quote(arg)
+    }
+
     pub fn split(&self, input: &str) -> Option<Vec<String>> {
         shlex::split(input)
     }
@@ -525,4 +584,75 @@ mod tests {
             "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string()
         );
     }
+
+    #[test]
+    fn test_try_quote_nu_command() {
+        let shell_kind = ShellKind::Nushell;
+        assert_eq!(
+            shell_kind.try_quote("'uname'").unwrap().into_owned(),
+            "\"'uname'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("'uname'")
+                .unwrap()
+                .into_owned(),
+            "\"'uname'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("^uname").unwrap().into_owned(),
+            "'^uname'".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("^uname")
+                .unwrap()
+                .into_owned(),
+            "^uname".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("^'uname'").unwrap().into_owned(),
+            "'^'\"'uname\'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("^'uname'")
+                .unwrap()
+                .into_owned(),
+            "^'uname'".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("'uname a'").unwrap().into_owned(),
+            "\"'uname a'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("'uname a'")
+                .unwrap()
+                .into_owned(),
+            "\"'uname a'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("^'uname a'").unwrap().into_owned(),
+            "'^'\"'uname a'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("^'uname a'")
+                .unwrap()
+                .into_owned(),
+            "^'uname a'".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("uname").unwrap().into_owned(),
+            "uname".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("uname")
+                .unwrap()
+                .into_owned(),
+            "uname".to_string()
+        );
+    }
 }
  
  
  
    
    @@ -3096,7 +3096,6 @@ mod test {
     use indoc::indoc;
     use language::Point;
     use multi_buffer::MultiBufferRow;
-    use text::Rope;
 
     #[gpui::test]
     async fn test_start_end_of_paragraph(cx: &mut gpui::TestAppContext) {
@@ -3823,7 +3822,7 @@ mod test {
         cx.update_editor(|editor, _window, cx| {
             let range = editor.selections.newest_anchor().range();
             let inlay_text = "  field: int,\n  field2: string\n  field3: float";
-            let inlay = Inlay::edit_prediction(1, range.start, Rope::from_str_small(inlay_text));
+            let inlay = Inlay::edit_prediction(1, range.start, inlay_text);
             editor.splice_inlays(&[], vec![inlay], cx);
         });
 
@@ -3855,7 +3854,7 @@ mod test {
             let end_of_line =
                 snapshot.anchor_after(Point::new(0, snapshot.line_len(MultiBufferRow(0))));
             let inlay_text = " hint";
-            let inlay = Inlay::edit_prediction(1, end_of_line, Rope::from_str_small(inlay_text));
+            let inlay = Inlay::edit_prediction(1, end_of_line, inlay_text);
             editor.splice_inlays(&[], vec![inlay], cx);
         });
         cx.simulate_keystrokes("$");
@@ -3894,7 +3893,7 @@ mod test {
             // The empty line is at line 3 (0-indexed)
             let line_start = snapshot.anchor_after(Point::new(3, 0));
             let inlay_text = ": Vec<u32>";
-            let inlay = Inlay::edit_prediction(1, line_start, Rope::from_str_small(inlay_text));
+            let inlay = Inlay::edit_prediction(1, line_start, inlay_text);
             editor.splice_inlays(&[], vec![inlay], cx);
         });
 
@@ -3938,8 +3937,7 @@ mod test {
             let snapshot = editor.buffer().read(cx).snapshot(cx);
             let empty_line_start = snapshot.anchor_after(Point::new(2, 0));
             let inlay_text = ": i32";
-            let inlay =
-                Inlay::edit_prediction(2, empty_line_start, Rope::from_str_small(inlay_text));
+            let inlay = Inlay::edit_prediction(2, empty_line_start, inlay_text);
             editor.splice_inlays(&[], vec![inlay], cx);
         });
 
  
  
  
    
    @@ -296,6 +296,15 @@ pub trait Item: Focusable + EventEmitter<Self::Event> + Render + Sized {
         None
     }
 
+    /// Returns optional elements to render to the left of the breadcrumb.
+    fn breadcrumb_prefix(
+        &self,
+        _window: &mut Window,
+        _cx: &mut Context<Self>,
+    ) -> Option<gpui::AnyElement> {
+        None
+    }
+
     fn added_to_workspace(
         &mut self,
         _workspace: &mut Workspace,
@@ -479,6 +488,7 @@ pub trait ItemHandle: 'static + Send {
     fn to_searchable_item_handle(&self, cx: &App) -> Option<Box<dyn SearchableItemHandle>>;
     fn breadcrumb_location(&self, cx: &App) -> ToolbarItemLocation;
     fn breadcrumbs(&self, theme: &Theme, cx: &App) -> Option<Vec<BreadcrumbText>>;
+    fn breadcrumb_prefix(&self, window: &mut Window, cx: &mut App) -> Option<gpui::AnyElement>;
     fn show_toolbar(&self, cx: &App) -> bool;
     fn pixel_position_of_cursor(&self, cx: &App) -> Option<Point<Pixels>>;
     fn downgrade_item(&self) -> Box<dyn WeakItemHandle>;
@@ -979,6 +989,10 @@ impl<T: Item> ItemHandle for Entity<T> {
         self.read(cx).breadcrumbs(theme, cx)
     }
 
+    fn breadcrumb_prefix(&self, window: &mut Window, cx: &mut App) -> Option<gpui::AnyElement> {
+        self.update(cx, |item, cx| item.breadcrumb_prefix(window, cx))
+    }
+
     fn show_toolbar(&self, cx: &App) -> bool {
         self.read(cx).show_toolbar()
     }
  
  
  
    
    @@ -7580,13 +7580,13 @@ pub fn create_and_open_local_file(
     path: &'static Path,
     window: &mut Window,
     cx: &mut Context<Workspace>,
-    default_content: impl 'static + Send + FnOnce(&mut AsyncApp) -> Rope,
+    default_content: impl 'static + Send + FnOnce() -> Rope,
 ) -> Task<Result<Box<dyn ItemHandle>>> {
     cx.spawn_in(window, async move |workspace, cx| {
         let fs = workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;
         if !fs.is_file(path).await {
             fs.create_file(path, Default::default()).await?;
-            fs.save(path, &default_content(cx), Default::default())
+            fs.save(path, &default_content(), Default::default())
                 .await?;
         }
 
  
  
  
    
    @@ -20,7 +20,6 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use text::Rope;
 use util::{
     ResultExt, path,
     rel_path::{RelPath, rel_path},
@@ -647,13 +646,9 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
 
     // Update the gitignore so that node_modules is no longer ignored,
     // but a subdirectory is ignored
-    fs.save(
-        "/root/.gitignore".as_ref(),
-        &Rope::from_str("e", cx.background_executor()),
-        Default::default(),
-    )
-    .await
-    .unwrap();
+    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
+        .await
+        .unwrap();
     cx.executor().run_until_parked();
 
     // All of the directories that are no longer ignored are now loaded.
@@ -721,7 +716,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
         .update(cx, |tree, cx| {
             tree.write_file(
                 rel_path("tracked-dir/file.txt").into(),
-                Rope::from_str("hello", cx.background_executor()),
+                "hello".into(),
                 Default::default(),
                 cx,
             )
@@ -732,7 +727,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
         .update(cx, |tree, cx| {
             tree.write_file(
                 rel_path("ignored-dir/file.txt").into(),
-                Rope::from_str("world", cx.background_executor()),
+                "world".into(),
                 Default::default(),
                 cx,
             )
@@ -1470,7 +1465,7 @@ async fn test_random_worktree_operations_during_initial_scan(
     let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
     fs.as_fake().insert_tree(root_dir, json!({})).await;
     for _ in 0..initial_entries {
-        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
     }
     log::info!("generated initial tree");
 
@@ -1560,7 +1555,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
     let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
     fs.as_fake().insert_tree(root_dir, json!({})).await;
     for _ in 0..initial_entries {
-        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
     }
     log::info!("generated initial tree");
 
@@ -1603,7 +1598,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
                 .await
                 .log_err();
         } else {
-            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
+            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
         }
 
         let buffered_event_count = fs.as_fake().buffered_event_count();
@@ -1612,7 +1607,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
             log::info!("flushing {} events", len);
             fs.as_fake().flush_events(len);
         } else {
-            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng, cx.background_executor()).await;
+            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
             mutations_len -= 1;
         }
 
@@ -1764,12 +1759,8 @@ fn randomly_mutate_worktree(
                 })
             } else {
                 log::info!("overwriting file {:?} ({})", &entry.path, entry.id.0);
-                let task = worktree.write_file(
-                    entry.path.clone(),
-                    Rope::default(),
-                    Default::default(),
-                    cx,
-                );
+                let task =
+                    worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
                 cx.background_spawn(async move {
                     task.await?;
                     Ok(())
@@ -1784,7 +1775,6 @@ async fn randomly_mutate_fs(
     root_path: &Path,
     insertion_probability: f64,
     rng: &mut impl Rng,
-    executor: &BackgroundExecutor,
 ) {
     log::info!("mutating fs");
     let mut files = Vec::new();
@@ -1859,7 +1849,7 @@ async fn randomly_mutate_fs(
         );
         fs.save(
             &ignore_path,
-            &Rope::from_str(ignore_contents.as_str(), executor),
+            &ignore_contents.as_str().into(),
             Default::default(),
         )
         .await
  
  
  
    
    @@ -73,6 +73,7 @@ gpui = { workspace = true, features = [
     "windows-manifest",
 ] }
 gpui_tokio.workspace = true
+rayon.workspace = true
 
 edit_prediction_button.workspace = true
 http_client.workspace = true
  
  
  
    
    @@ -257,6 +257,13 @@ pub fn main() {
         return;
     }
 
+    rayon::ThreadPoolBuilder::new()
+        .num_threads(4)
+        .stack_size(10 * 1024 * 1024)
+        .thread_name(|ix| format!("RayonWorker{}", ix))
+        .build_global()
+        .unwrap();
+
     log::info!(
         "========== starting zed version {}, sha {} ==========",
         app_version,
  
  
  
    
    @@ -28,10 +28,10 @@ use git_ui::commit_view::CommitViewToolbar;
 use git_ui::git_panel::GitPanel;
 use git_ui::project_diff::ProjectDiffToolbar;
 use gpui::{
-    Action, App, AppContext as _, AsyncApp, Context, DismissEvent, Element, Entity, Focusable,
-    KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled,
-    Task, TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache,
-    point, px, retain_all,
+    Action, App, AppContext as _, Context, DismissEvent, Element, Entity, Focusable, KeyBinding,
+    ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, Task,
+    TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, point,
+    px, retain_all,
 };
 use image_viewer::ImageInfo;
 use language::Capability;
@@ -201,12 +201,7 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::keymap_file(),
-                |cx| {
-                    Rope::from_str(
-                        settings::initial_keymap_content().as_ref(),
-                        cx.background_executor(),
-                    )
-                },
+                || settings::initial_keymap_content().as_ref().into(),
                 window,
                 cx,
             );
@@ -216,12 +211,7 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::settings_file(),
-                |cx| {
-                    Rope::from_str(
-                        settings::initial_user_settings_content().as_ref(),
-                        cx.background_executor(),
-                    )
-                },
+                || settings::initial_user_settings_content().as_ref().into(),
                 window,
                 cx,
             );
@@ -236,12 +226,7 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::tasks_file(),
-                |cx| {
-                    Rope::from_str(
-                        settings::initial_tasks_content().as_ref(),
-                        cx.background_executor(),
-                    )
-                },
+                || settings::initial_tasks_content().as_ref().into(),
                 window,
                 cx,
             );
@@ -251,12 +236,7 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::debug_scenarios_file(),
-                |cx| {
-                    Rope::from_str(
-                        settings::initial_debug_tasks_content().as_ref(),
-                        cx.background_executor(),
-                    )
-                },
+                || settings::initial_debug_tasks_content().as_ref().into(),
                 window,
                 cx,
             );
@@ -1959,7 +1939,7 @@ fn open_bundled_file(
 
 fn open_settings_file(
     abs_path: &'static Path,
-    default_content: impl FnOnce(&mut AsyncApp) -> Rope + Send + 'static,
+    default_content: impl FnOnce() -> Rope + Send + 'static,
     window: &mut Window,
     cx: &mut Context<Workspace>,
 ) {
@@ -4375,7 +4355,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#),
+                &r#"{"base_keymap": "Atom"}"#.into(),
                 Default::default(),
             )
             .await
@@ -4385,7 +4365,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#),
+                &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(),
                 Default::default(),
             )
             .await
@@ -4433,7 +4413,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#),
+                &r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#.into(),
                 Default::default(),
             )
             .await
@@ -4453,7 +4433,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#),
+                &r#"{"base_keymap": "JetBrains"}"#.into(),
                 Default::default(),
             )
             .await
@@ -4493,7 +4473,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#),
+                &r#"{"base_keymap": "Atom"}"#.into(),
                 Default::default(),
             )
             .await
@@ -4502,7 +4482,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#),
+                &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(),
                 Default::default(),
             )
             .await
@@ -4545,7 +4525,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &Rope::from_str_small(r#"[{"bindings": {"backspace": null}}]"#),
+                &r#"[{"bindings": {"backspace": null}}]"#.into(),
                 Default::default(),
             )
             .await
@@ -4565,7 +4545,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#),
+                &r#"{"base_keymap": "JetBrains"}"#.into(),
                 Default::default(),
             )
             .await
  
  
  
    
    @@ -861,7 +861,7 @@ mod tests {
             .fs
             .save(
                 Path::new(file1_path),
-                &Rope::from_str("content1", cx.background_executor()),
+                &Rope::from("content1"),
                 LineEnding::Unix,
             )
             .await
@@ -875,7 +875,7 @@ mod tests {
             .fs
             .save(
                 Path::new(file2_path),
-                &Rope::from_str("content2", cx.background_executor()),
+                &Rope::from("content2"),
                 LineEnding::Unix,
             )
             .await
  
  
  
    
    @@ -1836,13 +1836,12 @@ mod tests {
         let fs = project::FakeFs::new(cx.executor());
         let project = Project::test(fs.clone(), [], cx).await;
 
-        let buffer = cx.new(|cx| {
+        let buffer = cx.new(|_cx| {
             Buffer::remote(
                 language::BufferId::new(1).unwrap(),
                 ReplicaId::new(1),
                 language::Capability::ReadWrite,
                 "fn main() {\n    println!(\"Hello\");\n}",
-                cx.background_executor(),
             )
         });
 
  
  
  
    
    @@ -165,6 +165,5 @@
   - [Local Collaboration](./development/local-collaboration.md)
   - [Using Debuggers](./development/debuggers.md)
   - [Glossary](./development/glossary.md)
-- [Release Process](./development/releases.md)
 - [Release Notes](./development/release-notes.md)
 - [Debugging Crashes](./development/debugging-crashes.md)
  
  
  
    
    @@ -3163,6 +3163,12 @@ Non-negative `integer` values
 - Setting: `search_wrap`
 - Default: `true`
 
+## Center on Match
+
+- Description: If `center_on_match` is enabled, the editor will center the cursor on the current match when searching.
+- Setting: `center_on_match`
+- Default: `false`
+
 ## Seed Search Query From Cursor
 
 - Description: When to populate a new search's query based on the text under the cursor.
  
  
  
    
    @@ -88,7 +88,6 @@ in-depth examples and explanations.
 ## Contributor links
 
 - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md)
-- [Releases](./development/releases.md)
 - [Debugging Crashes](./development/debugging-crashes.md)
 - [Code of Conduct](https://zed.dev/code-of-conduct)
 - [Zed Contributor License](https://zed.dev/cla)
  
  
  
    
    @@ -165,6 +165,58 @@ $ cargo heaptrack -b zed
 
 When this zed instance is exited, terminal output will include a command to run `heaptrack_interpret` to convert the `*.raw.zst` profile to a `*.zst` file which can be passed to `heaptrack_gui` for viewing.
 
+## Perf recording
+
+How to get a flamegraph with resolved symbols from a running zed instance. Use
+when zed is using a lot of CPU. Not useful for hangs.
+
+### During the incident
+
+- Find the PID (process ID) using:
+  `ps -eo size,pid,comm | grep zed | sort | head -n 1 | cut -d ' ' -f 2`
+  Or find the PID of the `zed-editor` process with the most RAM usage in something
+  like htop/btop/top.
+
+- Install perf:
+  On Ubuntu (derivatives) run `sudo apt install linux-tools`.
+
+- Perf Record:
+  run `sudo perf record -p <pid you just found>`, wait a few seconds to gather data, then press Ctrl+C. You should now have a `perf.data` file.
+
+- Make the output file user owned:
+  run `sudo chown $USER:$USER perf.data`
+
+- Get build info:
+  Run zed again and type `zed: about` in the command palette to get the exact commit.
+
+The `perf.data` file can be sent to Zed together with the exact commit.
+
+### Later
+
+This can be done by Zed staff.
+
+- Build Zed with symbols:
+  Check out the commit found previously and modify `Cargo.toml`.
+  Apply the following diff then make a release build.
+
+```diff
+[profile.release]
+-debug = "limited"
++debug = "full"
+```
+
+- Add the symbols to perf database:
+  `perf buildid-cache -v -a <path to release zed binary>`
+
+- Resolve the symbols from the db:
+  `perf inject -i perf.data -o perf_with_symbols.data`
+
+- Install flamegraph:
+  `cargo install cargo-flamegraph`
+
+- Render the flamegraph:
+  `flamegraph --perfdata perf_with_symbols.data`
+
 ## Troubleshooting
 
 ### Cargo errors claiming that a dependency is using unstable features
  
  
  
    
    @@ -1,147 +0,0 @@
-# Zed Releases
-
-Read about Zed's [release channels here](https://zed.dev/faq#what-are-the-release-channels).
-
-## Wednesday Release Process
-
-You will need write access to the Zed repository to do this.
-
-Credentials for various services used in this process can be found in 1Password.
-
-Use the `releases` Slack channel to notify the team that releases will be starting.
-This is mostly a formality on Wednesday's minor update releases, but can be beneficial when doing patch releases, as other devs may have landed fixes they'd like to cherry pick.
-
-### Starting the Builds
-
-1. Checkout `main` and ensure your working copy is clean.
-
-1. Run `git fetch && git pull` to ensure you have the latest commits locally.
-
-1. Run `git fetch --tags --force` to forcibly ensure your local tags are in sync with the remote.
-
-1. Run `./script/get-stable-channel-release-notes` and store output locally.
-
-1. Run `./script/bump-zed-minor-versions`.
-
-   - Push the tags and branches as instructed.
-
-1. Run `./script/get-preview-channel-changes` and store output locally.
-
-> **Note:** Always prioritize the stable release.
-> If you've completed aggregating stable release notes, you can move on to working on aggregating preview release notes, but once the stable build has finished, work through the rest of the stable steps to fully publish.
-> Preview can be finished up after.
-
-### Stable Release
-
-1. Aggregate stable release notes.
-
-   - Follow the instructions at the end of the script and aggregate the release notes into one structure.
-
-1. Once the stable release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the stable release notes into it and **save**.
-
-   - **Do not publish the draft!**
-
-1. Check the stable release assets.
-
-   - Ensure the stable release job has finished without error.
-   - Ensure the draft has the proper number of assetsβreleases currently have 12 assets each (as of v0.211).
-   - Download the artifacts for the stable release draft and test that you can run them locally.
-
-1. Publish the stable draft on [GitHub Releases](https://github.com/zed-industries/zed/releases).
-
-   - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
-     The release will be public once the rebuild has completed.
-
-1. Post the stable release notes to social media.
-
-   - Bluesky and X posts will already be built as drafts in [Buffer](https://buffer.com).
-   - Double-check links.
-   - Publish both, one at a time, ensuring both are posted to each respective platform.
-
-1. Send the stable release notes email.
-
-   - The email broadcast will already be built as a draft in [Kit](https://kit.com).
-   - Double-check links.
-   - Publish the email.
-
-### Preview Release
-
-1. Aggregate preview release notes.
-
-   - Take the script's output and build release notes by organizing each release note line into a category.
-   - Use a prior release for the initial outline.
-   - Make sure to append the `Credit` line, if present, to the end of each release note line.
-
-1. Once the preview release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the preview release notes into it and **save**.
-
-   - **Do not publish the draft!**
-
-1. Check the preview release assets.
-
-   - Ensure the preview release job has finished without error.
-   - Ensure the draft has the proper number of assetsβreleases currently have 12 assets each (as of v0.211).
-   - Download the artifacts for the preview release draft and test that you can run them locally.
-
-1. Publish the preview draft on [GitHub Releases](https://github.com/zed-industries/zed/releases).
-   - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
-     The release will be public once the rebuild has completed.
-
-### Prep Content for Next Week's Stable Release
-
-1. Build social media posts based on the popular items in preview.
-
-   - Draft the copy in the [tweets](https://zed.dev/channel/tweets-23331) channel.
-   - Create the preview media (videos, screenshots).
-     - For features that you film videos around, try to create alternative photo-only versions to be used in the email, as videos and GIFs aren't great for email.
-     - Store all created media in `Feature Media` in our Google Drive.
-   - Build X and Bluesky post drafts (copy and media) in [Buffer](https://buffer.com), to be sent for next week's stable release.
-
-   **Note: These are preview items and you may discover bugs.**
-   **This is a very good time to report these findings to the team!**
-
-1. Build email based on the popular items in preview.
-
-   - You can reuse the copy and photo media from the preview social media posts.
-   - Create a draft email in [Kit](https://kit.com), to be sent for next week's stable release.
-
-## Patch Release Process
-
-If your PR fixes a panic or a crash, you should cherry-pick it to the current stable and preview branches.
-If your PR fixes a regression in recently released code, you should cherry-pick it to preview.
-
-You will need write access to the Zed repository to do this:
-
----
-
-1. Send a PR containing your change to `main` as normal.
-
-1. Once it is merged, cherry-pick the commit locally to either of the release branches (`v0.XXX.x`).
-
-   - In some cases, you may have to handle a merge conflict.
-     More often than not, this will happen when cherry-picking to stable, as the stable branch is more "stale" than the preview branch.
-
-1. After the commit is cherry-picked, run `./script/trigger-release {preview|stable}`.
-   This will bump the version numbers, create a new release tag, and kick off a release build.
-
-   - This can also be run from the [GitHub Actions UI](https://github.com/zed-industries/zed/actions/workflows/bump_patch_version.yml):
-     
-
-1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), proofread and edit the release notes as needed and **save**.
-
-   - **Do not publish the drafts, yet.**
-
-1. Check the release assets.
-
-   - Ensure the stable / preview release jobs have finished without error.
-   - Ensure each draft has the proper number of assetsβreleases currently have 10 assets each.
-   - Download the artifacts for each release draft and test that you can run them locally.
-
-1. Publish stable / preview drafts, one at a time.
-   - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
-     The release will be public once the rebuild has completed.
-
-## Nightly release process
-
-In addition to the public releases, we also have a nightly build that we encourage employees to use.
-Nightly is released by cron once a day, and can be shipped as often as you'd like.
-There are no release notes or announcements, so you can just merge your changes to main and run `./script/trigger-release nightly`.
  
  
  
    
    @@ -11,7 +11,7 @@ The [Material Icon Theme](https://github.com/zed-extensions/material-icon-theme)
 There are two important directories for an icon theme extension:
 
 - `icon_themes`: This directory will contain one or more JSON files containing the icon theme definitions.
-- `icons`: This directory contains the icons assets that will be distributed with the extension. You can created subdirectories in this directory, if so desired.
+- `icons`: This directory contains the icon assets that will be distributed with the extension. You can create subdirectories in this directory, if so desired.
 
 Each icon theme file should adhere to the JSON schema specified at [`https://zed.dev/schema/icon_themes/v0.3.0.json`](https://zed.dev/schema/icon_themes/v0.3.0.json).
 
  
  
  
    
    @@ -324,7 +324,7 @@ This query marks number and string values in key-value pairs and arrays for reda
 
 The `runnables.scm` file defines rules for detecting runnable code.
 
-Here's an example from an `runnables.scm` file for JSON:
+Here's an example from a `runnables.scm` file for JSON:
 
 ```scheme
 (
  
  
  
    
    @@ -4,19 +4,21 @@ Zed comes with a built-in icon theme, with more icon themes available as extensi
 
 ## Selecting an Icon Theme
 
-See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with "icon theme selector: toggle".
+See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with `icon theme selector: toggle`.
 
 Navigating through the icon theme list by moving up and down will change the icon theme in real time and hitting enter will save it to your settings file.
 
 ## Installing more Icon Themes
 
-More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions).
+More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=icon-themes).
 
 ## Configuring Icon Themes
 
-Your selected icon theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux).
+Your selected icon theme is stored in your settings file.
+You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}).
 
-Just like with themes, Zed allows for configuring different icon themes for light and dark mode. You can set the mode to `"light"` or `"dark"` to ignore the current system mode.
+Just like with themes, Zed allows for configuring different icon themes for light and dark mode.
+You can set the mode to `"light"` or `"dark"` to ignore the current system mode.
 
 ```json [settings]
 {
  
  
  
    
    @@ -50,6 +50,7 @@ Zed supports the follow macOS releases:
 
 | Version       | Codename | Apple Status   | Zed Status          |
 | ------------- | -------- | -------------- | ------------------- |
+| macOS 26.x    | Tahoe    | Supported      | Supported           |
 | macOS 15.x    | Sequoia  | Supported      | Supported           |
 | macOS 14.x    | Sonoma   | Supported      | Supported           |
 | macOS 13.x    | Ventura  | Supported      | Supported           |
  
  
  
    
    @@ -7,7 +7,7 @@ Rego language support in Zed is provided by the community-maintained [Rego exten
 
 ## Installation
 
-The extensions is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information.
+The extension is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information.
 
 ## Configuration
 
  
  
  
    
    @@ -1,6 +1,6 @@
 # Snippets
 
-Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit a existing snippets file for a specified [scope](#scopes).
+Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit an existing snippets file for a specified [scope](#scopes).
 
 The snippets are located in the `~/.config/zed/snippets` directory, which you can navigate to with the {#action snippets::OpenFolder} action.
 
  
  
  
    
    @@ -4,21 +4,23 @@ Zed comes with a number of built-in themes, with more themes available as extens
 
 ## Selecting a Theme
 
-See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with "theme selector: Toggle" (bound to `cmd-k cmd-t` on macOS and `ctrl-k ctrl-t` on Linux).
+See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with `theme selector: toggle` (bound to {#kb theme_selector::Toggle}).
 
 Navigating through the theme list by moving up and down will change the theme in real time and hitting enter will save it to your settings file.
 
 ## Installing more Themes
 
-More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions).
+More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=themes).
 
 Many popular themes have been ported to Zed, and if you're struggling to choose one, visit [zed-themes.com](https://zed-themes.com), a third-party gallery with visible previews for many of them.
 
 ## Configuring a Theme
 
-Your selected theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux).
+Your selected theme is stored in your settings file.
+You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}).
 
-By default, Zed maintains two themes: one for light mode and one for dark mode. You can set the mode to `"dark"` or `"light"` to ignore the current system mode.
+By default, Zed maintains two themes: one for light mode and one for dark mode.
+You can set the mode to `"dark"` or `"light"` to ignore the current system mode.
 
 ```json [settings]
 {
@@ -32,7 +34,8 @@ By default, Zed maintains two themes: one for light mode and one for dark mode.
 
 ## Theme Overrides
 
-To override specific attributes of a theme, use the `theme_overrides` setting. This setting can be used to configure theme-specific overrides.
+To override specific attributes of a theme, use the `theme_overrides` setting.
+This setting can be used to configure theme-specific overrides.
 
 For example, add the following to your `settings.json` if you wish to override the background color of the editor and display comments and doc comments as italics:
 
@@ -54,17 +57,17 @@ For example, add the following to your `settings.json` if you wish to override t
 }
 ```
 
-To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see: [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting).
+To see a comprehensive list of captures (like `comment` and `comment.doc`) see [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting).
 
-To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes.
+To see a list of available theme attributes look at the JSON file for your theme.
+For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes.
 
 ## Local Themes
 
 Store new themes locally by placing them in the `~/.config/zed/themes` directory (macOS and Linux) or `%USERPROFILE%\AppData\Roaming\Zed\themes\` (Windows).
 
-For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. It will be available in the theme selector the next time Zed loads.
-
-Find more themes at [zed-themes.com](https://zed-themes.com).
+For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory.
+It will be available in the theme selector the next time Zed loads.
 
 ## Theme Development
 
  
  
  
    
    @@ -1,14 +1,14 @@
 # Visual Customization
 
-Various aspects of Zed's visual layout can be configured via Zed settings.json which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}).
+Various aspects of Zed's visual layout can be configured via either the settings window or the `settings.json` file, which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}) and {#action zed::OpenSettingsFile} ({#kb zed::OpenSettingsFile}) respectively.
 
 See [Configuring Zed](./configuring-zed.md) for additional information and other non-visual settings.
 
 ## Themes
 
-Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu.
+You can install many [themes](./themes.md) and [icon themes](./icon-themes.md) in the form of extensions by running {#action zed::Extensions} from the command palette.
 
-You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings:
+You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and {#action icon_theme_selector::Toggle} ({#kb icon_theme_selector::Toggle}) which will modify the following settings:
 
 ```json [settings]
 {
@@ -61,15 +61,20 @@ If you would like to use distinct themes for light mode/dark mode that can be se
     "line_height": "standard",
   },
 
-  // Agent Panel Font Settings
-  "agent_font_size": 15
+  // Controls the font size for agent responses in the agent panel.
+  // If not specified, it falls back to the UI font size.
+  "agent_ui_font_size": 15,
+  // Controls the font size for the agent panel's message editor, user message,
+  // and any other snippet of code.
+  "agent_buffer_font_size": 12
 ```
 
 ### Font ligatures
 
 By default Zed enables font ligatures, which visually combine certain adjacent characters.
 
-For example `=>` will be displayed as `β` and `!=` will be `β `. This is purely cosmetic and the individual characters remain unchanged.
+For example `=>` will be displayed as `β` and `!=` will be `β `.
+This is purely cosmetic and the individual characters remain unchanged.
 
 To disable this behavior use:
 
@@ -363,6 +368,8 @@ TBD: Centered layout related settings
 
   // How to render LSP `textDocument/documentColor` colors in the editor.
   "lsp_document_colors": "inlay",        // none, inlay, border, background
+  // When to show the scrollbar in the completion menu.
+  "completion_menu_scrollbar": "never", // auto, system, always, never
 ```
 
 ### Edit Predictions {#editor-ai}
@@ -464,7 +471,12 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k
     "default_width": 640,   // Default width (left/right docked)
     "default_height": 320,  // Default height (bottom docked)
   },
-  "agent_font_size": 16
+  // Controls the font size for agent responses in the agent panel.
+  // If not specified, it falls back to the UI font size.
+  "agent_ui_font_size": 15,
+  // Controls the font size for the agent panel's message editor, user message,
+  // and any other snippet of code.
+  "agent_buffer_font_size": 12
 ```
 
 See [Zed AI Documentation](./ai/overview.md) for additional non-visual AI settings.
  
  
  
    
    @@ -170,12 +170,7 @@ cp "assets/licenses.md" "${zed_dir}/licenses.md"
 
 # Create archive out of everything that's in the temp directory
 arch=$(uname -m)
-target="linux-${arch}"
-if  [[ "$channel" == "dev" ]]; then
-  archive="zed-${commit}-${target}.tar.gz"
-else
-  archive="zed-${target}.tar.gz"
-fi
+archive="zed-linux-${arch}.tar.gz"
 
 rm -rf "${archive}"
 remove_match="zed(-[a-zA-Z0-9]+)?-linux-$(uname -m)\.tar\.gz"
  
  
  
    
    @@ -70,12 +70,12 @@ target_triple=${host_line#*: }
 if [[ $# -gt 0 && -n "$1" ]]; then
     target_triple="$1"
 fi
-remote_server_arch=""
+arch_suffix=""
 
 if [[ "$target_triple" = "x86_64-apple-darwin" ]]; then
-    remote_server_arch="x86_64"
+    arch_suffix="x86_64"
 elif [[ "$target_triple" = "aarch64-apple-darwin" ]]; then
-    remote_server_arch="aarch64"
+    arch_suffix="aarch64"
 else
     echo "Unsupported architecture $target_triple"
     exit 1
@@ -232,7 +232,7 @@ function sign_app_binaries() {
     else
         dmg_target_directory="target/${target_triple}/${target_dir}"
         dmg_source_directory="${dmg_target_directory}/dmg"
-        dmg_file_path="${dmg_target_directory}/Zed.dmg"
+        dmg_file_path="${dmg_target_directory}/Zed-${arch_suffix}.dmg"
         xcode_bin_dir_path="$(xcode-select -p)/usr/bin"
 
         rm -rf ${dmg_source_directory}
@@ -310,4 +310,4 @@ cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli"
 sign_app_binaries
 
 sign_binary "target/$target_triple/release/remote_server"
-gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz
+gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$arch_suffix.gz
  
  
  
    
    @@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -euxo pipefail
+
+cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)'
  
  
  
    
    @@ -1,75 +1,18 @@
 #!/usr/bin/env bash
 
-# Based on the template in: https://docs.digitalocean.com/reference/api/spaces-api/
 bash -euo pipefail
 source script/lib/blob-store.sh
 
-allowed_targets=("linux-targz" "macos" "freebsd")
-is_allowed_target() {
-    for val in "${allowed_targets[@]}"; do
-        if [[ "$1" == "$val" ]]; then
-            return 0
-        fi
-    done
-    return 1
-}
-
-allowed_arch=("x86_64" "aarch64")
-is_allowed_arch() {
-    for val in "${allowed_arch[@]}"; do
-        if [[ "$1" == "$val" ]]; then
-            return 0
-        fi
-    done
-    return 1
-}
-
-if is_allowed_target "$1"; then
-    target="$1"
-else
-    echo "Error: Target '$1' is not allowed"
-    echo "Usage: $0 [${allowed_targets[*]}] {arch}"
-    exit 1
-fi
-if is_allowed_arch "$2"; then
-    arch="$2"
-else
-    echo "Error: Arch '$2' is not allowed"
-    echo "Usage: $0 $1 [${allowed_arch[*]}]"
-    exit 1
-fi
-echo "Uploading nightly for target: $target $arch"
-
 bucket_name="zed-nightly-host"
 
-sha=$(git rev-parse HEAD)
-echo ${sha} > target/latest-sha
 
-find target -type f -name "zed-remote-server-*.gz" -print0 | while IFS= read -r -d '' file_to_upload; do
+for file_to_upload in ./release-artifacts/*; do
+    [ -f "$file_to_upload" ] || continue
     upload_to_blob_store $bucket_name "$file_to_upload" "nightly/$(basename "$file_to_upload")"
+    upload_to_blob_store $bucket_name "$file_to_upload" "${GITHUB_SHA}/$(basename "$file_to_upload")"
     rm -f "$file_to_upload"
 done
 
-case "$target" in
-    macos)
-        upload_to_blob_store $bucket_name "target/$arch-apple-darwin/release/Zed.dmg" "nightly/Zed-$arch.dmg"
-        upload_to_blob_store $bucket_name "target/latest-sha" "nightly/latest-sha"
-        rm -f "target/$arch-apple-darwin/release/Zed.dmg" "target/release/Zed.dmg"
-        rm -f "target/latest-sha"
-        ;;
-    linux-targz)
-        find . -type f -name "zed-*.tar.gz" -print0 | while IFS= read -r -d '' file_to_upload; do
-            upload_to_blob_store $bucket_name "$file_to_upload" "nightly/$(basename "$file_to_upload")"
-            rm -f "$file_to_upload"
-        done
-        upload_to_blob_store $bucket_name "target/latest-sha" "nightly/latest-sha-linux-targz"
-        rm -f "target/latest-sha"
-        ;;
-    freebsd)
-        echo "No freebsd client build (yet)."
-        ;;
-    *)
-        echo "Error: Unknown target '$target'"
-        exit 1
-        ;;
-esac
+sha=$(git rev-parse HEAD)
+echo -n ${sha} > ./release-artifacts/latest-sha
+upload_to_blob_store $bucket_name "release-artifacts/latest-sha" "nightly/latest-sha"
  
  
  
    
    @@ -413,10 +413,26 @@ fn triage_test(
     }
 }
 
+/// Try to find the hyperfine binary the user has installed.
+fn hyp_binary() -> Option<Command> {
+    const HYP_PATH: &str = "hyperfine";
+    const HYP_HOME: &str = "~/.cargo/bin/hyperfine";
+    if Command::new(HYP_PATH).output().is_err() {
+        if Command::new(HYP_HOME).output().is_err() {
+            None
+        } else {
+            Some(Command::new(HYP_HOME))
+        }
+    } else {
+        Some(Command::new(HYP_PATH))
+    }
+}
+
 /// Profiles a given test with hyperfine, returning the mean and standard deviation
 /// for its runtime. If the test errors, returns `None` instead.
 fn hyp_profile(t_bin: &str, t_name: &str, iterations: NonZero<usize>) -> Option<Timings> {
-    let mut perf_cmd = Command::new("hyperfine");
+    let mut perf_cmd = hyp_binary().expect("Couldn't find the Hyperfine binary on the system");
+
     // Warm up the cache and print markdown output to stdout, which we parse.
     perf_cmd.args([
         "--style",
  
  
  
    
    @@ -10,6 +10,7 @@ mod release_nightly;
 mod run_bundling;
 
 mod release;
+mod run_agent_evals;
 mod run_tests;
 mod runners;
 mod steps;
@@ -28,6 +29,8 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
         ("run_tests.yml", run_tests::run_tests()),
         ("release.yml", release::release()),
         ("compare_perf.yml", compare_perf::compare_perf()),
+        ("run_unit_evals.yml", run_agent_evals::run_unit_evals()),
+        ("run_agent_evals.yml", run_agent_evals::run_agent_evals()),
     ];
     fs::create_dir_all(dir)
         .with_context(|| format!("Failed to create directory: {}", dir.display()))?;
  
  
  
    
    @@ -1,9 +1,11 @@
 use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow};
 
 use crate::tasks::workflows::{
-    run_bundling, run_tests, runners,
-    steps::{self, NamedJob, dependant_job, named, release_job},
-    vars,
+    run_bundling::{bundle_linux, bundle_mac, bundle_windows},
+    run_tests,
+    runners::{self, Arch},
+    steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job},
+    vars::{self, assets},
 };
 
 pub(crate) fn release() -> Workflow {
@@ -15,12 +17,12 @@ pub(crate) fn release() -> Workflow {
     let create_draft_release = create_draft_release();
 
     let bundle = ReleaseBundleJobs {
-        linux_arm64: bundle_linux_arm64(&[&linux_tests, &check_scripts]),
-        linux_x86_64: bundle_linux_x86_64(&[&linux_tests, &check_scripts]),
-        mac_arm64: bundle_mac_arm64(&[&macos_tests, &check_scripts]),
-        mac_x86_64: bundle_mac_x86_64(&[&macos_tests, &check_scripts]),
-        windows_arm64: bundle_windows_arm64(&[&windows_tests, &check_scripts]),
-        windows_x86_64: bundle_windows_x86_64(&[&windows_tests, &check_scripts]),
+        linux_aarch64: bundle_linux(Arch::AARCH64, None, &[&linux_tests, &check_scripts]),
+        linux_x86_64: bundle_linux(Arch::X86_64, None, &[&linux_tests, &check_scripts]),
+        mac_aarch64: bundle_mac(Arch::AARCH64, None, &[&macos_tests, &check_scripts]),
+        mac_x86_64: bundle_mac(Arch::X86_64, None, &[&macos_tests, &check_scripts]),
+        windows_aarch64: bundle_windows(Arch::AARCH64, None, &[&windows_tests, &check_scripts]),
+        windows_x86_64: bundle_windows(Arch::X86_64, None, &[&windows_tests, &check_scripts]),
     };
 
     let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle);
@@ -31,47 +33,68 @@ pub(crate) fn release() -> Workflow {
         .on(Event::default().push(Push::default().tags(vec!["v*".to_string()])))
         .concurrency(vars::one_workflow_per_non_main_branch())
         .add_env(("CARGO_TERM_COLOR", "always"))
-        .add_env(("CARGO_INCREMENTAL", "0"))
         .add_env(("RUST_BACKTRACE", "1"))
-        .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
-        .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT))
         .add_job(macos_tests.name, macos_tests.job)
         .add_job(linux_tests.name, linux_tests.job)
         .add_job(windows_tests.name, windows_tests.job)
         .add_job(check_scripts.name, check_scripts.job)
         .add_job(create_draft_release.name, create_draft_release.job)
-        .add_job(bundle.linux_arm64.name, bundle.linux_arm64.job)
-        .add_job(bundle.linux_x86_64.name, bundle.linux_x86_64.job)
-        .add_job(bundle.mac_arm64.name, bundle.mac_arm64.job)
-        .add_job(bundle.mac_x86_64.name, bundle.mac_x86_64.job)
-        .add_job(bundle.windows_arm64.name, bundle.windows_arm64.job)
-        .add_job(bundle.windows_x86_64.name, bundle.windows_x86_64.job)
+        .map(|mut workflow| {
+            for job in bundle.into_jobs() {
+                workflow = workflow.add_job(job.name, job.job);
+            }
+            workflow
+        })
         .add_job(upload_release_assets.name, upload_release_assets.job)
         .add_job(auto_release_preview.name, auto_release_preview.job)
 }
 
-struct ReleaseBundleJobs {
-    linux_arm64: NamedJob,
-    linux_x86_64: NamedJob,
-    mac_arm64: NamedJob,
-    mac_x86_64: NamedJob,
-    windows_arm64: NamedJob,
-    windows_x86_64: NamedJob,
+pub(crate) struct ReleaseBundleJobs {
+    pub linux_aarch64: NamedJob,
+    pub linux_x86_64: NamedJob,
+    pub mac_aarch64: NamedJob,
+    pub mac_x86_64: NamedJob,
+    pub windows_aarch64: NamedJob,
+    pub windows_x86_64: NamedJob,
 }
 
-fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob {
-    fn create_sentry_release() -> Step<Use> {
-        named::uses(
-            "getsentry",
-            "action-release",
-            "526942b68292201ac6bbb99b9a0747d4abee354c", // v3
-        )
-        .add_env(("SENTRY_ORG", "zed-dev"))
-        .add_env(("SENTRY_PROJECT", "zed"))
-        .add_env(("SENTRY_AUTH_TOKEN", "${{ secrets.SENTRY_AUTH_TOKEN }}"))
-        .add_with(("environment", "production"))
+impl ReleaseBundleJobs {
+    pub fn jobs(&self) -> Vec<&NamedJob> {
+        vec![
+            &self.linux_aarch64,
+            &self.linux_x86_64,
+            &self.mac_aarch64,
+            &self.mac_x86_64,
+            &self.windows_aarch64,
+            &self.windows_x86_64,
+        ]
     }
 
+    pub fn into_jobs(self) -> Vec<NamedJob> {
+        vec![
+            self.linux_aarch64,
+            self.linux_x86_64,
+            self.mac_aarch64,
+            self.mac_x86_64,
+            self.windows_aarch64,
+            self.windows_x86_64,
+        ]
+    }
+}
+
+pub(crate) fn create_sentry_release() -> Step<Use> {
+    named::uses(
+        "getsentry",
+        "action-release",
+        "526942b68292201ac6bbb99b9a0747d4abee354c", // v3
+    )
+    .add_env(("SENTRY_ORG", "zed-dev"))
+    .add_env(("SENTRY_PROJECT", "zed"))
+    .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN))
+    .add_with(("environment", "production"))
+}
+
+fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob {
     named::job(
         dependant_job(deps)
             .runs_on(runners::LINUX_SMALL)
@@ -86,84 +109,44 @@ fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob {
                 steps::script(
                     r#"gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false"#,
                 )
-                .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")),
+                .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)),
             )
             .add_step(create_sentry_release()),
     )
 }
 
-fn upload_release_assets(deps: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs) -> NamedJob {
-    fn download_workflow_artifacts() -> Step<Use> {
-        named::uses(
-            "actions",
-            "download-artifact",
-            "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0
-        )
-        .add_with(("path", "./artifacts/"))
-    }
+pub(crate) fn download_workflow_artifacts() -> Step<Use> {
+    named::uses(
+        "actions",
+        "download-artifact",
+        "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0
+    )
+    .add_with(("path", "./artifacts/"))
+}
 
-    fn prep_release_artifacts(bundle: &ReleaseBundleJobs) -> Step<Run> {
-        let assets = [
-            (&bundle.mac_x86_64.name, "zed", "Zed-x86_64.dmg"),
-            (&bundle.mac_arm64.name, "zed", "Zed-aarch64.dmg"),
-            (&bundle.windows_x86_64.name, "zed", "Zed-x86_64.exe"),
-            (&bundle.windows_arm64.name, "zed", "Zed-aarch64.exe"),
-            (&bundle.linux_arm64.name, "zed", "zed-linux-aarch64.tar.gz"),
-            (&bundle.linux_x86_64.name, "zed", "zed-linux-x86_64.tar.gz"),
-            (
-                &bundle.linux_x86_64.name,
-                "remote-server",
-                "zed-remote-server-linux-x86_64.gz",
-            ),
-            (
-                &bundle.linux_arm64.name,
-                "remote-server",
-                "zed-remote-server-linux-aarch64.gz",
-            ),
-            (
-                &bundle.mac_x86_64.name,
-                "remote-server",
-                "zed-remote-server-macos-x86_64.gz",
-            ),
-            (
-                &bundle.mac_arm64.name,
-                "remote-server",
-                "zed-remote-server-macos-aarch64.gz",
-            ),
-        ];
-
-        let mut script_lines = vec!["mkdir -p release-artifacts/\n".to_string()];
-        for (job_name, artifact_kind, release_artifact_name) in assets {
-            let artifact_path =
-                ["${{ needs.", job_name, ".outputs.", artifact_kind, " }}"].join("");
-            let mv_command = format!(
-                "mv ./artifacts/{artifact_path}/* release-artifacts/{release_artifact_name}"
-            );
-            script_lines.push(mv_command)
-        }
-
-        named::bash(&script_lines.join("\n"))
+pub(crate) fn prep_release_artifacts() -> Step<Run> {
+    let mut script_lines = vec!["mkdir -p release-artifacts/\n".to_string()];
+    for asset in assets::all() {
+        let mv_command = format!("mv ./artifacts/{asset}/{asset} release-artifacts/{asset}");
+        script_lines.push(mv_command)
     }
 
+    named::bash(&script_lines.join("\n"))
+}
+
+fn upload_release_assets(deps: &[&NamedJob], bundle: &ReleaseBundleJobs) -> NamedJob {
     let mut deps = deps.to_vec();
-    deps.extend([
-        &bundle_jobs.linux_arm64,
-        &bundle_jobs.linux_x86_64,
-        &bundle_jobs.mac_arm64,
-        &bundle_jobs.mac_x86_64,
-        &bundle_jobs.windows_arm64,
-        &bundle_jobs.windows_x86_64,
-    ]);
+    deps.extend(bundle.jobs());
 
     named::job(
         dependant_job(&deps)
             .runs_on(runners::LINUX_MEDIUM)
             .add_step(download_workflow_artifacts())
             .add_step(steps::script("ls -lR ./artifacts"))
-            .add_step(prep_release_artifacts(bundle_jobs))
+            .add_step(prep_release_artifacts())
             .add_step(
                 steps::script("gh release upload \"$GITHUB_REF_NAME\" --repo=zed-industries/zed release-artifacts/*")
-                    .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")),
+                    .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)),
             ),
     )
 }
@@ -177,7 +160,7 @@ fn create_draft_release() -> NamedJob {
 
     fn create_release() -> Step<Run> {
         named::bash("script/create-draft-release target/release-notes.md")
-            .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}"))
+            .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN))
     }
 
     named::job(
@@ -199,25 +182,3 @@ fn create_draft_release() -> NamedJob {
             .add_step(create_release()),
     )
 }
-
-fn bundle_mac_x86_64(deps: &[&NamedJob]) -> NamedJob {
-    named::job(run_bundling::bundle_mac_job(runners::Arch::X86_64, deps))
-}
-fn bundle_mac_arm64(deps: &[&NamedJob]) -> NamedJob {
-    named::job(run_bundling::bundle_mac_job(runners::Arch::ARM64, deps))
-}
-fn bundle_linux_x86_64(deps: &[&NamedJob]) -> NamedJob {
-    named::job(run_bundling::bundle_linux_job(runners::Arch::X86_64, deps))
-}
-fn bundle_linux_arm64(deps: &[&NamedJob]) -> NamedJob {
-    named::job(run_bundling::bundle_linux_job(runners::Arch::ARM64, deps))
-}
-fn bundle_windows_x86_64(deps: &[&NamedJob]) -> NamedJob {
-    named::job(run_bundling::bundle_windows_job(
-        runners::Arch::X86_64,
-        deps,
-    ))
-}
-fn bundle_windows_arm64(deps: &[&NamedJob]) -> NamedJob {
-    named::job(run_bundling::bundle_windows_job(runners::Arch::ARM64, deps))
-}
  
  
  
    
    @@ -1,46 +1,33 @@
 use crate::tasks::workflows::{
     nix_build::build_nix,
-    run_bundling::{bundle_mac, bundle_windows},
+    release::{
+        ReleaseBundleJobs, create_sentry_release, download_workflow_artifacts,
+        prep_release_artifacts,
+    },
+    run_bundling::{bundle_linux, bundle_mac, bundle_windows},
     run_tests::run_platform_tests,
-    runners::{Arch, Platform},
-    steps::NamedJob,
-    vars::{mac_bundle_envs, windows_bundle_envs},
+    runners::{Arch, Platform, ReleaseChannel},
+    steps::{FluentBuilder, NamedJob},
 };
 
 use super::{runners, steps, steps::named, vars};
 use gh_workflow::*;
-use indexmap::IndexMap;
 
 /// Generates the release_nightly.yml workflow
 pub fn release_nightly() -> Workflow {
-    let env: IndexMap<_, _> = [
-        ("CARGO_TERM_COLOR", "always"),
-        ("CARGO_INCREMENTAL", "0"),
-        ("RUST_BACKTRACE", "1"),
-        ("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED),
-        ("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT),
-        (
-            "DIGITALOCEAN_SPACES_ACCESS_KEY",
-            vars::DIGITALOCEAN_SPACES_ACCESS_KEY,
-        ),
-        (
-            "DIGITALOCEAN_SPACES_SECRET_KEY",
-            vars::DIGITALOCEAN_SPACES_SECRET_KEY,
-        ),
-    ]
-    .into_iter()
-    .map(|(key, value)| (key.into(), value.into()))
-    .collect();
-
     let style = check_style();
-    let tests = run_platform_tests(Platform::Mac);
-    let windows_tests = run_platform_tests(Platform::Windows);
-    let bundle_mac_x86 = bundle_mac_nightly(Arch::X86_64, &[&style, &tests]);
-    let bundle_mac_arm = bundle_mac_nightly(Arch::ARM64, &[&style, &tests]);
-    let linux_x86 = bundle_linux_nightly(Arch::X86_64, &[&style, &tests]);
-    let linux_arm = bundle_linux_nightly(Arch::ARM64, &[&style, &tests]);
-    let windows_x86 = bundle_windows_nightly(Arch::X86_64, &[&style, &windows_tests]);
-    let windows_arm = bundle_windows_nightly(Arch::ARM64, &[&style, &windows_tests]);
+    // run only on windows as that's our fastest platform right now.
+    let tests = run_platform_tests(Platform::Windows);
+    let nightly = Some(ReleaseChannel::Nightly);
+
+    let bundle = ReleaseBundleJobs {
+        linux_aarch64: bundle_linux(Arch::AARCH64, nightly, &[&style, &tests]),
+        linux_x86_64: bundle_linux(Arch::X86_64, nightly, &[&style, &tests]),
+        mac_aarch64: bundle_mac(Arch::AARCH64, nightly, &[&style, &tests]),
+        mac_x86_64: bundle_mac(Arch::X86_64, nightly, &[&style, &tests]),
+        windows_aarch64: bundle_windows(Arch::AARCH64, nightly, &[&style, &tests]),
+        windows_x86_64: bundle_windows(Arch::X86_64, nightly, &[&style, &tests]),
+    };
 
     let nix_linux_x86 = build_nix(
         Platform::Linux,
@@ -51,35 +38,28 @@ pub fn release_nightly() -> Workflow {
     );
     let nix_mac_arm = build_nix(
         Platform::Mac,
-        Arch::ARM64,
+        Arch::AARCH64,
         "default",
         None,
         &[&style, &tests],
     );
-    let update_nightly_tag = update_nightly_tag_job(&[
-        &bundle_mac_x86,
-        &bundle_mac_arm,
-        &linux_x86,
-        &linux_arm,
-        &windows_x86,
-        &windows_arm,
-    ]);
+    let update_nightly_tag = update_nightly_tag_job(&bundle);
 
     named::workflow()
         .on(Event::default()
             // Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
             .schedule([Schedule::new("0 7 * * *")])
             .push(Push::default().add_tag("nightly")))
-        .envs(env)
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("RUST_BACKTRACE", "1"))
         .add_job(style.name, style.job)
         .add_job(tests.name, tests.job)
-        .add_job(windows_tests.name, windows_tests.job)
-        .add_job(bundle_mac_x86.name, bundle_mac_x86.job)
-        .add_job(bundle_mac_arm.name, bundle_mac_arm.job)
-        .add_job(linux_x86.name, linux_x86.job)
-        .add_job(linux_arm.name, linux_arm.job)
-        .add_job(windows_x86.name, windows_x86.job)
-        .add_job(windows_arm.name, windows_arm.job)
+        .map(|mut workflow| {
+            for job in bundle.into_jobs() {
+                workflow = workflow.add_job(job.name, job.job);
+            }
+            workflow
+        })
         .add_job(nix_linux_x86.name, nix_linux_x86.job)
         .add_job(nix_mac_arm.name, nix_mac_arm.job)
         .add_job(update_nightly_tag.name, update_nightly_tag.job)
@@ -112,62 +92,7 @@ fn release_job(deps: &[&NamedJob]) -> Job {
     }
 }
 
-fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
-    let platform = Platform::Mac;
-    NamedJob {
-        name: format!("bundle_mac_nightly_{arch}"),
-        job: release_job(deps)
-            .runs_on(runners::MAC_DEFAULT)
-            .envs(mac_bundle_envs())
-            .add_step(steps::checkout_repo())
-            .add_step(steps::setup_node())
-            .add_step(steps::setup_sentry())
-            .add_step(steps::clear_target_dir_if_large(platform))
-            .add_step(set_release_channel_to_nightly(platform))
-            .add_step(bundle_mac(arch))
-            .add_step(upload_zed_nightly(platform, arch)),
-    }
-}
-
-fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
-    let platform = Platform::Linux;
-    let mut job = steps::release_job(deps)
-        .runs_on(arch.linux_bundler())
-        .add_step(steps::checkout_repo())
-        .add_step(steps::setup_sentry())
-        .add_step(steps::script("./script/linux"));
-
-    // todo(ci) can we do this on arm too?
-    if arch == Arch::X86_64 {
-        job = job.add_step(steps::script("./script/install-mold"));
-    }
-    job = job
-        .add_step(steps::clear_target_dir_if_large(platform))
-        .add_step(set_release_channel_to_nightly(platform))
-        .add_step(steps::script("./script/bundle-linux"))
-        .add_step(upload_zed_nightly(platform, arch));
-    NamedJob {
-        name: format!("bundle_linux_nightly_{arch}"),
-        job,
-    }
-}
-
-fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
-    let platform = Platform::Windows;
-    NamedJob {
-        name: format!("bundle_windows_nightly_{arch}"),
-        job: steps::release_job(deps)
-            .runs_on(runners::WINDOWS_DEFAULT)
-            .envs(windows_bundle_envs())
-            .add_step(steps::checkout_repo())
-            .add_step(steps::setup_sentry())
-            .add_step(set_release_channel_to_nightly(platform))
-            .add_step(bundle_windows(arch))
-            .add_step(upload_zed_nightly(platform, arch)),
-    }
-}
-
-fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob {
+fn update_nightly_tag_job(bundle: &ReleaseBundleJobs) -> NamedJob {
     fn update_nightly_tag() -> Step<Run> {
         named::bash(indoc::indoc! {r#"
             if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
@@ -181,56 +106,26 @@ fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob {
         "#})
     }
 
-    fn create_sentry_release() -> Step<Use> {
-        named::uses(
-            "getsentry",
-            "action-release",
-            "526942b68292201ac6bbb99b9a0747d4abee354c", // v3
-        )
-        .add_env(("SENTRY_ORG", "zed-dev"))
-        .add_env(("SENTRY_PROJECT", "zed"))
-        .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN))
-        .add_with(("environment", "production"))
-    }
-
     NamedJob {
         name: "update_nightly_tag".to_owned(),
-        job: steps::release_job(deps)
-            .runs_on(runners::LINUX_SMALL)
+        job: steps::release_job(&bundle.jobs())
+            .runs_on(runners::LINUX_MEDIUM)
             .add_step(steps::checkout_repo().add_with(("fetch-depth", 0)))
+            .add_step(download_workflow_artifacts())
+            .add_step(steps::script("ls -lR ./artifacts"))
+            .add_step(prep_release_artifacts())
+            .add_step(
+                steps::script("./script/upload-nightly")
+                    .add_env((
+                        "DIGITALOCEAN_SPACES_ACCESS_KEY",
+                        vars::DIGITALOCEAN_SPACES_ACCESS_KEY,
+                    ))
+                    .add_env((
+                        "DIGITALOCEAN_SPACES_SECRET_KEY",
+                        vars::DIGITALOCEAN_SPACES_SECRET_KEY,
+                    )),
+            )
             .add_step(update_nightly_tag())
             .add_step(create_sentry_release()),
     }
 }
-
-fn set_release_channel_to_nightly(platform: Platform) -> Step<Run> {
-    match platform {
-        Platform::Linux | Platform::Mac => named::bash(indoc::indoc! {r#"
-            set -eu
-            version=$(git rev-parse --short HEAD)
-            echo "Publishing version: ${version} on release channel nightly"
-            echo "nightly" > crates/zed/RELEASE_CHANNEL
-        "#}),
-        Platform::Windows => named::pwsh(indoc::indoc! {r#"
-            $ErrorActionPreference = "Stop"
-            $version = git rev-parse --short HEAD
-            Write-Host "Publishing version: $version on release channel nightly"
-            "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
-        "#})
-        .working_directory("${{ env.ZED_WORKSPACE }}"),
-    }
-}
-
-fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step<Run> {
-    match platform {
-        Platform::Linux => named::bash(&format!("script/upload-nightly linux-targz {arch}")),
-        Platform::Mac => named::bash(&format!("script/upload-nightly macos {arch}")),
-        Platform::Windows => {
-            let cmd = match arch {
-                Arch::X86_64 => "script/upload-nightly.ps1 -Architecture x86_64",
-                Arch::ARM64 => "script/upload-nightly.ps1 -Architecture aarch64",
-            };
-            named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}")
-        }
-    }
-}
  
  
  
    
    @@ -0,0 +1,107 @@
+use gh_workflow::{
+    Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow,
+    WorkflowDispatch,
+};
+
+use crate::tasks::workflows::{
+    runners::{self, Platform},
+    steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config},
+    vars,
+};
+
+pub(crate) fn run_agent_evals() -> Workflow {
+    let agent_evals = agent_evals();
+
+    named::workflow()
+        .on(Event::default()
+            .schedule([Schedule::default().cron("0 0 * * *")])
+            .pull_request(PullRequest::default().add_branch("**").types([
+                PullRequestType::Synchronize,
+                PullRequestType::Reopened,
+                PullRequestType::Labeled,
+            ]))
+            .workflow_dispatch(WorkflowDispatch::default()))
+        .concurrency(vars::one_workflow_per_non_main_branch())
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("CARGO_INCREMENTAL", 0))
+        .add_env(("RUST_BACKTRACE", 1))
+        .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY))
+        .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
+        .add_env(("ZED_EVAL_TELEMETRY", 1))
+        .add_job(agent_evals.name, agent_evals.job)
+}
+
+fn agent_evals() -> NamedJob {
+    fn run_eval() -> Step<Run> {
+        named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1")
+    }
+
+    named::job(
+        Job::default()
+            .cond(Expression::new(indoc::indoc!{r#"
+                github.repository_owner == 'zed-industries' &&
+                (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
+            "#}))
+            .runs_on(runners::LINUX_DEFAULT)
+            .timeout_minutes(60_u32)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::cache_rust_dependencies_namespace())
+            .map(steps::install_linux_dependencies)
+            .add_step(setup_cargo_config(Platform::Linux))
+            .add_step(steps::script("cargo build --package=eval"))
+            .add_step(run_eval())
+            .add_step(steps::cleanup_cargo_config(Platform::Linux))
+    )
+}
+
+pub(crate) fn run_unit_evals() -> Workflow {
+    let unit_evals = unit_evals();
+
+    named::workflow()
+        .on(Event::default()
+            .schedule([
+                // GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
+                Schedule::default().cron("47 1 * * 2"),
+            ])
+            .workflow_dispatch(WorkflowDispatch::default()))
+        .concurrency(vars::one_workflow_per_non_main_branch())
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("CARGO_INCREMENTAL", 0))
+        .add_env(("RUST_BACKTRACE", 1))
+        .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
+        .add_job(unit_evals.name, unit_evals.job)
+}
+
+fn unit_evals() -> NamedJob {
+    fn send_failure_to_slack() -> Step<Use> {
+        named::uses(
+            "slackapi",
+            "slack-github-action",
+            "b0fa283ad8fea605de13dc3f449259339835fc52",
+        )
+        .if_condition(Expression::new("${{ failure() }}"))
+        .add_with(("method", "chat.postMessage"))
+        .add_with(("token", vars::SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN))
+        .add_with(("payload", indoc::indoc!{r#"
+            channel: C04UDRNNJFQ
+            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+        "#}))
+    }
+
+    named::job(
+        Job::default()
+            .runs_on(runners::LINUX_DEFAULT)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_cargo_config(Platform::Linux))
+            .add_step(steps::cache_rust_dependencies_namespace())
+            .map(steps::install_linux_dependencies)
+            .add_step(steps::cargo_install_nextest(Platform::Linux))
+            .add_step(steps::clear_target_dir_if_large(Platform::Linux))
+            .add_step(
+                steps::script("./script/run-unit-evals")
+                    .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)),
+            )
+            .add_step(send_failure_to_slack())
+            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
+    )
+}
  
  
  
    
    @@ -1,13 +1,24 @@
+use std::path::Path;
+
 use crate::tasks::workflows::{
+    release::ReleaseBundleJobs,
+    runners::{Arch, Platform, ReleaseChannel},
     steps::{FluentBuilder, NamedJob, dependant_job, named},
-    vars::{mac_bundle_envs, windows_bundle_envs},
+    vars::{assets, bundle_envs},
 };
 
-use super::{runners, steps, vars};
+use super::{runners, steps};
 use gh_workflow::*;
-use indexmap::IndexMap;
 
 pub fn run_bundling() -> Workflow {
+    let bundle = ReleaseBundleJobs {
+        linux_aarch64: bundle_linux(Arch::AARCH64, None, &[]),
+        linux_x86_64: bundle_linux(Arch::X86_64, None, &[]),
+        mac_aarch64: bundle_mac(Arch::AARCH64, None, &[]),
+        mac_x86_64: bundle_mac(Arch::X86_64, None, &[]),
+        windows_aarch64: bundle_windows(Arch::AARCH64, None, &[]),
+        windows_x86_64: bundle_windows(Arch::X86_64, None, &[]),
+    };
     named::workflow()
         .on(Event::default().pull_request(
             PullRequest::default().types([PullRequestType::Labeled, PullRequestType::Synchronize]),
@@ -19,34 +30,13 @@ pub fn run_bundling() -> Workflow {
             .cancel_in_progress(true),
         )
         .add_env(("CARGO_TERM_COLOR", "always"))
-        .add_env(("CARGO_INCREMENTAL", "0"))
         .add_env(("RUST_BACKTRACE", "1"))
-        .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
-        .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT))
-        .add_job(
-            "bundle_mac_x86_64",
-            bundle_mac_job(runners::Arch::X86_64, &[]),
-        )
-        .add_job(
-            "bundle_mac_arm64",
-            bundle_mac_job(runners::Arch::ARM64, &[]),
-        )
-        .add_job(
-            "bundle_linux_x86_64",
-            bundle_linux_job(runners::Arch::X86_64, &[]),
-        )
-        .add_job(
-            "bundle_linux_arm64",
-            bundle_linux_job(runners::Arch::ARM64, &[]),
-        )
-        .add_job(
-            "bundle_windows_x86_64",
-            bundle_windows_job(runners::Arch::X86_64, &[]),
-        )
-        .add_job(
-            "bundle_windows_arm64",
-            bundle_windows_job(runners::Arch::ARM64, &[]),
-        )
+        .map(|mut workflow| {
+            for job in bundle.into_jobs() {
+                workflow = workflow.add_job(job.name, job.job);
+            }
+            workflow
+        })
 }
 
 fn bundle_job(deps: &[&NamedJob]) -> Job {
@@ -59,95 +49,145 @@ fn bundle_job(deps: &[&NamedJob]) -> Job {
         .timeout_minutes(60u32)
 }
 
-pub(crate) fn bundle_mac_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job {
-    use vars::GITHUB_SHA;
-    let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.dmg");
-    let remote_server_artifact_name = format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz");
-    bundle_job(deps)
-        .runs_on(runners::MAC_DEFAULT)
-        .envs(mac_bundle_envs())
-        .add_step(steps::checkout_repo())
-        .add_step(steps::setup_node())
-        .add_step(steps::setup_sentry())
-        .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac))
-        .add_step(bundle_mac(arch))
-        .add_step(steps::upload_artifact(
-            &artifact_name,
-            &format!("target/{arch}-apple-darwin/release/Zed.dmg"),
-        ))
-        .add_step(steps::upload_artifact(
-            &remote_server_artifact_name,
-            &format!("target/zed-remote-server-macos-{arch}.gz"),
-        ))
-        .outputs(
-            [
-                ("zed".to_string(), artifact_name),
-                ("remote-server".to_string(), remote_server_artifact_name),
-            ]
-            .into_iter()
-            .collect::<IndexMap<_, _>>(),
+pub(crate) fn bundle_mac(
+    arch: Arch,
+    release_channel: Option<ReleaseChannel>,
+    deps: &[&NamedJob],
+) -> NamedJob {
+    pub fn bundle_mac(arch: Arch) -> Step<Run> {
+        named::bash(&format!("./script/bundle-mac {arch}-apple-darwin"))
+    }
+    let platform = Platform::Mac;
+    let artifact_name = match arch {
+        Arch::X86_64 => assets::MAC_X86_64,
+        Arch::AARCH64 => assets::MAC_AARCH64,
+    };
+    let remote_server_artifact_name = match arch {
+        Arch::X86_64 => assets::REMOTE_SERVER_MAC_X86_64,
+        Arch::AARCH64 => assets::REMOTE_SERVER_MAC_AARCH64,
+    };
+    NamedJob {
+        name: format!("bundle_mac_{arch}"),
+        job: bundle_job(deps)
+            .runs_on(runners::MAC_DEFAULT)
+            .envs(bundle_envs(platform))
+            .add_step(steps::checkout_repo())
+            .when_some(release_channel, |job, release_channel| {
+                job.add_step(set_release_channel(platform, release_channel))
+            })
+            .add_step(steps::setup_node())
+            .add_step(steps::setup_sentry())
+            .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac))
+            .add_step(bundle_mac(arch))
+            .add_step(upload_artifact(&format!(
+                "target/{arch}-apple-darwin/release/{artifact_name}"
+            )))
+            .add_step(upload_artifact(&format!(
+                "target/{remote_server_artifact_name}"
+            ))),
+    }
+}
+
+pub fn upload_artifact(path: &str) -> Step<Use> {
+    let name = Path::new(path).file_name().unwrap().to_str().unwrap();
+    Step::new(format!("@actions/upload-artifact {}", name))
+        .uses(
+            "actions",
+            "upload-artifact",
+            "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5
         )
+        // N.B. "name" is the name for the asset. The uploaded
+        // file retains its filename.
+        .add_with(("name", name))
+        .add_with(("path", path))
+        .add_with(("if-no-files-found", "error"))
 }
 
-pub fn bundle_mac(arch: runners::Arch) -> Step<Run> {
-    named::bash(&format!("./script/bundle-mac {arch}-apple-darwin"))
+pub(crate) fn bundle_linux(
+    arch: Arch,
+    release_channel: Option<ReleaseChannel>,
+    deps: &[&NamedJob],
+) -> NamedJob {
+    let platform = Platform::Linux;
+    let artifact_name = match arch {
+        Arch::X86_64 => assets::LINUX_X86_64,
+        Arch::AARCH64 => assets::LINUX_AARCH64,
+    };
+    let remote_server_artifact_name = match arch {
+        Arch::X86_64 => assets::REMOTE_SERVER_LINUX_X86_64,
+        Arch::AARCH64 => assets::REMOTE_SERVER_LINUX_AARCH64,
+    };
+    NamedJob {
+        name: format!("bundle_linux_{arch}"),
+        job: bundle_job(deps)
+            .runs_on(arch.linux_bundler())
+            .envs(bundle_envs(platform))
+            .add_step(steps::checkout_repo())
+            .when_some(release_channel, |job, release_channel| {
+                job.add_step(set_release_channel(platform, release_channel))
+            })
+            .add_step(steps::setup_sentry())
+            .map(steps::install_linux_dependencies)
+            .add_step(steps::script("./script/bundle-linux"))
+            .add_step(upload_artifact(&format!("target/release/{artifact_name}")))
+            .add_step(upload_artifact(&format!(
+                "target/{remote_server_artifact_name}"
+            ))),
+    }
 }
 
-pub(crate) fn bundle_linux_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job {
-    let artifact_name = format!("zed-{}-{}.tar.gz", vars::GITHUB_SHA, arch.triple());
-    let remote_server_artifact_name = format!(
-        "zed-remote-server-{}-{}.tar.gz",
-        vars::GITHUB_SHA,
-        arch.triple()
-    );
-    bundle_job(deps)
-        .runs_on(arch.linux_bundler())
-        .add_step(steps::checkout_repo())
-        .add_step(steps::setup_sentry())
-        .map(steps::install_linux_dependencies)
-        .add_step(steps::script("./script/bundle-linux"))
-        .add_step(steps::upload_artifact(
-            &artifact_name,
-            "target/release/zed-*.tar.gz",
-        ))
-        .add_step(steps::upload_artifact(
-            &remote_server_artifact_name,
-            "target/zed-remote-server-*.gz",
-        ))
-        .outputs(
-            [
-                ("zed".to_string(), artifact_name),
-                ("remote-server".to_string(), remote_server_artifact_name),
-            ]
-            .into_iter()
-            .collect::<IndexMap<_, _>>(),
-        )
+pub(crate) fn bundle_windows(
+    arch: Arch,
+    release_channel: Option<ReleaseChannel>,
+    deps: &[&NamedJob],
+) -> NamedJob {
+    let platform = Platform::Windows;
+    pub fn bundle_windows(arch: Arch) -> Step<Run> {
+        let step = match arch {
+            Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"),
+            Arch::AARCH64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"),
+        };
+        step.working_directory("${{ env.ZED_WORKSPACE }}")
+    }
+    let artifact_name = match arch {
+        Arch::X86_64 => assets::WINDOWS_X86_64,
+        Arch::AARCH64 => assets::WINDOWS_AARCH64,
+    };
+    NamedJob {
+        name: format!("bundle_windows_{arch}"),
+        job: bundle_job(deps)
+            .runs_on(runners::WINDOWS_DEFAULT)
+            .envs(bundle_envs(platform))
+            .add_step(steps::checkout_repo())
+            .when_some(release_channel, |job, release_channel| {
+                job.add_step(set_release_channel(platform, release_channel))
+            })
+            .add_step(steps::setup_sentry())
+            .add_step(bundle_windows(arch))
+            .add_step(upload_artifact(&format!("target/{artifact_name}"))),
+    }
 }
 
-pub(crate) fn bundle_windows_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job {
-    use vars::GITHUB_SHA;
-    let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.exe");
-    bundle_job(deps)
-        .runs_on(runners::WINDOWS_DEFAULT)
-        .envs(windows_bundle_envs())
-        .add_step(steps::checkout_repo())
-        .add_step(steps::setup_sentry())
-        .add_step(bundle_windows(arch))
-        .add_step(steps::upload_artifact(
-            &artifact_name,
-            "${{ env.SETUP_PATH }}",
-        ))
-        .outputs(
-            [("zed".to_string(), artifact_name)]
-                .into_iter()
-                .collect::<IndexMap<_, _>>(),
-        )
+fn set_release_channel(platform: Platform, release_channel: ReleaseChannel) -> Step<Run> {
+    match release_channel {
+        ReleaseChannel::Nightly => set_release_channel_to_nightly(platform),
+    }
 }
 
-pub fn bundle_windows(arch: runners::Arch) -> Step<Run> {
-    let step = match arch {
-        runners::Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"),
-        runners::Arch::ARM64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"),
-    };
-    step.working_directory("${{ env.ZED_WORKSPACE }}")
+fn set_release_channel_to_nightly(platform: Platform) -> Step<Run> {
+    match platform {
+        Platform::Linux | Platform::Mac => named::bash(indoc::indoc! {r#"
+            set -eu
+            version=$(git rev-parse --short HEAD)
+            echo "Publishing version: ${version} on release channel nightly"
+            echo "nightly" > crates/zed/RELEASE_CHANNEL
+        "#}),
+        Platform::Windows => named::pwsh(indoc::indoc! {r#"
+            $ErrorActionPreference = "Stop"
+            $version = git rev-parse --short HEAD
+            Write-Host "Publishing version: $version on release channel nightly"
+            "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
+        "#})
+        .working_directory("${{ env.ZED_WORKSPACE }}"),
+    }
 }
  
  
  
    
    @@ -65,7 +65,7 @@ pub(crate) fn run_tests() -> Workflow {
         )),
         should_build_nix.guard(build_nix(
             Platform::Mac,
-            Arch::ARM64,
+            Arch::AARCH64,
             "debug",
             // *don't* cache the built output
             Some("-zed-editor-[0-9.]*-nightly"),
@@ -74,7 +74,7 @@ pub(crate) fn run_tests() -> Workflow {
     ];
     let tests_pass = tests_pass(&jobs);
 
-    let mut workflow = named::workflow()
+    named::workflow()
         .add_event(Event::default()
             .push(
                 Push::default()
@@ -89,11 +89,14 @@ pub(crate) fn run_tests() -> Workflow {
         )
         .add_env(( "CARGO_TERM_COLOR", "always" ))
         .add_env(( "RUST_BACKTRACE", 1 ))
-        .add_env(( "CARGO_INCREMENTAL", 0 ));
-    for job in jobs {
-        workflow = workflow.add_job(job.name, job.job)
-    }
-    workflow.add_job(tests_pass.name, tests_pass.job)
+        .add_env(( "CARGO_INCREMENTAL", 0 ))
+        .map(|mut workflow| {
+            for job in jobs {
+                workflow = workflow.add_job(job.name, job.job)
+            }
+            workflow
+        })
+        .add_job(tests_pass.name, tests_pass.job)
 }
 
 // Generates a bash script that checks changed files against regex patterns
@@ -226,6 +229,7 @@ fn check_style() -> NamedJob {
         release_job(&[])
             .runs_on(runners::LINUX_MEDIUM)
             .add_step(steps::checkout_repo())
+            .add_step(steps::cache_rust_dependencies_namespace())
             .add_step(steps::setup_pnpm())
             .add_step(steps::script("./script/prettier"))
             .add_step(steps::script("./script/check-todos"))
@@ -273,6 +277,7 @@ fn check_dependencies() -> NamedJob {
         release_job(&[])
             .runs_on(runners::LINUX_SMALL)
             .add_step(steps::checkout_repo())
+            .add_step(steps::cache_rust_dependencies_namespace())
             .add_step(install_cargo_machete())
             .add_step(run_cargo_machete())
             .add_step(check_cargo_lock())
@@ -287,6 +292,7 @@ fn check_workspace_binaries() -> NamedJob {
             .add_step(steps::checkout_repo())
             .add_step(steps::setup_cargo_config(Platform::Linux))
             .map(steps::install_linux_dependencies)
+            .add_step(steps::cache_rust_dependencies_namespace())
             .add_step(steps::script("cargo build -p collab"))
             .add_step(steps::script("cargo build --workspace --bins --examples"))
             .add_step(steps::cleanup_cargo_config(Platform::Linux)),
@@ -309,6 +315,9 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
                 platform == Platform::Linux,
                 steps::install_linux_dependencies,
             )
+            .when(platform == Platform::Linux, |this| {
+                this.add_step(steps::cache_rust_dependencies_namespace())
+            })
             .add_step(steps::setup_node())
             .add_step(steps::clippy(platform))
             .add_step(steps::cargo_install_nextest(platform))
@@ -368,7 +377,7 @@ fn doctests() -> NamedJob {
         release_job(&[])
             .runs_on(runners::LINUX_DEFAULT)
             .add_step(steps::checkout_repo())
-            .add_step(steps::cache_rust_dependencies())
+            .add_step(steps::cache_rust_dependencies_namespace())
             .map(steps::install_linux_dependencies)
             .add_step(steps::setup_cargo_config(Platform::Linux))
             .add_step(run_doctests())
@@ -381,6 +390,7 @@ fn check_licenses() -> NamedJob {
         Job::default()
             .runs_on(runners::LINUX_SMALL)
             .add_step(steps::checkout_repo())
+            .add_step(steps::cache_rust_dependencies_namespace())
             .add_step(steps::script("./script/check-licenses"))
             .add_step(steps::script("./script/generate-licenses")),
     )
@@ -420,7 +430,7 @@ fn check_docs() -> NamedJob {
             .add_step(steps::checkout_repo())
             .add_step(steps::setup_cargo_config(Platform::Linux))
             // todo(ci): un-inline build_docs/action.yml here
-            .add_step(steps::cache_rust_dependencies())
+            .add_step(steps::cache_rust_dependencies_namespace())
             .add_step(
                 lychee_link_check("./docs/src/**/*"), // check markdown links
             )
  
  
  
    
    @@ -22,30 +22,23 @@ impl Into<gh_workflow::RunsOn> for Runner {
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum Arch {
     X86_64,
-    ARM64,
+    AARCH64,
 }
 
 impl std::fmt::Display for Arch {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Arch::X86_64 => write!(f, "x86_64"),
-            Arch::ARM64 => write!(f, "aarch64"),
+            Arch::AARCH64 => write!(f, "aarch64"),
         }
     }
 }
 
 impl Arch {
-    pub fn triple(&self) -> &'static str {
-        match self {
-            Arch::X86_64 => "x86_64-unknown-linux-gnu",
-            Arch::ARM64 => "aarch64-unknown-linux-gnu",
-        }
-    }
-
     pub fn linux_bundler(&self) -> Runner {
         match self {
             Arch::X86_64 => LINUX_X86_BUNDLER,
-            Arch::ARM64 => LINUX_ARM_BUNDLER,
+            Arch::AARCH64 => LINUX_ARM_BUNDLER,
         }
     }
 }
@@ -66,3 +59,8 @@ impl std::fmt::Display for Platform {
         }
     }
 }
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ReleaseChannel {
+    Nightly,
+}
  
  
  
    
    @@ -86,22 +86,10 @@ pub fn cleanup_cargo_config(platform: Platform) -> Step<Run> {
     step.if_condition(Expression::new("always()"))
 }
 
-pub fn upload_artifact(name: &str, path: &str) -> Step<Use> {
-    Step::new(format!("@actions/upload-artifact {}", name))
-        .uses(
-            "actions",
-            "upload-artifact",
-            "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5
-        )
-        .add_with(("name", name))
-        .add_with(("path", path))
-        .add_with(("if-no-files-found", "error"))
-}
-
 pub fn clear_target_dir_if_large(platform: Platform) -> Step<Run> {
     match platform {
         Platform::Windows => named::pwsh("./script/clear-target-dir-if-larger-than.ps1 250"),
-        Platform::Linux => named::bash("./script/clear-target-dir-if-larger-than 100"),
+        Platform::Linux => named::bash("./script/clear-target-dir-if-larger-than 250"),
         Platform::Mac => named::bash("./script/clear-target-dir-if-larger-than 300"),
     }
 }
@@ -113,13 +101,8 @@ pub(crate) fn clippy(platform: Platform) -> Step<Run> {
     }
 }
 
-pub(crate) fn cache_rust_dependencies() -> Step<Use> {
-    named::uses(
-        "swatinem",
-        "rust-cache",
-        "9d47c6ad4b02e050fd481d890b2ea34778fd09d6", // v2
-    )
-    .with(("save-if", "${{ github.ref == 'refs/heads/main' }}"))
+pub(crate) fn cache_rust_dependencies_namespace() -> Step<Use> {
+    named::uses("namespacelabs", "nscloud-cache-action", "v1").add_with(("cache", "rust"))
 }
 
 fn setup_linux() -> Step<Run> {
  
  
  
    
    @@ -2,7 +2,7 @@ use std::cell::RefCell;
 
 use gh_workflow::{Concurrency, Env, Expression};
 
-use crate::tasks::workflows::steps::NamedJob;
+use crate::tasks::workflows::{runners::Platform, steps::NamedJob};
 
 macro_rules! secret {
     ($secret_name:ident) => {
@@ -16,6 +16,7 @@ macro_rules! var {
     };
 }
 
+secret!(ANTHROPIC_API_KEY);
 secret!(APPLE_NOTARIZATION_ISSUER_ID);
 secret!(APPLE_NOTARIZATION_KEY);
 secret!(APPLE_NOTARIZATION_KEY_ID);
@@ -32,34 +33,38 @@ secret!(SENTRY_AUTH_TOKEN);
 secret!(ZED_CLIENT_CHECKSUM_SEED);
 secret!(ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON);
 secret!(ZED_SENTRY_MINIDUMP_ENDPOINT);
+secret!(SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN);
 
 // todo(ci) make these secrets too...
 var!(AZURE_SIGNING_ACCOUNT_NAME);
 var!(AZURE_SIGNING_CERT_PROFILE_NAME);
 var!(AZURE_SIGNING_ENDPOINT);
 
-pub const GITHUB_SHA: &str = "${{ github.event.pull_request.head.sha || github.sha }}";
+pub fn bundle_envs(platform: Platform) -> Env {
+    let env = Env::default()
+        .add("CARGO_INCREMENTAL", 0)
+        .add("ZED_CLIENT_CHECKSUM_SEED", ZED_CLIENT_CHECKSUM_SEED)
+        .add("ZED_MINIDUMP_ENDPOINT", ZED_SENTRY_MINIDUMP_ENDPOINT);
 
-pub fn mac_bundle_envs() -> Env {
-    Env::default()
-        .add("MACOS_CERTIFICATE", MACOS_CERTIFICATE)
-        .add("MACOS_CERTIFICATE_PASSWORD", MACOS_CERTIFICATE_PASSWORD)
-        .add("APPLE_NOTARIZATION_KEY", APPLE_NOTARIZATION_KEY)
-        .add("APPLE_NOTARIZATION_KEY_ID", APPLE_NOTARIZATION_KEY_ID)
-        .add("APPLE_NOTARIZATION_ISSUER_ID", APPLE_NOTARIZATION_ISSUER_ID)
-}
-
-pub fn windows_bundle_envs() -> Env {
-    Env::default()
-        .add("AZURE_TENANT_ID", AZURE_SIGNING_TENANT_ID)
-        .add("AZURE_CLIENT_ID", AZURE_SIGNING_CLIENT_ID)
-        .add("AZURE_CLIENT_SECRET", AZURE_SIGNING_CLIENT_SECRET)
-        .add("ACCOUNT_NAME", AZURE_SIGNING_ACCOUNT_NAME)
-        .add("CERT_PROFILE_NAME", AZURE_SIGNING_CERT_PROFILE_NAME)
-        .add("ENDPOINT", AZURE_SIGNING_ENDPOINT)
-        .add("FILE_DIGEST", "SHA256")
-        .add("TIMESTAMP_DIGEST", "SHA256")
-        .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com")
+    match platform {
+        Platform::Linux => env,
+        Platform::Mac => env
+            .add("MACOS_CERTIFICATE", MACOS_CERTIFICATE)
+            .add("MACOS_CERTIFICATE_PASSWORD", MACOS_CERTIFICATE_PASSWORD)
+            .add("APPLE_NOTARIZATION_KEY", APPLE_NOTARIZATION_KEY)
+            .add("APPLE_NOTARIZATION_KEY_ID", APPLE_NOTARIZATION_KEY_ID)
+            .add("APPLE_NOTARIZATION_ISSUER_ID", APPLE_NOTARIZATION_ISSUER_ID),
+        Platform::Windows => env
+            .add("AZURE_TENANT_ID", AZURE_SIGNING_TENANT_ID)
+            .add("AZURE_CLIENT_ID", AZURE_SIGNING_CLIENT_ID)
+            .add("AZURE_CLIENT_SECRET", AZURE_SIGNING_CLIENT_SECRET)
+            .add("ACCOUNT_NAME", AZURE_SIGNING_ACCOUNT_NAME)
+            .add("CERT_PROFILE_NAME", AZURE_SIGNING_CERT_PROFILE_NAME)
+            .add("ENDPOINT", AZURE_SIGNING_ENDPOINT)
+            .add("FILE_DIGEST", "SHA256")
+            .add("TIMESTAMP_DIGEST", "SHA256")
+            .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com"),
+    }
 }
 
 pub(crate) fn one_workflow_per_non_main_branch() -> Concurrency {
@@ -110,3 +115,33 @@ impl PathCondition {
         }
     }
 }
+
+pub mod assets {
+    // NOTE: these asset names also exist in the zed.dev codebase.
+    pub const MAC_AARCH64: &str = "Zed-aarch64.dmg";
+    pub const MAC_X86_64: &str = "Zed-x86_64.dmg";
+    pub const LINUX_AARCH64: &str = "zed-linux-aarch64.tar.gz";
+    pub const LINUX_X86_64: &str = "zed-linux-x86_64.tar.gz";
+    pub const WINDOWS_X86_64: &str = "Zed-x86_64.exe";
+    pub const WINDOWS_AARCH64: &str = "Zed-aarch64.exe";
+
+    pub const REMOTE_SERVER_MAC_AARCH64: &str = "zed-remote-server-macos-aarch64.gz";
+    pub const REMOTE_SERVER_MAC_X86_64: &str = "zed-remote-server-macos-x86_64.gz";
+    pub const REMOTE_SERVER_LINUX_AARCH64: &str = "zed-remote-server-linux-aarch64.gz";
+    pub const REMOTE_SERVER_LINUX_X86_64: &str = "zed-remote-server-linux-x86_64.gz";
+
+    pub fn all() -> Vec<&'static str> {
+        vec![
+            MAC_AARCH64,
+            MAC_X86_64,
+            LINUX_AARCH64,
+            LINUX_X86_64,
+            WINDOWS_X86_64,
+            WINDOWS_AARCH64,
+            REMOTE_SERVER_MAC_AARCH64,
+            REMOTE_SERVER_MAC_X86_64,
+            REMOTE_SERVER_LINUX_AARCH64,
+            REMOTE_SERVER_LINUX_X86_64,
+        ]
+    }
+}