Merge branch 'main' into kb/rainbow-brackets

Created by Kirill Bulatov

Change summary

.github/workflows/ci.yml                                      | 855 -----
.github/workflows/compare_perf.yml                            |  13 
.github/workflows/danger.yml                                  |   4 
.github/workflows/eval.yml                                    |  71 
.github/workflows/nix_build.yml                               |  77 
.github/workflows/release.yml                                 | 486 ++
.github/workflows/release_nightly.yml                         |  38 
.github/workflows/run_agent_evals.yml                         |  62 
.github/workflows/run_bundling.yml                            |  39 
.github/workflows/run_tests.yml                               | 549 +++
.github/workflows/run_unit_evals.yml                          |  63 
.github/workflows/script_checks.yml                           |  21 
.github/workflows/unit_evals.yml                              |  86 
Cargo.lock                                                    |  23 
REVIEWERS.conl                                                |   2 
assets/keymaps/default-linux.json                             |   6 
assets/keymaps/default-macos.json                             |   4 
assets/keymaps/default-windows.json                           |   6 
assets/keymaps/vim.json                                       |   6 
crates/acp_thread/src/diff.rs                                 |   4 
crates/acp_tools/src/acp_tools.rs                             |  51 
crates/action_log/src/action_log.rs                           |  18 
crates/agent/src/edit_agent/edit_parser.rs                    |  46 
crates/agent/src/edit_agent/evals.rs                          |   1 
crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs        |  69 
crates/agent/src/templates.rs                                 |   3 
crates/agent/src/templates/system_prompt.hbs                  |   6 
crates/agent/src/thread.rs                                    |   1 
crates/agent/src/tools/edit_file_tool.rs                      |   5 
crates/agent_ui/src/acp/message_editor.rs                     | 294 +
crates/agent_ui/src/acp/mode_selector.rs                      |  12 
crates/agent_ui/src/acp/thread_history.rs                     |   1 
crates/agent_ui/src/acp/thread_view.rs                        | 160 
crates/agent_ui/src/agent_configuration.rs                    |  95 
crates/agent_ui/src/agent_diff.rs                             |  26 
crates/agent_ui/src/agent_panel.rs                            |  19 
crates/agent_ui/src/buffer_codegen.rs                         |   7 
crates/agent_ui/src/text_thread_editor.rs                     |   3 
crates/assistant_text_thread/src/text_thread.rs               |   3 
crates/auto_update/Cargo.toml                                 |   1 
crates/auto_update/src/auto_update.rs                         |   2 
crates/buffer_diff/src/buffer_diff.rs                         |  58 
crates/channel/src/channel_buffer.rs                          |   1 
crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs           |  27 
crates/codestral/src/codestral.rs                             |   8 
crates/collab/src/db/queries/buffers.rs                       |   4 
crates/collab/src/db/tests/buffer_tests.rs                    |  56 
crates/collab/src/tests/editor_tests.rs                       |  24 
crates/collab/src/tests/integration_tests.rs                  |   4 
crates/collab/src/tests/random_project_collaboration_tests.rs |   7 
crates/collab_ui/src/collab_panel.rs                          |  16 
crates/dap_adapters/Cargo.toml                                |   4 
crates/dap_adapters/src/dap_adapters.rs                       |  64 
crates/dap_adapters/src/python.rs                             | 311 +
crates/debugger_tools/src/dap_log.rs                          |   6 
crates/debugger_ui/src/session/running/breakpoint_list.rs     | 125 
crates/debugger_ui/src/session/running/memory_view.rs         |  18 
crates/debugger_ui/src/session/running/variable_list.rs       |  45 
crates/diagnostics/src/diagnostics_tests.rs                   |   4 
crates/edit_prediction_button/src/edit_prediction_button.rs   | 173 
crates/editor/src/display_map.rs                              |   3 
crates/editor/src/display_map/inlay_map.rs                    |  36 
crates/editor/src/display_map/tab_map.rs                      |   2 
crates/editor/src/display_map/wrap_map.rs                     |  16 
crates/editor/src/editor.rs                                   |  53 
crates/editor/src/element.rs                                  |  12 
crates/editor/src/git/blame.rs                                |   6 
crates/editor/src/hover_popover.rs                            |  18 
crates/editor/src/inlays.rs                                   |  18 
crates/editor/src/inlays/inlay_hints.rs                       | 198 
crates/editor/src/items.rs                                    |   3 
crates/editor/src/linked_editing_ranges.rs                    |  21 
crates/editor/src/movement.rs                                 |   9 
crates/editor/src/signature_help.rs                           |   2 
crates/extension_cli/src/main.rs                              |   4 
crates/extension_host/src/extension_host.rs                   |  19 
crates/extensions_ui/src/extensions_ui.rs                     |  49 
crates/file_finder/src/open_path_prompt.rs                    |   6 
crates/fs/src/fs.rs                                           |   2 
crates/git_hosting_providers/Cargo.toml                       |   1 
crates/git_hosting_providers/src/providers/gitee.rs           |  81 
crates/git_hosting_providers/src/providers/gitlab.rs          | 131 
crates/git_ui/src/commit_view.rs                              |   9 
crates/git_ui/src/file_diff_view.rs                           |  11 
crates/git_ui/src/git_panel.rs                                |  92 
crates/git_ui/src/stash_picker.rs                             |  99 
crates/go_to_line/src/cursor_position.rs                      |  32 
crates/gpui/src/app/async_context.rs                          |  15 
crates/gpui/src/app/test_context.rs                           |   5 
crates/gpui/src/elements/uniform_list.rs                      |   2 
crates/gpui/src/executor.rs                                   |   3 
crates/gpui/src/window.rs                                     |   6 
crates/keymap_editor/src/keymap_editor.rs                     |   4 
crates/language/src/buffer.rs                                 |  46 
crates/language/src/buffer_tests.rs                           |  39 
crates/language/src/syntax_map/syntax_map_tests.rs            |  45 
crates/language_extension/src/extension_lsp_adapter.rs        |   7 
crates/language_models/src/provider/bedrock.rs                |   1 
crates/language_models/src/provider/copilot_chat.rs           |   5 
crates/language_models/src/provider/mistral.rs                |   4 
crates/language_models/src/provider/ollama.rs                 |  10 
crates/language_tools/src/lsp_log_view.rs                     |   6 
crates/languages/src/c.rs                                     |  27 
crates/languages/src/go.rs                                    |  24 
crates/languages/src/python.rs                                |   9 
crates/languages/src/rust.rs                                  |  15 
crates/markdown/src/markdown.rs                               |   4 
crates/markdown_preview/src/markdown_parser.rs                |   2 
crates/multi_buffer/src/multi_buffer_tests.rs                 |  33 
crates/multi_buffer/src/path_key.rs                           |  16 
crates/outline_panel/src/outline_panel.rs                     | 465 ++
crates/picker/src/picker.rs                                   |   2 
crates/project/src/agent_server_store.rs                      |  42 
crates/project/src/buffer_store.rs                            |  62 
crates/project/src/git_store.rs                               | 172 
crates/project/src/git_store/conflict_set.rs                  |  44 
crates/project/src/lsp_store.rs                               | 201 
crates/project/src/lsp_store/inlay_hint_cache.rs              |  54 
crates/project/src/prettier_store.rs                          | 139 
crates/project/src/project.rs                                 |  28 
crates/project/src/project_tests.rs                           |  30 
crates/project/src/terminals.rs                               | 353 +-
crates/project_panel/src/project_panel.rs                     |   3 
crates/proto/proto/lsp.proto                                  |   1 
crates/recent_projects/src/remote_connections.rs              |  50 
crates/remote/src/transport/ssh.rs                            | 111 
crates/remote/src/transport/wsl.rs                            |  64 
crates/remote_server/src/remote_editing_tests.rs              |   8 
crates/rich_text/src/rich_text.rs                             |  17 
crates/rope/Cargo.toml                                        |   2 
crates/rope/benches/rope_benchmark.rs                         |  37 
crates/rope/src/rope.rs                                       | 266 
crates/rules_library/src/rules_library.rs                     |  10 
crates/search/src/buffer_search.rs                            |  21 
crates/search/src/project_search.rs                           | 205 +
crates/settings_ui/src/page_data.rs                           |  12 
crates/settings_ui/src/settings_ui.rs                         |  50 
crates/streaming_diff/Cargo.toml                              |   1 
crates/streaming_diff/src/streaming_diff.rs                   |  99 
crates/sum_tree/Cargo.toml                                    |   4 
crates/sum_tree/src/sum_tree.rs                               | 167 
crates/terminal/src/pty_info.rs                               |  23 
crates/terminal/src/terminal.rs                               | 544 +-
crates/terminal_view/src/persistence.rs                       | 102 
crates/terminal_view/src/terminal_view.rs                     |  38 
crates/text/Cargo.toml                                        |   1 
crates/text/src/tests.rs                                      | 281 +
crates/text/src/text.rs                                       | 112 
crates/title_bar/src/collab.rs                                |   2 
crates/ui/src/components/popover_menu.rs                      |   2 
crates/ui/src/components/scrollbar.rs                         |   2 
crates/ui/src/styles/typography.rs                            |  13 
crates/util/src/shell.rs                                      | 130 
crates/vim/src/helix.rs                                       |  64 
crates/vim/src/motion.rs                                      |  55 
crates/vim/src/normal.rs                                      |  13 
crates/vim/src/normal/search.rs                               |  19 
crates/vim/src/replace.rs                                     |  39 
crates/vim/src/state.rs                                       |   8 
crates/vim/src/vim.rs                                         |  24 
crates/vim/src/visual.rs                                      |   8 
crates/workspace/src/searchable.rs                            |   5 
crates/workspace/src/workspace.rs                             |   7 
crates/worktree/src/worktree.rs                               |   4 
crates/worktree/src/worktree_tests.rs                         |  34 
crates/zed/Cargo.toml                                         |   1 
crates/zed/src/main.rs                                        |   7 
crates/zed/src/zed.rs                                         |  73 
crates/zed/src/zed/open_listener.rs                           |   4 
crates/zeta/src/zeta.rs                                       |   3 
crates/zeta2/src/merge_excerpts.rs                            |  26 
crates/zeta2/src/related_excerpts.rs                          | 533 +-
crates/zeta2/src/zeta2.rs                                     |  35 
crates/zeta2_tools/Cargo.toml                                 |   1 
crates/zeta2_tools/src/zeta2_context_view.rs                  | 123 
crates/zeta_cli/Cargo.toml                                    |   2 
crates/zeta_cli/src/example.rs                                | 355 ++
crates/zeta_cli/src/main.rs                                   | 618 ++-
crates/zeta_cli/src/syntax_retrieval_stats.rs                 |   0 
docs/src/SUMMARY.md                                           |   1 
docs/src/ai/agent-panel.md                                    |  14 
docs/src/ai/agent-settings.md                                 |  36 
docs/src/ai/edit-prediction.md                                |  35 
docs/src/ai/inline-assistant.md                               |  97 
docs/src/development.md                                       |   1 
docs/src/development/releases.md                              | 147 
docs/src/extensions/icon-themes.md                            |   2 
docs/src/extensions/languages.md                              |   2 
docs/src/icon-themes.md                                       |  10 
docs/src/languages/php.md                                     |   4 
docs/src/languages/rego.md                                    |   2 
docs/src/snippets.md                                          |   2 
docs/src/themes.md                                            |  23 
docs/src/vim.md                                               |   2 
docs/src/visual-customization.md                              |  24 
script/bundle-mac                                             |  67 
script/prettier                                               |  17 
script/run-unit-evals                                         |   5 
tooling/xtask/src/tasks/workflows.rs                          |  12 
tooling/xtask/src/tasks/workflows/compare_perf.rs             |  22 
tooling/xtask/src/tasks/workflows/danger.rs                   |  72 
tooling/xtask/src/tasks/workflows/nix_build.rs                | 144 
tooling/xtask/src/tasks/workflows/release.rs                  | 223 +
tooling/xtask/src/tasks/workflows/release_nightly.rs          | 110 
tooling/xtask/src/tasks/workflows/run_agent_evals.rs          | 113 
tooling/xtask/src/tasks/workflows/run_bundling.rs             |  90 
tooling/xtask/src/tasks/workflows/run_tests.rs                | 473 ++
tooling/xtask/src/tasks/workflows/runners.rs                  |   7 
tooling/xtask/src/tasks/workflows/steps.rs                    | 118 
tooling/xtask/src/tasks/workflows/vars.rs                     |  59 
210 files changed, 9,138 insertions(+), 4,281 deletions(-)

Detailed changes

.github/workflows/ci.yml

@@ -1,855 +0,0 @@
-name: CI
-
-on:
-  push:
-    branches:
-      - main
-      - "v[0-9]+.[0-9]+.x"
-    tags:
-      - "v*"
-
-  pull_request:
-    branches:
-      - "**"
-
-concurrency:
-  # Allow only one workflow per any non-`main` branch.
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
-  cancel-in-progress: true
-
-env:
-  CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: 0
-  RUST_BACKTRACE: 1
-  DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
-  DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
-
-jobs:
-  job_spec:
-    name: Decide which jobs to run
-    if: github.repository_owner == 'zed-industries'
-    outputs:
-      run_tests: ${{ steps.filter.outputs.run_tests }}
-      run_license: ${{ steps.filter.outputs.run_license }}
-      run_docs: ${{ steps.filter.outputs.run_docs }}
-      run_nix: ${{ steps.filter.outputs.run_nix }}
-      run_actionlint: ${{ steps.filter.outputs.run_actionlint }}
-    runs-on:
-      - namespace-profile-2x4-ubuntu-2404
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          # 350 is arbitrary; ~10 days of history on main (5 secs); full history is ~25 secs
-          fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
-      - name: Fetch git history and generate output filters
-        id: filter
-        run: |
-          if [ -z "$GITHUB_BASE_REF" ]; then
-            echo "Not in a PR context (i.e., push to main/stable/preview)"
-            COMPARE_REV="$(git rev-parse HEAD~1)"
-          else
-            echo "In a PR context comparing to pull_request.base.ref"
-            git fetch origin "$GITHUB_BASE_REF" --depth=350
-            COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
-          fi
-          CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
-
-          # Specify anything which should potentially skip full test suite in this regex:
-          # - docs/
-          # - script/update_top_ranking_issues/
-          # - .github/ISSUE_TEMPLATE/
-          # - .github/workflows/  (except .github/workflows/ci.yml)
-          SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
-
-          echo "$CHANGED_FILES" | grep -qvP "$SKIP_REGEX" && \
-            echo "run_tests=true" >> "$GITHUB_OUTPUT" || \
-            echo "run_tests=false" >> "$GITHUB_OUTPUT"
-
-          echo "$CHANGED_FILES" | grep -qP '^docs/' && \
-            echo "run_docs=true" >> "$GITHUB_OUTPUT" || \
-            echo "run_docs=false" >> "$GITHUB_OUTPUT"
-
-          echo "$CHANGED_FILES" | grep -qP '^\.github/(workflows/|actions/|actionlint.yml)' && \
-            echo "run_actionlint=true" >> "$GITHUB_OUTPUT" || \
-            echo "run_actionlint=false" >> "$GITHUB_OUTPUT"
-
-          echo "$CHANGED_FILES" | grep -qP '^(Cargo.lock|script/.*licenses)' && \
-            echo "run_license=true" >> "$GITHUB_OUTPUT" || \
-            echo "run_license=false" >> "$GITHUB_OUTPUT"
-
-          echo "$CHANGED_FILES" | grep -qP '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' && \
-            echo "$GITHUB_REF_NAME" | grep -qvP '^v[0-9]+\.[0-9]+\.[0-9x](-pre)?$' && \
-            echo "run_nix=true" >> "$GITHUB_OUTPUT" || \
-            echo "run_nix=false" >> "$GITHUB_OUTPUT"
-
-  migration_checks:
-    name: Check Postgres and Protobuf migrations, mergability
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      needs.job_spec.outputs.run_tests == 'true'
-    timeout-minutes: 60
-    runs-on:
-      - self-mini-macos
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-          fetch-depth: 0 # fetch full history
-
-      - name: Remove untracked files
-        run: git clean -df
-
-      - name: Find modified migrations
-        shell: bash -euxo pipefail {0}
-        run: |
-          export SQUAWK_GITHUB_TOKEN=${{ github.token }}
-          . ./script/squawk
-
-      - name: Ensure fresh merge
-        shell: bash -euxo pipefail {0}
-        run: |
-          if [ -z "$GITHUB_BASE_REF" ];
-          then
-            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
-          else
-            git checkout -B temp
-            git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
-            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
-          fi
-
-      - uses: bufbuild/buf-setup-action@v1
-        with:
-          version: v1.29.0
-      - uses: bufbuild/buf-breaking-action@v1
-        with:
-          input: "crates/proto/proto/"
-          against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"
-
-  style:
-    timeout-minutes: 60
-    name: Check formatting and spelling
-    needs: [job_spec]
-    if: github.repository_owner == 'zed-industries'
-    runs-on:
-      - namespace-profile-4x8-ubuntu-2204
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-
-      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
-        with:
-          version: 9
-
-      - name: Prettier Check on /docs
-        working-directory: ./docs
-        run: |
-          pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || {
-            echo "To fix, run from the root of the Zed repo:"
-            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
-            false
-          }
-        env:
-          PRETTIER_VERSION: 3.5.0
-
-      - name: Prettier Check on default.json
-        run: |
-          pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || {
-            echo "To fix, run from the root of the Zed repo:"
-            echo "  pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
-            false
-          }
-        env:
-          PRETTIER_VERSION: 3.5.0
-
-      # To support writing comments that will certainly be revisited.
-      - name: Check for todo! and FIXME comments
-        run: script/check-todos
-
-      - name: Check modifier use in keymaps
-        run: script/check-keymaps
-
-      - name: Run style checks
-        uses: ./.github/actions/check_style
-
-      - name: Check for typos
-        uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1
-        with:
-          config: ./typos.toml
-
-  check_docs:
-    timeout-minutes: 60
-    name: Check docs
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true')
-    runs-on:
-      - namespace-profile-8x16-ubuntu-2204
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Build docs
-        uses: ./.github/actions/build_docs
-
-  actionlint:
-    runs-on: namespace-profile-2x4-ubuntu-2404
-    if: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_actionlint == 'true'
-    needs: [job_spec]
-    steps:
-      - uses: actions/checkout@v4
-      - name: Download actionlint
-        id: get_actionlint
-        run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
-        shell: bash
-      - name: Check workflow files
-        run: ${{ steps.get_actionlint.outputs.executable }} -color
-        shell: bash
-
-  macos_tests:
-    timeout-minutes: 60
-    name: (macOS) Run Clippy and tests
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      needs.job_spec.outputs.run_tests == 'true'
-    runs-on:
-      - self-mini-macos
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Check that Cargo.lock is up to date
-        run: |
-          cargo update --locked --workspace
-
-      - name: cargo clippy
-        run: ./script/clippy
-
-      - name: Install cargo-machete
-        uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
-        with:
-          command: install
-          args: cargo-machete@0.7.0
-
-      - name: Check unused dependencies
-        uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
-        with:
-          command: machete
-
-      - name: Check licenses
-        run: |
-          script/check-licenses
-          if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then
-            script/generate-licenses /tmp/zed_licenses_output
-          fi
-
-      - name: Check for new vulnerable dependencies
-        if: github.event_name == 'pull_request'
-        uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4
-        with:
-          license-check: false
-
-      - name: Run tests
-        uses: ./.github/actions/run_tests
-
-      - name: Build collab
-        run: cargo build -p collab
-
-      - name: Build other binaries and features
-        run: |
-          cargo build --workspace --bins --all-features
-          cargo check -p gpui --features "macos-blade"
-          cargo check -p workspace
-          cargo build -p remote_server
-          cargo check -p gpui --examples
-
-      # Since the macOS runners are stateful, we need to remove the config file to prevent potential bugs.
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo
-
-  linux_tests:
-    timeout-minutes: 60
-    name: (Linux) Run Clippy and tests
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      needs.job_spec.outputs.run_tests == 'true'
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: cargo clippy
-        run: ./script/clippy
-
-      - name: Run tests
-        uses: ./.github/actions/run_tests
-
-      - name: Build other binaries and features
-        run: |
-          cargo build -p zed
-          cargo check -p workspace
-          cargo check -p gpui --examples
-
-      # Since the Linux runner is not stateful, in theory there is no need to do this cleanup.
-      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
-      # to clean up the config file, I’ve included the cleanup code here as a precaution.
-      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo
-
-  doctests:
-    # Nextest currently doesn't support doctests, so run them separately and in parallel.
-    timeout-minutes: 60
-    name: (Linux) Run doctests
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      needs.job_spec.outputs.run_tests == 'true'
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Run doctests
-        run: cargo test --workspace --doc --no-fail-fast
-
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo
-
-  build_remote_server:
-    timeout-minutes: 60
-    name: (Linux) Build Remote Server
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      needs.job_spec.outputs.run_tests == 'true'
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Clang & Mold
-        run: ./script/remote-server && ./script/install-mold 2.34.0
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Build Remote Server
-        run: cargo build -p remote_server
-
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo
-
-  windows_tests:
-    timeout-minutes: 60
-    name: (Windows) Run Clippy and tests
-    needs: [job_spec]
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      needs.job_spec.outputs.run_tests == 'true'
-    runs-on: [self-32vcpu-windows-2022]
-    steps:
-      - name: Environment Setup
-        run: |
-          $RunnerDir = Split-Path -Parent $env:RUNNER_WORKSPACE
-          Write-Output `
-            "RUSTUP_HOME=$RunnerDir\.rustup" `
-            "CARGO_HOME=$RunnerDir\.cargo" `
-            "PATH=$RunnerDir\.cargo\bin;$env:PATH" `
-          >> $env:GITHUB_ENV
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Configure CI
-        run: |
-          New-Item -ItemType Directory -Path "./../.cargo" -Force
-          Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
-
-      - name: cargo clippy
-        run: |
-          .\script\clippy.ps1
-
-      - name: Run tests
-        uses: ./.github/actions/run_tests_windows
-
-      - name: Build Zed
-        run: cargo build
-
-      - name: Limit target directory size
-        run: ./script/clear-target-dir-if-larger-than.ps1 250
-
-      - name: Clean CI config file
-        if: always()
-        run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
-
-  tests_pass:
-    name: Tests Pass
-    runs-on: namespace-profile-2x4-ubuntu-2404
-    needs:
-      - job_spec
-      - style
-      - check_docs
-      - actionlint
-      - migration_checks
-      # run_tests: If adding required tests, add them here and to script below.
-      - linux_tests
-      - build_remote_server
-      - macos_tests
-      - windows_tests
-    if: |
-      github.repository_owner == 'zed-industries' &&
-      always()
-    steps:
-      - name: Check all tests passed
-        run: |
-          # Check dependent jobs...
-          RET_CODE=0
-          # Always check style
-          [[ "${{ needs.style.result }}"      != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }
-
-          if [[ "${{ needs.job_spec.outputs.run_docs }}" == "true" ]]; then
-            [[ "${{ needs.check_docs.result }}" != 'success' ]] && { RET_CODE=1; echo "docs checks failed"; }
-          fi
-
-          if [[ "${{ needs.job_spec.outputs.run_actionlint }}" == "true" ]]; then
-            [[ "${{ needs.actionlint.result }}" != 'success' ]] && { RET_CODE=1; echo "actionlint checks failed"; }
-          fi
-
-          # Only check test jobs if they were supposed to run
-          if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
-            [[ "${{ needs.macos_tests.result }}"          != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
-            [[ "${{ needs.linux_tests.result }}"          != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
-            [[ "${{ needs.windows_tests.result }}"        != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
-            [[ "${{ needs.build_remote_server.result }}"  != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
-            # This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
-            # [[ "${{ needs.migration_checks.result }}"     != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }
-          fi
-          if [[ "$RET_CODE" -eq 0 ]]; then
-            echo "All tests passed successfully!"
-          fi
-          exit $RET_CODE
-
-  bundle-mac:
-    timeout-minutes: 120
-    name: Create a macOS bundle
-    runs-on:
-      - self-mini-macos
-    if: startsWith(github.ref, 'refs/tags/v')
-    needs: [macos_tests]
-    env:
-      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
-      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
-      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
-      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
-      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
-    steps:
-      - name: Install Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
-        with:
-          node-version: "18"
-
-      - name: Setup Sentry CLI
-        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
-        with:
-          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          # We need to fetch more than one commit so that `script/draft-release-notes`
-          # is able to diff between the current and previous tag.
-          #
-          # 25 was chosen arbitrarily.
-          fetch-depth: 25
-          clean: false
-          ref: ${{ github.ref }}
-
-      - name: Limit target directory size
-        run: script/clear-target-dir-if-larger-than 300
-
-      - name: Determine version and release channel
-        run: |
-          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
-          script/determine-release-channel
-
-      - name: Draft release notes
-        run: |
-          mkdir -p target/
-          # Ignore any errors that occur while drafting release notes to not fail the build.
-          script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true
-          script/create-draft-release target/release-notes.md
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Create macOS app bundle (aarch64)
-        run: script/bundle-mac aarch64-apple-darwin
-
-      - name: Create macOS app bundle (x64)
-        run: script/bundle-mac x86_64-apple-darwin
-
-      - name: Rename binaries
-        run: |
-          mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg
-          mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
-
-      - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        name: Upload app bundle to release
-        if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
-        with:
-          draft: true
-          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
-          files: |
-            target/zed-remote-server-macos-x86_64.gz
-            target/zed-remote-server-macos-aarch64.gz
-            target/aarch64-apple-darwin/release/Zed-aarch64.dmg
-            target/x86_64-apple-darwin/release/Zed-x86_64.dmg
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  bundle-linux-x86_x64:
-    timeout-minutes: 60
-    name: Linux x86_x64 release bundle
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
-    if: |
-      ( startsWith(github.ref, 'refs/tags/v') )
-    needs: [linux_tests]
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Install Linux dependencies
-        run: ./script/linux && ./script/install-mold 2.34.0
-
-      - name: Setup Sentry CLI
-        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
-        with:
-          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
-
-      - name: Determine version and release channel
-        run: |
-          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
-          script/determine-release-channel
-
-      - name: Create Linux .tar.gz bundle
-        run: script/bundle-linux
-
-      - name: Upload Artifacts to release
-        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        with:
-          draft: true
-          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
-          files: |
-            target/zed-remote-server-linux-x86_64.gz
-            target/release/zed-linux-x86_64.tar.gz
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  bundle-linux-aarch64: # this runs on ubuntu22.04
-    timeout-minutes: 60
-    name: Linux arm64 release bundle
-    runs-on:
-      - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
-    if: |
-      startsWith(github.ref, 'refs/tags/v')
-    needs: [linux_tests]
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Setup Sentry CLI
-        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
-        with:
-          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
-
-      - name: Determine version and release channel
-        run: |
-          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
-          script/determine-release-channel
-
-      - name: Create and upload Linux .tar.gz bundles
-        run: script/bundle-linux
-
-      - name: Upload Artifacts to release
-        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        with:
-          draft: true
-          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
-          files: |
-            target/zed-remote-server-linux-aarch64.gz
-            target/release/zed-linux-aarch64.tar.gz
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  freebsd:
-    timeout-minutes: 60
-    runs-on: github-8vcpu-ubuntu-2404
-    if: |
-      false && ( startsWith(github.ref, 'refs/tags/v') )
-    needs: [linux_tests]
-    name: Build Zed on FreeBSD
-    steps:
-      - uses: actions/checkout@v4
-      - name: Build FreeBSD remote-server
-        id: freebsd-build
-        uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
-        with:
-          usesh: true
-          release: 13.5
-          copyback: true
-          prepare: |
-            pkg install -y \
-              bash curl jq git \
-              rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
-          run: |
-            freebsd-version
-            sysctl hw.model
-            sysctl hw.ncpu
-            sysctl hw.physmem
-            sysctl hw.usermem
-            git config --global --add safe.directory /home/runner/work/zed/zed
-            rustup-init --profile minimal --default-toolchain none -y
-            . "$HOME/.cargo/env"
-            ./script/bundle-freebsd
-            mkdir -p out/
-            mv "target/zed-remote-server-freebsd-x86_64.gz" out/
-            rm -rf target/
-            cargo clean
-
-      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
-        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
-        with:
-          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz
-          path: out/zed-remote-server-freebsd-x86_64.gz
-
-      - name: Upload Artifacts to release
-        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
-        with:
-          draft: true
-          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
-          files: |
-            out/zed-remote-server-freebsd-x86_64.gz
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  nix-build:
-    name: Build with Nix
-    uses: ./.github/workflows/nix_build.yml
-    needs: [job_spec]
-    if: github.repository_owner == 'zed-industries' &&
-      (contains(github.event.pull_request.labels.*.name, 'run-nix') ||
-      needs.job_spec.outputs.run_nix == 'true')
-    secrets: inherit
-    with:
-      flake-output: debug
-      # excludes the final package to only cache dependencies
-      cachix-filter: "-zed-editor-[0-9.]*-nightly"
-
-  bundle-windows-x64:
-    timeout-minutes: 120
-    name: Create a Windows installer for x86_64
-    runs-on: [self-32vcpu-windows-2022]
-    if: |
-      ( startsWith(github.ref, 'refs/tags/v') )
-    needs: [windows_tests]
-    env:
-      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
-      AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
-      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
-      ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
-      CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
-      ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
-      FILE_DIGEST: SHA256
-      TIMESTAMP_DIGEST: SHA256
-      TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Setup Sentry CLI
-        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
-        with:
-          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
-
-      - name: Determine version and release channel
-        working-directory: ${{ env.ZED_WORKSPACE }}
-        run: |
-          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
-          script/determine-release-channel.ps1
-
-      - name: Build Zed installer
-        working-directory: ${{ env.ZED_WORKSPACE }}
-        run: script/bundle-windows.ps1
-
-      - name: Upload Artifacts to release
-        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        with:
-          draft: true
-          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
-          files: ${{ env.SETUP_PATH }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  bundle-windows-aarch64:
-    timeout-minutes: 120
-    name: Create a Windows installer for aarch64
-    runs-on: [self-32vcpu-windows-2022]
-    if: |
-      ( startsWith(github.ref, 'refs/tags/v') )
-    needs: [windows_tests]
-    env:
-      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
-      AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
-      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
-      ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
-      CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
-      ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
-      FILE_DIGEST: SHA256
-      TIMESTAMP_DIGEST: SHA256
-      TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Setup Sentry CLI
-        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
-        with:
-          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
-
-      - name: Determine version and release channel
-        working-directory: ${{ env.ZED_WORKSPACE }}
-        run: |
-          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
-          script/determine-release-channel.ps1
-
-      - name: Build Zed installer
-        working-directory: ${{ env.ZED_WORKSPACE }}
-        run: script/bundle-windows.ps1 -Architecture aarch64
-
-      - name: Upload Artifacts to release
-        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        with:
-          draft: true
-          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
-          files: ${{ env.SETUP_PATH }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  auto-release-preview:
-    name: Auto release preview
-    if: |
-      false
-      && startsWith(github.ref, 'refs/tags/v')
-      && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
-    needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64, bundle-windows-aarch64]
-    runs-on:
-      - self-mini-macos
-    steps:
-      - name: gh release
-        run: gh release edit "$GITHUB_REF_NAME" --draft=false
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Create Sentry release
-        uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
-        env:
-          SENTRY_ORG: zed-dev
-          SENTRY_PROJECT: zed
-          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
-        with:
-          environment: production

.github/workflows/compare_perf.yml

@@ -0,0 +1,13 @@
+# Generated from xtask::workflows::compare_perf
+# Rebuild with `cargo xtask workflows`.
+name: compare_perf
+on:
+  workflow_dispatch: {}
+jobs:
+  run_perf:
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false

.github/workflows/danger.yml

@@ -29,10 +29,10 @@ jobs:
         node-version: '20'
         cache: pnpm
         cache-dependency-path: script/danger/pnpm-lock.yaml
-    - name: danger::install_deps
+    - name: danger::danger_job::install_deps
       run: pnpm install --dir script/danger
       shell: bash -euxo pipefail {0}
-    - name: danger::run
+    - name: danger::danger_job::run
       run: pnpm run --dir script/danger danger ci
       shell: bash -euxo pipefail {0}
       env:

.github/workflows/eval.yml

@@ -1,71 +0,0 @@
-name: Run Agent Eval
-
-on:
-  schedule:
-    - cron: "0 0 * * *"
-
-  pull_request:
-    branches:
-      - "**"
-    types: [synchronize, reopened, labeled]
-
-  workflow_dispatch:
-
-concurrency:
-  # Allow only one workflow per any non-`main` branch.
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
-  cancel-in-progress: true
-
-env:
-  CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: 0
-  RUST_BACKTRACE: 1
-  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-  ZED_EVAL_TELEMETRY: 1
-
-jobs:
-  run_eval:
-    timeout-minutes: 60
-    name: Run Agent Eval
-    if: >
-      github.repository_owner == 'zed-industries' &&
-      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Compile eval
-        run: cargo build --package=eval
-
-      - name: Run eval
-        run: cargo run --package=eval -- --repetitions=8 --concurrency=1
-
-      # Since the Linux runner is not stateful, in theory there is no need to do this cleanup.
-      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
-      # to clean up the config file, I’ve included the cleanup code here as a precaution.
-      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo

.github/workflows/nix_build.yml

@@ -1,77 +0,0 @@
-# Generated from xtask::workflows::nix_build
-# Rebuild with `cargo xtask workflows`.
-name: nix_build
-on:
-  workflow_call:
-    inputs:
-      flake-output:
-        type: string
-        default: default
-      cachix-filter:
-        type: string
-jobs:
-  build_nix_linux_x86_64:
-    if: github.repository_owner == 'zed-industries'
-    runs-on: namespace-profile-32x64-ubuntu-2004
-    env:
-      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
-      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
-      GIT_LFS_SKIP_SMUDGE: '1'
-    steps:
-    - name: steps::checkout_repo
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        clean: false
-    - name: nix_build::install_nix
-      uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
-      with:
-        github_access_token: ${{ secrets.GITHUB_TOKEN }}
-    - name: nix_build::cachix_action
-      uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
-      with:
-        name: zed
-        authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-        cachixArgs: -v
-        pushFilter: ${{ inputs.cachix-filter }}
-    - name: nix_build::build
-      run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config
-      shell: bash -euxo pipefail {0}
-    timeout-minutes: 60
-    continue-on-error: true
-  build_nix_mac_aarch64:
-    if: github.repository_owner == 'zed-industries'
-    runs-on: self-mini-macos
-    env:
-      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
-      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
-      GIT_LFS_SKIP_SMUDGE: '1'
-    steps:
-    - name: steps::checkout_repo
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        clean: false
-    - name: nix_build::set_path
-      run: |
-        echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
-        echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
-      shell: bash -euxo pipefail {0}
-    - name: nix_build::cachix_action
-      uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
-      with:
-        name: zed
-        authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-        cachixArgs: -v
-        pushFilter: ${{ inputs.cachix-filter }}
-    - name: nix_build::build
-      run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config
-      shell: bash -euxo pipefail {0}
-    - name: nix_build::limit_store
-      run: |-
-        if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
-            nix-collect-garbage -d || true
-        fi
-      shell: bash -euxo pipefail {0}
-    timeout-minutes: 60
-    continue-on-error: true

.github/workflows/release.yml

@@ -0,0 +1,486 @@
+# Generated from xtask::workflows::release
+# Rebuild with `cargo xtask workflows`.
+name: release
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: '0'
+  RUST_BACKTRACE: '1'
+  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+on:
+  push:
+    tags:
+    - v*
+jobs:
+  run_tests_mac:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: self-mini-macos
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: bash -euxo pipefail {0}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 300
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_nextest
+      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  run_tests_linux:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: bash -euxo pipefail {0}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 100
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_nextest
+      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  run_tests_windows:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: self-32vcpu-windows-2022
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        New-Item -ItemType Directory -Path "./../.cargo" -Force
+        Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
+      shell: pwsh
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy.ps1
+      shell: pwsh
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: pwsh
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than.ps1 250
+      shell: pwsh
+    - name: steps::cargo_nextest
+      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+      shell: pwsh
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
+      shell: pwsh
+    timeout-minutes: 60
+  check_scripts:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: run_tests::check_scripts::run_shellcheck
+      run: ./script/shellcheck-scripts error
+      shell: bash -euxo pipefail {0}
+    - id: get_actionlint
+      name: run_tests::check_scripts::download_actionlint
+      run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_scripts::run_actionlint
+      run: |
+        ${{ steps.get_actionlint.outputs.executable }} -color
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_scripts::check_xtask_workflows
+      run: |
+        cargo xtask workflows
+        if ! git diff --exit-code .github; then
+          echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
+          echo "Please run 'cargo xtask workflows' locally and commit the changes"
+          exit 1
+        fi
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  create_draft_release:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+        fetch-depth: 25
+        ref: ${{ github.ref }}
+    - name: script/determine-release-channel
+      run: script/determine-release-channel
+      shell: bash -euxo pipefail {0}
+    - name: mkdir -p target/
+      run: mkdir -p target/
+      shell: bash -euxo pipefail {0}
+    - name: release::create_draft_release::generate_release_notes
+      run: node --redirect-warnings=/dev/null ./script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md
+      shell: bash -euxo pipefail {0}
+    - name: release::create_draft_release::create_release
+      run: script/create-draft-release target/release-notes.md
+      shell: bash -euxo pipefail {0}
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    timeout-minutes: 60
+  bundle_linux_arm64:
+    needs:
+    - run_tests_linux
+    - check_scripts
+    runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: ./script/bundle-linux
+      run: ./script/bundle-linux
+      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
+        path: target/release/zed-*.tar.gz
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
+        path: target/zed-remote-server-*.gz
+        if-no-files-found: error
+    outputs:
+      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
+    timeout-minutes: 60
+  bundle_linux_x86_64:
+    needs:
+    - run_tests_linux
+    - check_scripts
+    runs-on: namespace-profile-32x64-ubuntu-2004
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: ./script/bundle-linux
+      run: ./script/bundle-linux
+      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
+        path: target/release/zed-*.tar.gz
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
+        path: target/zed-remote-server-*.gz
+        if-no-files-found: error
+    outputs:
+      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
+    timeout-minutes: 60
+  bundle_mac_arm64:
+    needs:
+    - run_tests_mac
+    - check_scripts
+    runs-on: self-mini-macos
+    env:
+      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
+      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
+      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 300
+      shell: bash -euxo pipefail {0}
+    - name: run_bundling::bundle_mac
+      run: ./script/bundle-mac aarch64-apple-darwin
+      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
+        path: target/aarch64-apple-darwin/release/Zed.dmg
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
+        path: target/zed-remote-server-macos-aarch64.gz
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
+    timeout-minutes: 60
+  bundle_mac_x86_64:
+    needs:
+    - run_tests_mac
+    - check_scripts
+    runs-on: self-mini-macos
+    env:
+      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
+      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
+      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 300
+      shell: bash -euxo pipefail {0}
+    - name: run_bundling::bundle_mac
+      run: ./script/bundle-mac x86_64-apple-darwin
+      shell: bash -euxo pipefail {0}
+    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
+        path: target/x86_64-apple-darwin/release/Zed.dmg
+        if-no-files-found: error
+    - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
+        path: target/zed-remote-server-macos-x86_64.gz
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
+    timeout-minutes: 60
+  bundle_windows_arm64:
+    needs:
+    - run_tests_windows
+    - check_scripts
+    runs-on: self-32vcpu-windows-2022
+    env:
+      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
+      AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
+      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
+      ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
+      CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
+      ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
+      FILE_DIGEST: SHA256
+      TIMESTAMP_DIGEST: SHA256
+      TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: run_bundling::bundle_windows
+      run: script/bundle-windows.ps1 -Architecture aarch64
+      shell: pwsh
+      working-directory: ${{ env.ZED_WORKSPACE }}
+    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
+        path: ${{ env.SETUP_PATH }}
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
+    timeout-minutes: 60
+  bundle_windows_x86_64:
+    needs:
+    - run_tests_windows
+    - check_scripts
+    runs-on: self-32vcpu-windows-2022
+    env:
+      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
+      AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
+      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
+      ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
+      CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
+      ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
+      FILE_DIGEST: SHA256
+      TIMESTAMP_DIGEST: SHA256
+      TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_sentry
+      uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
+      with:
+        token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+    - name: run_bundling::bundle_windows
+      run: script/bundle-windows.ps1 -Architecture x86_64
+      shell: pwsh
+      working-directory: ${{ env.ZED_WORKSPACE }}
+    - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe'
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
+        path: ${{ env.SETUP_PATH }}
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
+    timeout-minutes: 60
+  upload_release_assets:
+    needs:
+    - create_draft_release
+    - bundle_linux_arm64
+    - bundle_linux_x86_64
+    - bundle_mac_arm64
+    - bundle_mac_x86_64
+    - bundle_windows_arm64
+    - bundle_windows_x86_64
+    runs-on: namespace-profile-4x8-ubuntu-2204
+    steps:
+    - name: release::upload_release_assets::download_workflow_artifacts
+      uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
+      with:
+        path: ./artifacts/
+    - name: ls -lR ./artifacts
+      run: ls -lR ./artifacts
+      shell: bash -euxo pipefail {0}
+    - name: release::upload_release_assets::prep_release_artifacts
+      run: |-
+        mkdir -p release-artifacts/
+
+        mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.dmg
+        mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.dmg
+        mv ./artifacts/${{ needs.bundle_windows_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.exe
+        mv ./artifacts/${{ needs.bundle_windows_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.exe
+        mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.zed }}/* release-artifacts/zed-linux-aarch64.tar.gz
+        mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.zed }}/* release-artifacts/zed-linux-x86_64.tar.gz
+        mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-x86_64.gz
+        mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-aarch64.gz
+        mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-x86_64.gz
+        mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-aarch64.gz
+      shell: bash -euxo pipefail {0}
+    - name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
+      run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
+      shell: bash -euxo pipefail {0}
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  auto_release_preview:
+    needs:
+    - upload_release_assets
+    if: |
+      false
+      && startsWith(github.ref, 'refs/tags/v')
+      && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
+      run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
+      shell: bash -euxo pipefail {0}
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    - name: release::auto_release_preview::create_sentry_release
+      uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
+      with:
+        environment: production
+      env:
+        SENTRY_ORG: zed-dev
+        SENTRY_PROJECT: zed
+        SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true

.github/workflows/release_nightly.yml

@@ -49,6 +49,9 @@ jobs:
       uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
       with:
         node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy
+      shell: bash -euxo pipefail {0}
     - name: steps::cargo_install_nextest
       run: cargo install cargo-nextest --locked
       shell: bash -euxo pipefail {0}
@@ -81,6 +84,9 @@ jobs:
       uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
       with:
         node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy.ps1
+      shell: pwsh
     - name: steps::cargo_install_nextest
       run: cargo install cargo-nextest --locked
       shell: pwsh
@@ -195,9 +201,6 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: release_nightly::add_rust_to_path
-      run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-      shell: bash -euxo pipefail {0}
     - name: ./script/linux
       run: ./script/linux
       shell: bash -euxo pipefail {0}
@@ -236,9 +239,6 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: release_nightly::add_rust_to_path
-      run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-      shell: bash -euxo pipefail {0}
     - name: ./script/linux
       run: ./script/linux
       shell: bash -euxo pipefail {0}
@@ -292,11 +292,11 @@ jobs:
         "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: release_nightly::build_zed_installer
+    - name: run_bundling::bundle_windows
       run: script/bundle-windows.ps1 -Architecture x86_64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: release_nightly::upload_zed_nightly_windows
+    - name: release_nightly::upload_zed_nightly
       run: script/upload-nightly.ps1 -Architecture x86_64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
@@ -334,11 +334,11 @@ jobs:
         "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: release_nightly::build_zed_installer
+    - name: run_bundling::bundle_windows
       run: script/bundle-windows.ps1 -Architecture aarch64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
-    - name: release_nightly::upload_zed_nightly_windows
+    - name: release_nightly::upload_zed_nightly
       run: script/upload-nightly.ps1 -Architecture aarch64
       shell: pwsh
       working-directory: ${{ env.ZED_WORKSPACE }}
@@ -359,17 +359,17 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: nix_build::install_nix
+    - name: nix_build::build_nix::install_nix
       uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
       with:
         github_access_token: ${{ secrets.GITHUB_TOKEN }}
-    - name: nix_build::cachix_action
+    - name: nix_build::build_nix::cachix_action
       uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
       with:
         name: zed
         authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
         cachixArgs: -v
-    - name: nix_build::build
+    - name: nix_build::build_nix::build
       run: nix build .#default -L --accept-flake-config
       shell: bash -euxo pipefail {0}
     timeout-minutes: 60
@@ -390,21 +390,21 @@ jobs:
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         clean: false
-    - name: nix_build::set_path
+    - name: nix_build::build_nix::set_path
       run: |
         echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
         echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
       shell: bash -euxo pipefail {0}
-    - name: nix_build::cachix_action
+    - name: nix_build::build_nix::cachix_action
       uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
       with:
         name: zed
         authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
         cachixArgs: -v
-    - name: nix_build::build
+    - name: nix_build::build_nix::build
       run: nix build .#default -L --accept-flake-config
       shell: bash -euxo pipefail {0}
-    - name: nix_build::limit_store
+    - name: nix_build::build_nix::limit_store
       run: |-
         if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
             nix-collect-garbage -d || true
@@ -428,7 +428,7 @@ jobs:
       with:
         clean: false
         fetch-depth: 0
-    - name: release_nightly::update_nightly_tag
+    - name: release_nightly::update_nightly_tag_job::update_nightly_tag
       run: |
         if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
           echo "Nightly tag already points to current commit. Skipping tagging."
@@ -439,7 +439,7 @@ jobs:
         git tag -f nightly
         git push origin nightly --force
       shell: bash -euxo pipefail {0}
-    - name: release_nightly::create_sentry_release
+    - name: release_nightly::update_nightly_tag_job::create_sentry_release
       uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
       with:
         environment: production

.github/workflows/run_agent_evals.yml

@@ -0,0 +1,62 @@
+# Generated from xtask::workflows::run_agent_evals
+# Rebuild with `cargo xtask workflows`.
+name: run_agent_evals
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: '0'
+  RUST_BACKTRACE: '1'
+  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+  ZED_EVAL_TELEMETRY: '1'
+on:
+  pull_request:
+    types:
+    - synchronize
+    - reopened
+    - labeled
+    branches:
+    - '**'
+  schedule:
+  - cron: 0 0 * * *
+  workflow_dispatch: {}
+jobs:
+  agent_evals:
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::cache_rust_dependencies
+      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+      with:
+        save-if: ${{ github.ref == 'refs/heads/main' }}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: cargo build --package=eval
+      run: cargo build --package=eval
+      shell: bash -euxo pipefail {0}
+    - name: run_agent_evals::agent_evals::run_eval
+      run: cargo run --package=eval -- --repetitions=8 --concurrency=1
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true

.github/workflows/run_bundling.yml

@@ -48,11 +48,16 @@ jobs:
       with:
         name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
         path: target/x86_64-apple-darwin/release/Zed.dmg
+        if-no-files-found: error
     - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
         name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
         path: target/zed-remote-server-macos-x86_64.gz
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
     timeout-minutes: 60
   bundle_mac_arm64:
     if: |-
@@ -89,11 +94,16 @@ jobs:
       with:
         name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
         path: target/aarch64-apple-darwin/release/Zed.dmg
+        if-no-files-found: error
     - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
         name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
         path: target/zed-remote-server-macos-aarch64.gz
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
     timeout-minutes: 60
   bundle_linux_x86_64:
     if: |-
@@ -109,10 +119,10 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: ./script/linux
+    - name: steps::setup_linux
       run: ./script/linux
       shell: bash -euxo pipefail {0}
-    - name: ./script/install-mold
+    - name: steps::install_mold
       run: ./script/install-mold
       shell: bash -euxo pipefail {0}
     - name: ./script/bundle-linux
@@ -123,11 +133,16 @@ jobs:
       with:
         name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
         path: target/release/zed-*.tar.gz
+        if-no-files-found: error
     - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
         name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
-        path: target/release/zed-remote-server-*.tar.gz
+        path: target/zed-remote-server-*.gz
+        if-no-files-found: error
+    outputs:
+      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
     timeout-minutes: 60
   bundle_linux_arm64:
     if: |-
@@ -143,9 +158,12 @@ jobs:
       uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
       with:
         token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-    - name: ./script/linux
+    - name: steps::setup_linux
       run: ./script/linux
       shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
     - name: ./script/bundle-linux
       run: ./script/bundle-linux
       shell: bash -euxo pipefail {0}
@@ -154,11 +172,16 @@ jobs:
       with:
         name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
         path: target/release/zed-*.tar.gz
+        if-no-files-found: error
     - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:
         name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
-        path: target/release/zed-remote-server-*.tar.gz
+        path: target/zed-remote-server-*.gz
+        if-no-files-found: error
+    outputs:
+      zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
+      remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
     timeout-minutes: 60
   bundle_windows_x86_64:
     if: |-
@@ -193,6 +216,9 @@ jobs:
       with:
         name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
         path: ${{ env.SETUP_PATH }}
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
     timeout-minutes: 60
   bundle_windows_arm64:
     if: |-
@@ -227,6 +253,9 @@ jobs:
       with:
         name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
         path: ${{ env.SETUP_PATH }}
+        if-no-files-found: error
+    outputs:
+      zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
     timeout-minutes: 60
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}

.github/workflows/run_tests.yml

@@ -0,0 +1,549 @@
+# Generated from xtask::workflows::run_tests
+# Rebuild with `cargo xtask workflows`.
+name: run_tests
+env:
+  CARGO_TERM_COLOR: always
+  RUST_BACKTRACE: '1'
+  CARGO_INCREMENTAL: '0'
+on:
+  pull_request:
+    branches:
+    - '**'
+  push:
+    branches:
+    - main
+    - v[0-9]+.[0-9]+.x
+jobs:
+  orchestrate:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+        fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
+    - id: filter
+      name: filter
+      run: |
+        if [ -z "$GITHUB_BASE_REF" ]; then
+          echo "Not in a PR context (i.e., push to main/stable/preview)"
+          COMPARE_REV="$(git rev-parse HEAD~1)"
+        else
+          echo "In a PR context comparing to pull_request.base.ref"
+          git fetch origin "$GITHUB_BASE_REF" --depth=350
+          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
+        fi
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+
+        check_pattern() {
+          local output_name="$1"
+          local pattern="$2"
+          local grep_arg="$3"
+
+          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
+            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
+            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
+        }
+
+        check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP
+        check_pattern "run_docs" '^docs/' -qP
+        check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
+        check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
+        check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
+      shell: bash -euxo pipefail {0}
+    outputs:
+      run_action_checks: ${{ steps.filter.outputs.run_action_checks }}
+      run_docs: ${{ steps.filter.outputs.run_docs }}
+      run_licenses: ${{ steps.filter.outputs.run_licenses }}
+      run_nix: ${{ steps.filter.outputs.run_nix }}
+      run_tests: ${{ steps.filter.outputs.run_tests }}
+  check_style:
+    if: github.repository_owner == 'zed-industries'
+    runs-on: namespace-profile-4x8-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_pnpm
+      uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
+      with:
+        version: '9'
+    - name: ./script/prettier
+      run: ./script/prettier
+      shell: bash -euxo pipefail {0}
+    - name: ./script/check-todos
+      run: ./script/check-todos
+      shell: bash -euxo pipefail {0}
+    - name: ./script/check-keymaps
+      run: ./script/check-keymaps
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_style::check_for_typos
+      uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
+      with:
+        config: ./typos.toml
+    - name: steps::cargo_fmt
+      run: cargo fmt --all -- --check
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  run_tests_windows:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: self-32vcpu-windows-2022
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        New-Item -ItemType Directory -Path "./../.cargo" -Force
+        Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
+      shell: pwsh
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy.ps1
+      shell: pwsh
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: pwsh
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than.ps1 250
+      shell: pwsh
+    - name: steps::cargo_nextest
+      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+      shell: pwsh
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
+      shell: pwsh
+    timeout-minutes: 60
+  run_tests_linux:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: bash -euxo pipefail {0}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 100
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_nextest
+      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  run_tests_mac:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: self-mini-macos
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_node
+      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+      with:
+        node-version: '20'
+    - name: steps::clippy
+      run: ./script/clippy
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: bash -euxo pipefail {0}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 300
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_nextest
+      run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  doctests:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::cache_rust_dependencies
+      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+      with:
+        save-if: ${{ github.ref == 'refs/heads/main' }}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - id: run_doctests
+      name: run_tests::doctests::run_doctests
+      run: |
+        cargo test --workspace --doc --no-fail-fast
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  check_workspace_binaries:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: namespace-profile-8x16-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: cargo build -p collab
+      run: cargo build -p collab
+      shell: bash -euxo pipefail {0}
+    - name: cargo build --workspace --bins --examples
+      run: cargo build --workspace --bins --examples
+      shell: bash -euxo pipefail {0}
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  check_postgres_and_protobuf_migrations:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: self-mini-macos
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 0
+    - name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
+      run: git clean -df
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
+      run: |
+        if [ -z "$GITHUB_BASE_REF" ];
+        then
+          echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
+        else
+          git checkout -B temp
+          git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
+          echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
+        fi
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
+      uses: bufbuild/buf-setup-action@v1
+      with:
+        version: v1.29.0
+    - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
+      uses: bufbuild/buf-breaking-action@v1
+      with:
+        input: crates/proto/proto/
+        against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
+    timeout-minutes: 60
+  check_dependencies:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: run_tests::check_dependencies::install_cargo_machete
+      uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
+      with:
+        command: install
+        args: cargo-machete@0.7.0
+    - name: run_tests::check_dependencies::run_cargo_machete
+      uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
+      with:
+        command: machete
+    - name: run_tests::check_dependencies::check_cargo_lock
+      run: cargo update --locked --workspace
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_dependencies::check_vulnerable_dependencies
+      if: github.event_name == 'pull_request'
+      uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8
+      with:
+        license-check: false
+    timeout-minutes: 60
+  check_docs:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_docs == 'true'
+    runs-on: namespace-profile-8x16-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::cache_rust_dependencies
+      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+      with:
+        save-if: ${{ github.ref == 'refs/heads/main' }}
+    - name: run_tests::check_docs::lychee_link_check
+      uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
+      with:
+        args: --no-progress --exclude '^http' './docs/src/**/*'
+        fail: true
+        jobSummary: false
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_docs::install_mdbook
+      uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
+      with:
+        mdbook-version: 0.4.37
+    - name: run_tests::check_docs::build_docs
+      run: |
+        mkdir -p target/deploy
+        mdbook build ./docs --dest-dir=../target/deploy/docs/
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_docs::lychee_link_check
+      uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
+      with:
+        args: --no-progress --exclude '^http' 'target/deploy/docs'
+        fail: true
+        jobSummary: false
+    timeout-minutes: 60
+  check_licenses:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_licenses == 'true'
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: ./script/check-licenses
+      run: ./script/check-licenses
+      shell: bash -euxo pipefail {0}
+    - name: ./script/generate-licenses
+      run: ./script/generate-licenses
+      shell: bash -euxo pipefail {0}
+  check_scripts:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_action_checks == 'true'
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: run_tests::check_scripts::run_shellcheck
+      run: ./script/shellcheck-scripts error
+      shell: bash -euxo pipefail {0}
+    - id: get_actionlint
+      name: run_tests::check_scripts::download_actionlint
+      run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_scripts::run_actionlint
+      run: |
+        ${{ steps.get_actionlint.outputs.executable }} -color
+      shell: bash -euxo pipefail {0}
+    - name: run_tests::check_scripts::check_xtask_workflows
+      run: |
+        cargo xtask workflows
+        if ! git diff --exit-code .github; then
+          echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
+          echo "Please run 'cargo xtask workflows' locally and commit the changes"
+          exit 1
+        fi
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+  build_nix_linux_x86_64:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_nix == 'true'
+    runs-on: namespace-profile-32x64-ubuntu-2004
+    env:
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
+      GIT_LFS_SKIP_SMUDGE: '1'
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: nix_build::build_nix::install_nix
+      uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
+      with:
+        github_access_token: ${{ secrets.GITHUB_TOKEN }}
+    - name: nix_build::build_nix::cachix_action
+      uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
+      with:
+        name: zed
+        authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+        cachixArgs: -v
+        pushFilter: -zed-editor-[0-9.]*-nightly
+    - name: nix_build::build_nix::build
+      run: nix build .#debug -L --accept-flake-config
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+    continue-on-error: true
+  build_nix_mac_aarch64:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_nix == 'true'
+    runs-on: self-mini-macos
+    env:
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
+      GIT_LFS_SKIP_SMUDGE: '1'
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: nix_build::build_nix::set_path
+      run: |
+        echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
+        echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
+      shell: bash -euxo pipefail {0}
+    - name: nix_build::build_nix::cachix_action
+      uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
+      with:
+        name: zed
+        authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+        cachixArgs: -v
+        pushFilter: -zed-editor-[0-9.]*-nightly
+    - name: nix_build::build_nix::build
+      run: nix build .#debug -L --accept-flake-config
+      shell: bash -euxo pipefail {0}
+    - name: nix_build::build_nix::limit_store
+      run: |-
+        if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
+            nix-collect-garbage -d || true
+        fi
+      shell: bash -euxo pipefail {0}
+    timeout-minutes: 60
+    continue-on-error: true
+  tests_pass:
+    needs:
+    - orchestrate
+    - check_style
+    - run_tests_windows
+    - run_tests_linux
+    - run_tests_mac
+    - doctests
+    - check_workspace_binaries
+    - check_postgres_and_protobuf_migrations
+    - check_dependencies
+    - check_docs
+    - check_licenses
+    - check_scripts
+    - build_nix_linux_x86_64
+    - build_nix_mac_aarch64
+    if: github.repository_owner == 'zed-industries' && always()
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: run_tests::tests_pass
+      run: |
+        set +x
+        EXIT_CODE=0
+
+        check_result() {
+          echo "* $1: $2"
+          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
+        }
+
+        check_result "orchestrate" "${{ needs.orchestrate.result }}"
+        check_result "check_style" "${{ needs.check_style.result }}"
+        check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
+        check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
+        check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
+        check_result "doctests" "${{ needs.doctests.result }}"
+        check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
+        check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
+        check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
+        check_result "check_docs" "${{ needs.check_docs.result }}"
+        check_result "check_licenses" "${{ needs.check_licenses.result }}"
+        check_result "check_scripts" "${{ needs.check_scripts.result }}"
+        check_result "build_nix_linux_x86_64" "${{ needs.build_nix_linux_x86_64.result }}"
+        check_result "build_nix_mac_aarch64" "${{ needs.build_nix_mac_aarch64.result }}"
+
+        exit $EXIT_CODE
+      shell: bash -euxo pipefail {0}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true

.github/workflows/run_unit_evals.yml

@@ -0,0 +1,63 @@
+# Generated from xtask::workflows::run_agent_evals
+# Rebuild with `cargo xtask workflows`.
+name: run_agent_evals
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: '0'
+  RUST_BACKTRACE: '1'
+  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+on:
+  schedule:
+  - cron: 47 1 * * 2
+  workflow_dispatch: {}
+jobs:
+  unit_evals:
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+      shell: bash -euxo pipefail {0}
+    - name: steps::cache_rust_dependencies
+      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+      with:
+        save-if: ${{ github.ref == 'refs/heads/main' }}
+    - name: steps::setup_linux
+      run: ./script/linux
+      shell: bash -euxo pipefail {0}
+    - name: steps::install_mold
+      run: ./script/install-mold
+      shell: bash -euxo pipefail {0}
+    - name: steps::cargo_install_nextest
+      run: cargo install cargo-nextest --locked
+      shell: bash -euxo pipefail {0}
+    - name: steps::clear_target_dir_if_large
+      run: ./script/clear-target-dir-if-larger-than 100
+      shell: bash -euxo pipefail {0}
+    - name: ./script/run-unit-evals
+      run: ./script/run-unit-evals
+      shell: bash -euxo pipefail {0}
+      env:
+        ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+    - name: run_agent_evals::unit_evals::send_failure_to_slack
+      if: ${{ failure() }}
+      uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
+      with:
+        method: chat.postMessage
+        token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
+        payload: |
+          channel: C04UDRNNJFQ
+          text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+      shell: bash -euxo pipefail {0}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true

.github/workflows/script_checks.yml

@@ -1,21 +0,0 @@
-name: Script
-
-on:
-  pull_request:
-    paths:
-      - "script/**"
-  push:
-    branches:
-      - main
-
-jobs:
-  shellcheck:
-    name: "ShellCheck Scripts"
-    if: github.repository_owner == 'zed-industries'
-    runs-on: namespace-profile-2x4-ubuntu-2404
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-      - name: Shellcheck ./scripts
-        run: |
-          ./script/shellcheck-scripts error

.github/workflows/unit_evals.yml

@@ -1,86 +0,0 @@
-name: Run Unit Evals
-
-on:
-  schedule:
-    # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
-    - cron: "47 1 * * 2"
-  workflow_dispatch:
-
-concurrency:
-  # Allow only one workflow per any non-`main` branch.
-  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
-  cancel-in-progress: true
-
-env:
-  CARGO_TERM_COLOR: always
-  CARGO_INCREMENTAL: 0
-  RUST_BACKTRACE: 1
-  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
-
-jobs:
-  unit_evals:
-    if: github.repository_owner == 'zed-industries'
-    timeout-minutes: 60
-    name: Run unit evals
-    runs-on:
-      - namespace-profile-16x32-ubuntu-2204
-    steps:
-      - name: Add Rust to the PATH
-        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
-
-      - name: Checkout repo
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          clean: false
-
-      - name: Cache dependencies
-        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-          # cache-provider: "buildjet"
-
-      - name: Install Linux dependencies
-        run: ./script/linux
-
-      - name: Configure CI
-        run: |
-          mkdir -p ./../.cargo
-          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
-
-      - name: Install Rust
-        shell: bash -euxo pipefail {0}
-        run: |
-          cargo install cargo-nextest --locked
-
-      - name: Install Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
-        with:
-          node-version: "18"
-
-      - name: Limit target directory size
-        shell: bash -euxo pipefail {0}
-        run: script/clear-target-dir-if-larger-than 100
-
-      - name: Run unit evals
-        shell: bash -euxo pipefail {0}
-        run: cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)'
-        env:
-          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
-
-      - name: Send failure message to Slack channel if needed
-        if: ${{ failure() }}
-        uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
-        with:
-          method: chat.postMessage
-          token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
-          payload: |
-            channel: C04UDRNNJFQ
-            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
-
-      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
-      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
-      # to clean up the config file, I’ve included the cleanup code here as a precaution.
-      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
-      - name: Clean CI config file
-        if: always()
-        run: rm -rf ./../.cargo

Cargo.lock πŸ”—

@@ -1339,6 +1339,7 @@ dependencies = [
  "settings",
  "smol",
  "tempfile",
+ "util",
  "which 6.0.3",
  "workspace",
 ]
@@ -4528,12 +4529,15 @@ dependencies = [
  "fs",
  "futures 0.3.31",
  "gpui",
+ "http_client",
  "json_dotpath",
  "language",
  "log",
+ "node_runtime",
  "paths",
  "serde",
  "serde_json",
+ "settings",
  "smol",
  "task",
  "util",
@@ -7074,6 +7078,7 @@ dependencies = [
  "serde_json",
  "settings",
  "url",
+ "urlencoding",
  "util",
 ]
 
@@ -12711,6 +12716,12 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7"
 
+[[package]]
+name = "pollster"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3"
+
 [[package]]
 name = "portable-atomic"
 version = "1.11.1"
@@ -12759,7 +12770,7 @@ dependencies = [
  "log",
  "parking_lot",
  "pin-project",
- "pollster",
+ "pollster 0.2.5",
  "static_assertions",
  "thiserror 1.0.69",
 ]
@@ -14311,7 +14322,6 @@ dependencies = [
  "gpui",
  "log",
  "rand 0.9.2",
- "rayon",
  "sum_tree",
  "unicode-segmentation",
  "util",
@@ -16237,6 +16247,7 @@ checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520"
 name = "streaming_diff"
 version = "0.1.0"
 dependencies = [
+ "gpui",
  "ordered-float 2.10.1",
  "rand 0.9.2",
  "rope",
@@ -16355,9 +16366,11 @@ version = "0.1.0"
 dependencies = [
  "arrayvec",
  "ctor",
+ "futures 0.3.31",
+ "futures-lite 1.13.0",
  "log",
+ "pollster 0.4.0",
  "rand 0.9.2",
- "rayon",
  "zlog",
 ]
 
@@ -21220,6 +21233,7 @@ dependencies = [
  "project_symbols",
  "prompt_store",
  "proto",
+ "rayon",
  "recent_projects",
  "release_channel",
  "remote",
@@ -21702,6 +21716,7 @@ dependencies = [
  "ordered-float 2.10.1",
  "pretty_assertions",
  "project",
+ "regex-syntax",
  "serde",
  "serde_json",
  "settings",
@@ -21745,6 +21760,7 @@ dependencies = [
  "polars",
  "project",
  "prompt_store",
+ "pulldown-cmark 0.12.2",
  "release_channel",
  "reqwest_client",
  "serde",
@@ -21754,6 +21770,7 @@ dependencies = [
  "smol",
  "soa-rs",
  "terminal_view",
+ "toml 0.8.23",
  "util",
  "watch",
  "zeta",

REVIEWERS.conl πŸ”—

@@ -19,6 +19,7 @@
   = @dinocosta
   = @smitbarmase
   = @cole-miller
+  = @HactarCE
 
 vim
   = @ConradIrwin
@@ -80,6 +81,7 @@ ai
   = @rtfeldman
   = @danilo-leal
   = @benbrandt
+  = @bennetbo
 
 design
   = @danilo-leal

assets/keymaps/default-linux.json πŸ”—

@@ -1020,7 +1020,8 @@
     "context": "CollabPanel",
     "bindings": {
       "alt-up": "collab_panel::MoveChannelUp",
-      "alt-down": "collab_panel::MoveChannelDown"
+      "alt-down": "collab_panel::MoveChannelDown",
+      "alt-enter": "collab_panel::OpenSelectedChannelNotes"
     }
   },
   {
@@ -1134,7 +1135,8 @@
       "ctrl-shift-space": "terminal::ToggleViMode",
       "ctrl-shift-r": "terminal::RerunTask",
       "ctrl-alt-r": "terminal::RerunTask",
-      "alt-t": "terminal::RerunTask"
+      "alt-t": "terminal::RerunTask",
+      "ctrl-shift-5": "pane::SplitRight"
     }
   },
   {

assets/keymaps/default-macos.json πŸ”—

@@ -1085,7 +1085,8 @@
     "use_key_equivalents": true,
     "bindings": {
       "alt-up": "collab_panel::MoveChannelUp",
-      "alt-down": "collab_panel::MoveChannelDown"
+      "alt-down": "collab_panel::MoveChannelDown",
+      "alt-enter": "collab_panel::OpenSelectedChannelNotes"
     }
   },
   {
@@ -1217,6 +1218,7 @@
       "ctrl-alt-down": "pane::SplitDown",
       "ctrl-alt-left": "pane::SplitLeft",
       "ctrl-alt-right": "pane::SplitRight",
+      "cmd-d": "pane::SplitRight",
       "cmd-alt-r": "terminal::RerunTask"
     }
   },

assets/keymaps/default-windows.json πŸ”—

@@ -1038,7 +1038,8 @@
     "use_key_equivalents": true,
     "bindings": {
       "alt-up": "collab_panel::MoveChannelUp",
-      "alt-down": "collab_panel::MoveChannelDown"
+      "alt-down": "collab_panel::MoveChannelDown",
+      "alt-enter": "collab_panel::OpenSelectedChannelNotes"
     }
   },
   {
@@ -1160,7 +1161,8 @@
       "ctrl-shift-space": "terminal::ToggleViMode",
       "ctrl-shift-r": "terminal::RerunTask",
       "ctrl-alt-r": "terminal::RerunTask",
-      "alt-t": "terminal::RerunTask"
+      "alt-t": "terminal::RerunTask",
+      "ctrl-shift-5": "pane::SplitRight"
     }
   },
   {

assets/keymaps/vim.json πŸ”—

@@ -421,6 +421,12 @@
       "ctrl-[": "editor::Cancel"
     }
   },
+  {
+    "context": "vim_mode == helix_select && !menu",
+    "bindings": {
+      "escape": "vim::SwitchToHelixNormalMode"
+    }
+  },
   {
     "context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu",
     "bindings": {

crates/acp_thread/src/diff.rs πŸ”—

@@ -361,10 +361,12 @@ async fn build_buffer_diff(
 ) -> Result<Entity<BufferDiff>> {
     let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
 
+    let executor = cx.background_executor().clone();
     let old_text_rope = cx
         .background_spawn({
             let old_text = old_text.clone();
-            async move { Rope::from(old_text.as_str()) }
+            let executor = executor.clone();
+            async move { Rope::from_str(old_text.as_str(), &executor) }
         })
         .await;
     let base_buffer = cx

crates/acp_tools/src/acp_tools.rs πŸ”—

@@ -19,7 +19,7 @@ use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
 use project::Project;
 use settings::Settings;
 use theme::ThemeSettings;
-use ui::{Tooltip, prelude::*};
+use ui::{Tooltip, WithScrollbar, prelude::*};
 use util::ResultExt as _;
 use workspace::{
     Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
@@ -291,17 +291,19 @@ impl AcpTools {
         let expanded = self.expanded.contains(&index);
 
         v_flex()
+            .id(index)
+            .group("message")
+            .cursor_pointer()
+            .font_buffer(cx)
             .w_full()
-            .px_4()
             .py_3()
-            .border_color(colors.border)
-            .border_b_1()
+            .pl_4()
+            .pr_5()
             .gap_2()
             .items_start()
-            .font_buffer(cx)
             .text_size(base_size)
-            .id(index)
-            .group("message")
+            .border_color(colors.border)
+            .border_b_1()
             .hover(|this| this.bg(colors.element_background.opacity(0.5)))
             .on_click(cx.listener(move |this, _, _, cx| {
                 if this.expanded.contains(&index) {
@@ -323,15 +325,14 @@ impl AcpTools {
                 h_flex()
                     .w_full()
                     .gap_2()
-                    .items_center()
                     .flex_shrink_0()
                     .child(match message.direction {
-                        acp::StreamMessageDirection::Incoming => {
-                            ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error)
-                        }
-                        acp::StreamMessageDirection::Outgoing => {
-                            ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success)
-                        }
+                        acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown)
+                            .color(Color::Error)
+                            .size(IconSize::Small),
+                        acp::StreamMessageDirection::Outgoing => Icon::new(IconName::ArrowUp)
+                            .color(Color::Success)
+                            .size(IconSize::Small),
                     })
                     .child(
                         Label::new(message.name.clone())
@@ -501,7 +502,7 @@ impl Focusable for AcpTools {
 }
 
 impl Render for AcpTools {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         v_flex()
             .track_focus(&self.focus_handle)
             .size_full()
@@ -516,13 +517,19 @@ impl Render for AcpTools {
                             .child("No messages recorded yet")
                             .into_any()
                     } else {
-                        list(
-                            connection.list_state.clone(),
-                            cx.processor(Self::render_message),
-                        )
-                        .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
-                        .flex_grow()
-                        .into_any()
+                        div()
+                            .size_full()
+                            .flex_grow()
+                            .child(
+                                list(
+                                    connection.list_state.clone(),
+                                    cx.processor(Self::render_message),
+                                )
+                                .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
+                                .size_full(),
+                            )
+                            .vertical_scrollbar_for(connection.list_state.clone(), window, cx)
+                            .into_any()
                     }
                 }
                 None => h_flex()

crates/action_log/src/action_log.rs πŸ”—

@@ -3,7 +3,9 @@ use buffer_diff::BufferDiff;
 use clock;
 use collections::BTreeMap;
 use futures::{FutureExt, StreamExt, channel::mpsc};
-use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
+use gpui::{
+    App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, Subscription, Task, WeakEntity,
+};
 use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
 use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
 use std::{cmp, ops::Range, sync::Arc};
@@ -321,6 +323,7 @@ impl ActionLog {
                 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                 let mut has_user_changes = false;
+                let executor = cx.background_executor().clone();
                 async move {
                     if let ChangeAuthor::User = author {
                         has_user_changes = apply_non_conflicting_edits(
@@ -328,6 +331,7 @@ impl ActionLog {
                             edits,
                             &mut base_text,
                             new_snapshot.as_rope(),
+                            &executor,
                         );
                     }
 
@@ -382,6 +386,7 @@ impl ActionLog {
                 let agent_diff_base = tracked_buffer.diff_base.clone();
                 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
                 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
+                let executor = cx.background_executor().clone();
                 anyhow::Ok(cx.background_spawn(async move {
                     let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                     let committed_edits = language::line_diff(
@@ -416,8 +421,11 @@ impl ActionLog {
                                             ),
                                             new_agent_diff_base.max_point(),
                                         ));
-                                    new_agent_diff_base
-                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
+                                    new_agent_diff_base.replace(
+                                        old_byte_start..old_byte_end,
+                                        &unreviewed_new,
+                                        &executor,
+                                    );
                                     row_delta +=
                                         unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                 }
@@ -611,6 +619,7 @@ impl ActionLog {
                                 .snapshot
                                 .text_for_range(new_range)
                                 .collect::<String>(),
+                            cx.background_executor(),
                         );
                         delta += edit.new_len() as i32 - edit.old_len() as i32;
                         false
@@ -824,6 +833,7 @@ fn apply_non_conflicting_edits(
     edits: Vec<Edit<u32>>,
     old_text: &mut Rope,
     new_text: &Rope,
+    executor: &BackgroundExecutor,
 ) -> bool {
     let mut old_edits = patch.edits().iter().cloned().peekable();
     let mut new_edits = edits.into_iter().peekable();
@@ -877,6 +887,7 @@ fn apply_non_conflicting_edits(
             old_text.replace(
                 old_bytes,
                 &new_text.chunks_in_range(new_bytes).collect::<String>(),
+                executor,
             );
             applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
             has_made_changes = true;
@@ -2282,6 +2293,7 @@ mod tests {
                     old_text.replace(
                         old_start..old_end,
                         &new_text.slice_rows(edit.new.clone()).to_string(),
+                        cx.background_executor(),
                     );
                 }
                 pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());

crates/agent/src/edit_agent/edit_parser.rs πŸ”—

@@ -13,7 +13,15 @@ const EDITS_END_TAG: &str = "</edits>";
 const SEARCH_MARKER: &str = "<<<<<<< SEARCH";
 const SEPARATOR_MARKER: &str = "=======";
 const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
-const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG];
+const SONNET_PARAMETER_INVOKE_1: &str = "</parameter>\n</invoke>";
+const SONNET_PARAMETER_INVOKE_2: &str = "</parameter></invoke>";
+const END_TAGS: [&str; 5] = [
+    OLD_TEXT_END_TAG,
+    NEW_TEXT_END_TAG,
+    EDITS_END_TAG,
+    SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool calls
+    SONNET_PARAMETER_INVOKE_2,
+];
 
 #[derive(Debug)]
 pub enum EditParserEvent {
@@ -547,6 +555,37 @@ mod tests {
         );
     }
 
+    #[gpui::test(iterations = 1000)]
+    fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) {
+        // This is a regression test for Claude Sonnet 4.5.
+        // Sometimes Sonnet thinks that it's doing a tool call
+        // and closes its response with '</parameter></invoke>'
+        // instead of properly closing '</new_text>'.
+
+        let mut parser = EditParser::new(EditFormat::XmlTags);
+        assert_eq!(
+            parse_random_chunks(
+                indoc! {"
+                    <old_text>some text</old_text><new_text>updated text</parameter></invoke>
+                "},
+                &mut parser,
+                &mut rng
+            ),
+            vec![Edit {
+                old_text: "some text".to_string(),
+                new_text: "updated text".to_string(),
+                line_hint: None,
+            },]
+        );
+        assert_eq!(
+            parser.finish(),
+            EditParserMetrics {
+                tags: 2,
+                mismatched_tags: 1
+            }
+        );
+    }
+
     #[gpui::test(iterations = 1000)]
     fn test_xml_nested_tags(mut rng: StdRng) {
         let mut parser = EditParser::new(EditFormat::XmlTags);
@@ -1035,6 +1074,11 @@ mod tests {
             last_ix = chunk_ix;
         }
 
+        if let Some(new_text) = new_text.take() {
+            pending_edit.new_text = new_text;
+            edits.push(pending_edit);
+        }
+
         edits
     }
 }

crates/agent/src/edit_agent/evals.rs πŸ”—

@@ -1581,6 +1581,7 @@ impl EditAgentTest {
             let template = crate::SystemPromptTemplate {
                 project: &project_context,
                 available_tools: tool_names,
+                model_name: None,
             };
             let templates = Templates::new();
             template.render(&templates).unwrap()

crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs πŸ”—

@@ -305,18 +305,20 @@ impl SearchMatrix {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use gpui::TestAppContext;
     use indoc::indoc;
     use language::{BufferId, TextBuffer};
     use rand::prelude::*;
     use text::ReplicaId;
     use util::test::{generate_marked_text, marked_text_ranges};
 
-    #[test]
-    fn test_empty_query() {
+    #[gpui::test]
+    fn test_empty_query(cx: &mut gpui::TestAppContext) {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             "Hello world\nThis is a test\nFoo bar baz",
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -325,12 +327,13 @@ mod tests {
         assert_eq!(finish(finder), None);
     }
 
-    #[test]
-    fn test_streaming_exact_match() {
+    #[gpui::test]
+    fn test_streaming_exact_match(cx: &mut gpui::TestAppContext) {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             "Hello world\nThis is a test\nFoo bar baz",
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -349,8 +352,8 @@ mod tests {
         assert_eq!(finish(finder), Some("This is a test".to_string()));
     }
 
-    #[test]
-    fn test_streaming_fuzzy_match() {
+    #[gpui::test]
+    fn test_streaming_fuzzy_match(cx: &mut gpui::TestAppContext) {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
@@ -363,6 +366,7 @@ mod tests {
                     return x * y;
                 }
             "},
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -383,12 +387,13 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_incremental_improvement() {
+    #[gpui::test]
+    fn test_incremental_improvement(cx: &mut gpui::TestAppContext) {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             "Line 1\nLine 2\nLine 3\nLine 4\nLine 5",
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -408,8 +413,8 @@ mod tests {
         assert_eq!(finish(finder), Some("Line 3\nLine 4".to_string()));
     }
 
-    #[test]
-    fn test_incomplete_lines_buffering() {
+    #[gpui::test]
+    fn test_incomplete_lines_buffering(cx: &mut gpui::TestAppContext) {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
@@ -418,6 +423,7 @@ mod tests {
                 jumps over the lazy dog
                 Pack my box with five dozen liquor jugs
             "},
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -435,8 +441,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_multiline_fuzzy_match() {
+    #[gpui::test]
+    fn test_multiline_fuzzy_match(cx: &mut gpui::TestAppContext) {
         let buffer = TextBuffer::new(
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
@@ -456,6 +462,7 @@ mod tests {
                     }
                 }
             "#},
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
 
@@ -509,7 +516,7 @@ mod tests {
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_single_line(mut rng: StdRng) {
+    fn test_resolve_location_single_line(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             concat!(
                 "    Lorem\n",
@@ -519,11 +526,12 @@ mod tests {
             ),
             "ipsum",
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_multiline(mut rng: StdRng) {
+    fn test_resolve_location_multiline(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             concat!(
                 "    Lorem\n",
@@ -533,11 +541,12 @@ mod tests {
             ),
             "ipsum\ndolor sit amet",
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_function_with_typo(mut rng: StdRng) {
+    fn test_resolve_location_function_with_typo(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             indoc! {"
                 Β«fn foo1(a: usize) -> usize {
@@ -550,11 +559,12 @@ mod tests {
             "},
             "fn foo1(a: usize) -> u32 {\n40\n}",
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_class_methods(mut rng: StdRng) {
+    fn test_resolve_location_class_methods(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             indoc! {"
                 class Something {
@@ -575,11 +585,12 @@ mod tests {
                 six() { return 6666; }
             "},
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_imports_no_match(mut rng: StdRng) {
+    fn test_resolve_location_imports_no_match(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             indoc! {"
                 use std::ops::Range;
@@ -609,11 +620,12 @@ mod tests {
                 use std::sync::Arc;
             "},
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_nested_closure(mut rng: StdRng) {
+    fn test_resolve_location_nested_closure(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             indoc! {"
                 impl Foo {
@@ -641,11 +653,12 @@ mod tests {
                 "                    });",
             ),
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_resolve_location_tool_invocation(mut rng: StdRng) {
+    fn test_resolve_location_tool_invocation(mut rng: StdRng, cx: &mut TestAppContext) {
         assert_location_resolution(
             indoc! {r#"
                 let tool = cx
@@ -673,11 +686,12 @@ mod tests {
                 "    .output;",
             ),
             &mut rng,
+            cx,
         );
     }
 
     #[gpui::test]
-    fn test_line_hint_selection() {
+    fn test_line_hint_selection(cx: &mut TestAppContext) {
         let text = indoc! {r#"
             fn first_function() {
                 return 42;
@@ -696,6 +710,7 @@ mod tests {
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             text.to_string(),
+            cx.background_executor(),
         );
         let snapshot = buffer.snapshot();
         let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
@@ -727,9 +742,19 @@ mod tests {
     }
 
     #[track_caller]
-    fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) {
+    fn assert_location_resolution(
+        text_with_expected_range: &str,
+        query: &str,
+        rng: &mut StdRng,
+        cx: &mut TestAppContext,
+    ) {
         let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false);
-        let buffer = TextBuffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.clone());
+        let buffer = TextBuffer::new(
+            ReplicaId::LOCAL,
+            BufferId::new(1).unwrap(),
+            text.clone(),
+            cx.background_executor(),
+        );
         let snapshot = buffer.snapshot();
 
         let mut matcher = StreamingFuzzyMatcher::new(snapshot);

crates/agent/src/templates.rs πŸ”—

@@ -38,6 +38,7 @@ pub struct SystemPromptTemplate<'a> {
     #[serde(flatten)]
     pub project: &'a prompt_store::ProjectContext,
     pub available_tools: Vec<SharedString>,
+    pub model_name: Option<String>,
 }
 
 impl Template for SystemPromptTemplate<'_> {
@@ -79,9 +80,11 @@ mod tests {
         let template = SystemPromptTemplate {
             project: &project,
             available_tools: vec!["echo".into()],
+            model_name: Some("test-model".to_string()),
         };
         let templates = Templates::new();
         let rendered = template.render(&templates).unwrap();
         assert!(rendered.contains("## Fixing Diagnostics"));
+        assert!(rendered.contains("test-model"));
     }
 }

crates/agent/src/templates/system_prompt.hbs πŸ”—

@@ -150,6 +150,12 @@ Otherwise, follow debugging best practices:
 Operating System: {{os}}
 Default Shell: {{shell}}
 
+{{#if model_name}}
+## Model Information
+
+You are powered by the model named {{model_name}}.
+
+{{/if}}
 {{#if (or has_rules has_user_rules)}}
 ## User's Custom Instructions
 

crates/agent/src/thread.rs πŸ”—

@@ -1928,6 +1928,7 @@ impl Thread {
         let system_prompt = SystemPromptTemplate {
             project: self.project_context.read(cx),
             available_tools,
+            model_name: self.model.as_ref().map(|m| m.name().0.to_string()),
         }
         .render(&self.templates)
         .context("failed to build system prompt")

crates/agent/src/tools/edit_file_tool.rs πŸ”—

@@ -569,6 +569,7 @@ mod tests {
     use prompt_store::ProjectContext;
     use serde_json::json;
     use settings::SettingsStore;
+    use text::Rope;
     use util::{path, rel_path::rel_path};
 
     #[gpui::test]
@@ -741,7 +742,7 @@ mod tests {
         // Create the file
         fs.save(
             path!("/root/src/main.rs").as_ref(),
-            &"initial content".into(),
+            &Rope::from_str_small("initial content"),
             language::LineEnding::Unix,
         )
         .await
@@ -908,7 +909,7 @@ mod tests {
         // Create a simple file with trailing whitespace
         fs.save(
             path!("/root/src/main.rs").as_ref(),
-            &"initial content".into(),
+            &Rope::from_str_small("initial content"),
             language::LineEnding::Unix,
         )
         .await

crates/agent_ui/src/acp/message_editor.rs πŸ”—

@@ -1,4 +1,5 @@
 use crate::{
+    ChatWithFollow,
     acp::completion_provider::{ContextPickerCompletionProvider, SlashCommandCompletion},
     context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content},
 };
@@ -15,6 +16,7 @@ use editor::{
     MultiBuffer, ToOffset,
     actions::Paste,
     display_map::{Crease, CreaseId, FoldId},
+    scroll::Autoscroll,
 };
 use futures::{
     FutureExt as _,
@@ -49,7 +51,7 @@ use text::OffsetRangeExt;
 use theme::ThemeSettings;
 use ui::{ButtonLike, TintColor, Toggleable, prelude::*};
 use util::{ResultExt, debug_panic, rel_path::RelPath};
-use workspace::{Workspace, notifications::NotifyResultExt as _};
+use workspace::{CollaboratorId, Workspace, notifications::NotifyResultExt as _};
 use zed_actions::agent::Chat;
 
 pub struct MessageEditor {
@@ -234,8 +236,16 @@ impl MessageEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
+        let uri = MentionUri::Thread {
+            id: thread.id.clone(),
+            name: thread.title.to_string(),
+        };
+        let content = format!("{}\n", uri.as_link());
+
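+        // Length of the inserted mention link, excluding the trailing newline.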
+        let content_len = content.len() - 1;
+
         let start = self.editor.update(cx, |editor, cx| {
-            editor.set_text(format!("{}\n", thread.title), window, cx);
+            editor.set_text(content, window, cx);
             editor
                 .buffer()
                 .read(cx)
@@ -244,18 +254,8 @@ impl MessageEditor {
                 .text_anchor
         });
 
-        self.confirm_mention_completion(
-            thread.title.clone(),
-            start,
-            thread.title.len(),
-            MentionUri::Thread {
-                id: thread.id.clone(),
-                name: thread.title.to_string(),
-            },
-            window,
-            cx,
-        )
-        .detach();
+        self.confirm_mention_completion(thread.title, start, content_len, uri, window, cx)
+            .detach();
     }
 
     #[cfg(test)]
@@ -592,6 +592,21 @@ impl MessageEditor {
                 ),
             );
         }
+
+        // Take this explanation with a grain of salt, but with creases being
+        // inserted, GPUI recomputes the editor layout over the next frames, so
+        // directly calling `editor.request_autoscroll` wouldn't work as
+        // expected. We leverage `cx.on_next_frame` to wait two frames and
+        // ensure that the layout has been recalculated so that the autoscroll
+        // request actually shows the cursor's new position.
+        let editor = self.editor.clone();
+        cx.on_next_frame(window, move |_, window, cx| {
+            cx.on_next_frame(window, move |_, _, cx| {
+                editor.update(cx, |editor, cx| {
+                    editor.request_autoscroll(Autoscroll::fit(), cx)
+                });
+            });
+        });
     }
 
     fn confirm_mention_for_thread(
@@ -813,6 +828,21 @@ impl MessageEditor {
         self.send(cx);
     }
 
+    fn chat_with_follow(
+        &mut self,
+        _: &ChatWithFollow,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.workspace
+            .update(cx, |this, cx| {
+                this.follow(CollaboratorId::Agent, window, cx)
+            })
+            .log_err();
+
+        self.send(cx);
+    }
+
     fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context<Self>) {
         cx.emit(MessageEditorEvent::Cancel)
     }
@@ -1016,6 +1046,7 @@ impl MessageEditor {
 
         self.editor.update(cx, |message_editor, cx| {
             message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx);
+            message_editor.request_autoscroll(Autoscroll::fit(), cx);
         });
         if let Some(confirm) = completion.confirm {
             confirm(CompletionIntent::Complete, window, cx);
@@ -1276,6 +1307,7 @@ impl Render for MessageEditor {
         div()
             .key_context("MessageEditor")
             .on_action(cx.listener(Self::chat))
+            .on_action(cx.listener(Self::chat_with_follow))
             .on_action(cx.listener(Self::cancel))
             .capture_action(cx.listener(Self::paste))
             .flex_1()
@@ -1584,6 +1616,7 @@ mod tests {
     use gpui::{
         AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext,
     };
+    use language_model::LanguageModelRegistry;
     use lsp::{CompletionContext, CompletionTriggerKind};
     use project::{CompletionIntent, Project, ProjectPath};
     use serde_json::json;
@@ -2730,6 +2763,82 @@ mod tests {
         }
     }
 
+    #[gpui::test]
+    async fn test_insert_thread_summary(cx: &mut TestAppContext) {
+        init_test(cx);
+        cx.update(LanguageModelRegistry::test);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree("/project", json!({"file": ""})).await;
+        let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
+
+        let (workspace, cx) =
+            cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+
+        let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+        let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
+
+        // Create thread metadata to insert as a summary
+        let thread_metadata = agent::DbThreadMetadata {
+            id: acp::SessionId("thread-123".into()),
+            title: "Previous Conversation".into(),
+            updated_at: chrono::Utc::now(),
+        };
+
+        let message_editor = cx.update(|window, cx| {
+            cx.new(|cx| {
+                let mut editor = MessageEditor::new(
+                    workspace.downgrade(),
+                    project.clone(),
+                    history_store.clone(),
+                    None,
+                    Default::default(),
+                    Default::default(),
+                    "Test Agent".into(),
+                    "Test",
+                    EditorMode::AutoHeight {
+                        min_lines: 1,
+                        max_lines: None,
+                    },
+                    window,
+                    cx,
+                );
+                editor.insert_thread_summary(thread_metadata.clone(), window, cx);
+                editor
+            })
+        });
+
+        // Construct expected values for verification
+        let expected_uri = MentionUri::Thread {
+            id: thread_metadata.id.clone(),
+            name: thread_metadata.title.to_string(),
+        };
+        let expected_link = format!("[@{}]({})", thread_metadata.title, expected_uri.to_uri());
+
+        message_editor.read_with(cx, |editor, cx| {
+            let text = editor.text(cx);
+
+            assert!(
+                text.contains(&expected_link),
+                "Expected editor text to contain thread mention link.\nExpected substring: {}\nActual text: {}",
+                expected_link,
+                text
+            );
+
+            let mentions = editor.mentions();
+            assert_eq!(
+                mentions.len(),
+                1,
+                "Expected exactly one mention after inserting thread summary"
+            );
+
+            assert!(
+                mentions.contains(&expected_uri),
+                "Expected mentions to contain the thread URI"
+            );
+        });
+    }
+
     #[gpui::test]
     async fn test_whitespace_trimming(cx: &mut TestAppContext) {
         init_test(cx);
@@ -2787,4 +2896,161 @@ mod tests {
             })]
         );
     }
+
+    #[gpui::test]
+    async fn test_autoscroll_after_insert_selections(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let app_state = cx.update(AppState::test);
+
+        cx.update(|cx| {
+            language::init(cx);
+            editor::init(cx);
+            workspace::init(app_state.clone(), cx);
+            Project::init_settings(cx);
+        });
+
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(
+                path!("/dir"),
+                json!({
+                    "test.txt": "line1\nline2\nline3\nline4\nline5\n",
+                }),
+            )
+            .await;
+
+        let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+        let workspace = window.root(cx).unwrap();
+
+        let worktree = project.update(cx, |project, cx| {
+            let mut worktrees = project.worktrees(cx).collect::<Vec<_>>();
+            assert_eq!(worktrees.len(), 1);
+            worktrees.pop().unwrap()
+        });
+        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
+
+        let mut cx = VisualTestContext::from_window(*window, cx);
+
+        // Open a regular editor with the created file, and select a portion of
+        // the text that will be used for the selections that are meant to be
+        // inserted in the agent panel.
+        let editor = workspace
+            .update_in(&mut cx, |workspace, window, cx| {
+                workspace.open_path(
+                    ProjectPath {
+                        worktree_id,
+                        path: rel_path("test.txt").into(),
+                    },
+                    None,
+                    false,
+                    window,
+                    cx,
+                )
+            })
+            .await
+            .unwrap()
+            .downcast::<Editor>()
+            .unwrap();
+
+        editor.update_in(&mut cx, |editor, window, cx| {
+            editor.change_selections(Default::default(), window, cx, |selections| {
+                selections.select_ranges([Point::new(0, 0)..Point::new(0, 5)]);
+            });
+        });
+
+        let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+        let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
+
+        // Create a new `MessageEditor`. `EditorMode::full()` has to be used
+        // to ensure we have a fixed viewport, so we can later place the
+        // cursor outside of the visible area.
+        let message_editor = workspace.update_in(&mut cx, |workspace, window, cx| {
+            let workspace_handle = cx.weak_entity();
+            let message_editor = cx.new(|cx| {
+                MessageEditor::new(
+                    workspace_handle,
+                    project.clone(),
+                    history_store.clone(),
+                    None,
+                    Default::default(),
+                    Default::default(),
+                    "Test Agent".into(),
+                    "Test",
+                    EditorMode::full(),
+                    window,
+                    cx,
+                )
+            });
+            workspace.active_pane().update(cx, |pane, cx| {
+                pane.add_item(
+                    Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
+                    true,
+                    true,
+                    None,
+                    window,
+                    cx,
+                );
+            });
+
+            message_editor
+        });
+
+        message_editor.update_in(&mut cx, |message_editor, window, cx| {
+            message_editor.editor.update(cx, |editor, cx| {
+                // Give the Agent Panel's message editor 100 lines of text,
+                // place the cursor at line 90, and then scroll all the way to
+                // the top so that the cursor's position remains off screen.
+                let mut lines = String::new();
+                for _ in 1..=100 {
+                    lines.push_str("Another line in the agent panel's message editor\n");
+                }
+                editor.set_text(lines.as_str(), window, cx);
+                editor.change_selections(Default::default(), window, cx, |selections| {
+                    selections.select_ranges([Point::new(90, 0)..Point::new(90, 0)]);
+                });
+                editor.set_scroll_position(gpui::Point::new(0., 0.), window, cx);
+            });
+        });
+
+        cx.run_until_parked();
+
+        // Before proceeding, let's assert that the cursor is indeed off screen,
+        // otherwise the rest of the test doesn't make sense.
+        message_editor.update_in(&mut cx, |message_editor, window, cx| {
+            message_editor.editor.update(cx, |editor, cx| {
+                let snapshot = editor.snapshot(window, cx);
+                let cursor_row = editor.selections.newest::<Point>(&snapshot).head().row;
+                let scroll_top = snapshot.scroll_position().y as u32;
+                let visible_lines = editor.visible_line_count().unwrap() as u32;
+                let visible_range = scroll_top..(scroll_top + visible_lines);
+
+                assert!(!visible_range.contains(&cursor_row));
+            })
+        });
+
+        // Now let's insert the selection in the Agent Panel's editor and
+        // confirm that, after the insertion, the cursor is now in the visible
+        // range.
+        message_editor.update_in(&mut cx, |message_editor, window, cx| {
+            message_editor.insert_selections(window, cx);
+        });
+
+        cx.run_until_parked();
+
+        message_editor.update_in(&mut cx, |message_editor, window, cx| {
+            message_editor.editor.update(cx, |editor, cx| {
+                let snapshot = editor.snapshot(window, cx);
+                let cursor_row = editor.selections.newest::<Point>(&snapshot).head().row;
+                let scroll_top = snapshot.scroll_position().y as u32;
+                let visible_lines = editor.visible_line_count().unwrap() as u32;
+                let visible_range = scroll_top..(scroll_top + visible_lines);
+
+                assert!(visible_range.contains(&cursor_row));
+            })
+        });
+    }
 }

crates/agent_ui/src/acp/mode_selector.rs πŸ”—

@@ -1,8 +1,10 @@
 use acp_thread::AgentSessionModes;
 use agent_client_protocol as acp;
 use agent_servers::AgentServer;
+use agent_settings::AgentSettings;
 use fs::Fs;
 use gpui::{Context, Entity, FocusHandle, WeakEntity, Window, prelude::*};
+use settings::Settings as _;
 use std::{rc::Rc, sync::Arc};
 use ui::{
     Button, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, KeyBinding,
@@ -84,6 +86,14 @@ impl ModeSelector {
             let current_mode = self.connection.current_mode();
             let default_mode = self.agent_server.default_mode(cx);
 
+            let settings = AgentSettings::get_global(cx);
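+            // Pick which side the mode documentation opens on based on where the agent panel is docked.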
+            let side = match settings.dock {
+                settings::DockPosition::Left => DocumentationSide::Right,
+                settings::DockPosition::Bottom | settings::DockPosition::Right => {
+                    DocumentationSide::Left
+                }
+            };
+
             for mode in all_modes {
                 let is_selected = &mode.id == &current_mode;
                 let is_default = Some(&mode.id) == default_mode.as_ref();
@@ -91,7 +101,7 @@ impl ModeSelector {
                     .toggleable(IconPosition::End, is_selected);
 
                 let entry = if let Some(description) = &mode.description {
-                    entry.documentation_aside(DocumentationSide::Left, DocumentationEdge::Bottom, {
+                    entry.documentation_aside(side, DocumentationEdge::Bottom, {
                         let description = description.clone();
 
                         move |cx| {

crates/agent_ui/src/acp/thread_history.rs πŸ”—

@@ -450,6 +450,7 @@ impl Render for AcpThreadHistory {
         v_flex()
             .key_context("ThreadHistory")
             .size_full()
+            .bg(cx.theme().colors().panel_background)
             .on_action(cx.listener(Self::select_previous))
             .on_action(cx.listener(Self::select_next))
             .on_action(cx.listener(Self::select_first))

crates/agent_ui/src/acp/thread_view.rs πŸ”—

@@ -3631,6 +3631,7 @@ impl AcpThreadView {
             .child(
                 h_flex()
                     .id("edits-container")
+                    .cursor_pointer()
                     .gap_1()
                     .child(Disclosure::new("edits-disclosure", expanded))
                     .map(|this| {
@@ -3770,6 +3771,7 @@ impl AcpThreadView {
                     Label::new(name.to_string())
                         .size(LabelSize::XSmall)
                         .buffer_font(cx)
+                        .ml_1p5()
                 });
 
                 let file_icon = FileIcons::get_icon(path.as_std_path(), cx)
@@ -3801,14 +3803,30 @@ impl AcpThreadView {
                     })
                     .child(
                         h_flex()
+                            .id(("file-name-row", index))
                             .relative()
-                            .id(("file-name", index))
                             .pr_8()
-                            .gap_1p5()
                             .w_full()
                             .overflow_x_scroll()
-                            .child(file_icon)
-                            .child(h_flex().gap_0p5().children(file_name).children(file_path))
+                            .child(
+                                h_flex()
+                                    .id(("file-name-path", index))
+                                    .cursor_pointer()
+                                    .pr_0p5()
+                                    .gap_0p5()
+                                    .hover(|s| s.bg(cx.theme().colors().element_hover))
+                                    .rounded_xs()
+                                    .child(file_icon)
+                                    .children(file_name)
+                                    .children(file_path)
+                                    .tooltip(Tooltip::text("Go to File"))
+                                    .on_click({
+                                        let buffer = buffer.clone();
+                                        cx.listener(move |this, _, window, cx| {
+                                            this.open_edited_buffer(&buffer, window, cx);
+                                        })
+                                    }),
+                            )
                             .child(
                                 div()
                                     .absolute()
@@ -3818,13 +3836,7 @@ impl AcpThreadView {
                                     .bottom_0()
                                     .right_0()
                                     .bg(overlay_gradient),
-                            )
-                            .on_click({
-                                let buffer = buffer.clone();
-                                cx.listener(move |this, _, window, cx| {
-                                    this.open_edited_buffer(&buffer, window, cx);
-                                })
-                            }),
+                            ),
                     )
                     .child(
                         h_flex()
@@ -4571,14 +4583,29 @@ impl AcpThreadView {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        if window.is_window_active() || !self.notifications.is_empty() {
+        if !self.notifications.is_empty() {
+            return;
+        }
+
+        let settings = AgentSettings::get_global(cx);
+
+        let window_is_inactive = !window.is_window_active();
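+        // If the workspace handle is gone, treat the panel as hidden.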
+        let panel_is_hidden = self
+            .workspace
+            .upgrade()
+            .map(|workspace| AgentPanel::is_hidden(&workspace, cx))
+            .unwrap_or(true);
+
+        let should_notify = window_is_inactive || panel_is_hidden;
+
+        if !should_notify {
             return;
         }
 
         // TODO: Change this once we have title summarization for external agents.
         let title = self.agent.name();
 
-        match AgentSettings::get_global(cx).notify_when_agent_waiting {
+        match settings.notify_when_agent_waiting {
             NotifyWhenAgentWaiting::PrimaryScreen => {
                 if let Some(primary) = cx.primary_display() {
                     self.pop_up(icon, caption.into(), title, window, primary, cx);
@@ -5581,7 +5608,7 @@ fn default_markdown_style(
     let theme_settings = ThemeSettings::get_global(cx);
     let colors = cx.theme().colors();
 
-    let buffer_font_size = TextSize::Small.rems(cx);
+    let buffer_font_size = theme_settings.agent_buffer_font_size(cx);
 
     let mut text_style = window.text_style();
     let line_height = buffer_font_size * 1.75;
@@ -5593,9 +5620,9 @@ fn default_markdown_style(
     };
 
     let font_size = if buffer_font {
-        TextSize::Small.rems(cx)
+        theme_settings.agent_buffer_font_size(cx)
     } else {
-        TextSize::Default.rems(cx)
+        theme_settings.agent_ui_font_size(cx)
     };
 
     let text_color = if muted_text {
@@ -5892,6 +5919,107 @@ pub(crate) mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_notification_when_panel_hidden(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
+
+        add_to_workspace(thread_view.clone(), cx);
+
+        let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
+
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_text("Hello", window, cx);
+        });
+
+        // The window stays active (we don't deactivate it), but the panel will be hidden.
+        // Note: in the test environment the panel is never actually added to the dock,
+        // so `AgentPanel::is_hidden` will return true.
+
+        thread_view.update_in(cx, |thread_view, window, cx| {
+            thread_view.send(window, cx);
+        });
+
+        cx.run_until_parked();
+
+        // Should show notification because window is active but panel is hidden
+        assert!(
+            cx.windows()
+                .iter()
+                .any(|window| window.downcast::<AgentNotification>().is_some()),
+            "Expected notification when panel is hidden"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_notification_still_works_when_window_inactive(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
+
+        let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_text("Hello", window, cx);
+        });
+
+        // Deactivate window - should show notification regardless of setting
+        cx.deactivate_window();
+
+        thread_view.update_in(cx, |thread_view, window, cx| {
+            thread_view.send(window, cx);
+        });
+
+        cx.run_until_parked();
+
+        // Should still show notification when window is inactive (existing behavior)
+        assert!(
+            cx.windows()
+                .iter()
+                .any(|window| window.downcast::<AgentNotification>().is_some()),
+            "Expected notification when window is inactive"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_notification_respects_never_setting(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        // Set notify_when_agent_waiting to Never
+        cx.update(|cx| {
+            AgentSettings::override_global(
+                AgentSettings {
+                    notify_when_agent_waiting: NotifyWhenAgentWaiting::Never,
+                    ..AgentSettings::get_global(cx).clone()
+                },
+                cx,
+            );
+        });
+
+        let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
+
+        let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_text("Hello", window, cx);
+        });
+
+        // Window is active
+
+        thread_view.update_in(cx, |thread_view, window, cx| {
+            thread_view.send(window, cx);
+        });
+
+        cx.run_until_parked();
+
+        // Should NOT show notification because notify_when_agent_waiting is Never
+        assert!(
+            !cx.windows()
+                .iter()
+                .any(|window| window.downcast::<AgentNotification>().is_some()),
+            "Expected no notification when notify_when_agent_waiting is Never"
+        );
+    }
+
     async fn setup_thread_view(
         agent: impl AgentServer + 'static,
         cx: &mut TestAppContext,

crates/agent_ui/src/agent_configuration.rs πŸ”—

@@ -23,15 +23,18 @@ use language::LanguageRegistry;
 use language_model::{
     LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
 };
+use language_models::AllLanguageModelSettings;
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::{
     agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
     context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
 };
-use settings::{SettingsStore, update_settings_file};
+use rope::Rope;
+use settings::{Settings, SettingsStore, update_settings_file};
 use ui::{
-    Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
-    Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
+    Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
+    ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
+    SwitchColor, Tooltip, WithScrollbar, prelude::*,
 };
 use util::ResultExt as _;
 use workspace::{Workspace, create_and_open_local_file};
@@ -303,10 +306,76 @@ impl AgentConfiguration {
                                 }
                             })),
                         )
-                    }),
+                    })
+                    .when(
+                        is_expanded && is_removable_provider(&provider.id(), cx),
+                        |this| {
+                            this.child(
+                                Button::new(
+                                    SharedString::from(format!("delete-provider-{provider_id}")),
+                                    "Remove Provider",
+                                )
+                                .full_width()
+                                .style(ButtonStyle::Outlined)
+                                .icon_position(IconPosition::Start)
+                                .icon(IconName::Trash)
+                                .icon_size(IconSize::Small)
+                                .icon_color(Color::Muted)
+                                .label_size(LabelSize::Small)
+                                .on_click(cx.listener({
+                                    let provider = provider.clone();
+                                    move |this, _event, window, cx| {
+                                        this.delete_provider(provider.clone(), window, cx);
+                                    }
+                                })),
+                            )
+                        },
+                    ),
             )
     }
 
+    fn delete_provider(
+        &mut self,
+        provider: Arc<dyn LanguageModelProvider>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let fs = self.fs.clone();
+        let provider_id = provider.id();
+
+        cx.spawn_in(window, async move |_, cx| {
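+            // Remove the provider's entry from the user settings file first, then unregister it from the registry.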
+            cx.update(|_window, cx| {
+                update_settings_file(fs.clone(), cx, {
+                    let provider_id = provider_id.clone();
+                    move |settings, _| {
+                        if let Some(ref mut openai_compatible) = settings
+                            .language_models
+                            .as_mut()
+                            .and_then(|lm| lm.openai_compatible.as_mut())
+                        {
+                            let key_to_remove: Arc<str> = Arc::from(provider_id.0.as_ref());
+                            openai_compatible.remove(&key_to_remove);
+                        }
+                    }
+                });
+            })
+            .log_err();
+
+            cx.update(|_window, cx| {
+                LanguageModelRegistry::global(cx).update(cx, {
+                    let provider_id = provider_id.clone();
+                    move |registry, cx| {
+                        registry.unregister_provider(provider_id, cx);
+                    }
+                })
+            })
+            .log_err();
+
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+    }
+
     fn render_provider_configuration_section(
         &mut self,
         cx: &mut Context<Self>,
@@ -1114,8 +1183,11 @@ async fn open_new_agent_servers_entry_in_settings_editor(
 ) -> Result<()> {
     let settings_editor = workspace
         .update_in(cx, |_, window, cx| {
-            create_and_open_local_file(paths::settings_file(), window, cx, || {
-                settings::initial_user_settings_content().as_ref().into()
+            create_and_open_local_file(paths::settings_file(), window, cx, |cx| {
+                Rope::from_str(
+                    &settings::initial_user_settings_content(),
+                    cx.background_executor(),
+                )
             })
         })?
         .await?
@@ -1221,3 +1293,14 @@ fn find_text_in_buffer(
         None
     }
 }
+
+// OpenAI-compatible providers are user-configured and can be removed,
+// whereas built-in providers (like Anthropic, OpenAI, Google, etc.) can't.
+//
+// If in the future we have more "API-compatible-type" of providers,
+// they should be included here as removable providers.
+fn is_removable_provider(provider_id: &LanguageModelProviderId, cx: &App) -> bool {
+    AllLanguageModelSettings::get_global(cx)
+        .openai_compatible
+        .contains_key(provider_id.0.as_ref())
+}

crates/agent_ui/src/agent_diff.rs πŸ”—

@@ -70,14 +70,6 @@ impl AgentDiffThread {
         }
     }
 
-    fn is_generating(&self, cx: &App) -> bool {
-        match self {
-            AgentDiffThread::AcpThread(thread) => {
-                thread.read(cx).status() == acp_thread::ThreadStatus::Generating
-            }
-        }
-    }
-
     fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
         match self {
             AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
@@ -970,9 +962,7 @@ impl AgentDiffToolbar {
             None => ToolbarItemLocation::Hidden,
             Some(AgentDiffToolbarItem::Pane(_)) => ToolbarItemLocation::PrimaryRight,
             Some(AgentDiffToolbarItem::Editor { state, .. }) => match state {
-                EditorState::Generating | EditorState::Reviewing => {
-                    ToolbarItemLocation::PrimaryRight
-                }
+                EditorState::Reviewing => ToolbarItemLocation::PrimaryRight,
                 EditorState::Idle => ToolbarItemLocation::Hidden,
             },
         }
@@ -1050,7 +1040,6 @@ impl Render for AgentDiffToolbar {
 
                 let content = match state {
                     EditorState::Idle => return Empty.into_any(),
-                    EditorState::Generating => vec![spinner_icon],
                     EditorState::Reviewing => vec![
                         h_flex()
                             .child(
@@ -1222,7 +1211,6 @@ pub struct AgentDiff {
 pub enum EditorState {
     Idle,
     Reviewing,
-    Generating,
 }
 
 struct WorkspaceThread {
@@ -1545,15 +1533,11 @@ impl AgentDiff {
                     multibuffer.add_diff(diff_handle.clone(), cx);
                 });
 
-                let new_state = if thread.is_generating(cx) {
-                    EditorState::Generating
-                } else {
-                    EditorState::Reviewing
-                };
+                let reviewing_state = EditorState::Reviewing;
 
                 let previous_state = self
                     .reviewing_editors
-                    .insert(weak_editor.clone(), new_state.clone());
+                    .insert(weak_editor.clone(), reviewing_state.clone());
 
                 if previous_state.is_none() {
                     editor.update(cx, |editor, cx| {
@@ -1566,7 +1550,9 @@ impl AgentDiff {
                     unaffected.remove(weak_editor);
                 }
 
-                if new_state == EditorState::Reviewing && previous_state != Some(new_state) {
+                if previous_state != Some(reviewing_state) {
                     // Jump to first hunk when we enter review mode
                     editor.update(cx, |editor, cx| {
                         let snapshot = multibuffer.read(cx).snapshot(cx);

crates/agent_ui/src/agent_panel.rs πŸ”—

@@ -729,6 +729,25 @@ impl AgentPanel {
         &self.context_server_registry
     }
 
+    pub fn is_hidden(workspace: &Entity<Workspace>, cx: &App) -> bool {
+        let workspace_read = workspace.read(cx);
+
+        workspace_read
+            .panel::<AgentPanel>(cx)
+            .map(|panel| {
+                let panel_id = Entity::entity_id(&panel);
+
+                let is_visible = workspace_read.all_docks().iter().any(|dock| {
+                    dock.read(cx)
+                        .visible_panel()
+                        .is_some_and(|visible_panel| visible_panel.panel_id() == panel_id)
+                });
+
+                !is_visible
+            })
+            .unwrap_or(true)
+    }
+
     fn active_thread_view(&self) -> Option<&Entity<AcpThreadView>> {
         match &self.active_view {
             ActiveView::ExternalAgentThread { thread_view, .. } => Some(thread_view),
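
A rough, self-contained sketch of the visibility rule behind the new `AgentPanel::is_hidden` helper (workspace and dock types replaced with plain data; all names here are illustrative only): the panel counts as hidden when it is not registered at all, or when no dock's currently visible panel carries its entity id.

```rust
// `visible_panel_ids` stands in for "the visible panel of each dock", and
// `panel_id` for the AgentPanel's entity id (None if the panel isn't registered).
fn panel_is_hidden(panel_id: Option<u64>, visible_panel_ids: &[u64]) -> bool {
    match panel_id {
        None => true, // no AgentPanel registered: treat as hidden
        Some(id) => !visible_panel_ids.iter().any(|&visible| visible == id),
    }
}

fn main() {
    assert!(panel_is_hidden(None, &[1, 2]));     // panel missing
    assert!(panel_is_hidden(Some(7), &[1, 2]));  // registered but not shown in any dock
    assert!(!panel_is_hidden(Some(2), &[1, 2])); // visible in some dock
}
```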

crates/agent_ui/src/buffer_codegen.rs πŸ”—

@@ -487,9 +487,10 @@ impl CodegenAlternative {
     ) {
         let start_time = Instant::now();
         let snapshot = self.snapshot.clone();
-        let selected_text = snapshot
-            .text_for_range(self.range.start..self.range.end)
-            .collect::<Rope>();
+        let selected_text = Rope::from_iter(
+            snapshot.text_for_range(self.range.start..self.range.end),
+            cx.background_executor(),
+        );
 
         let selection_start = self.range.start.to_point(&snapshot);
 

crates/agent_ui/src/text_thread_editor.rs πŸ”—

@@ -2591,11 +2591,12 @@ impl SearchableItem for TextThreadEditor {
         &mut self,
         index: usize,
         matches: &[Self::Match],
+        collapse: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
         self.editor.update(cx, |editor, cx| {
-            editor.activate_match(index, matches, window, cx);
+            editor.activate_match(index, matches, collapse, window, cx);
         });
     }
 

crates/assistant_text_thread/src/text_thread.rs πŸ”—

@@ -744,12 +744,13 @@ impl TextThread {
         telemetry: Option<Arc<Telemetry>>,
         cx: &mut Context<Self>,
     ) -> Self {
-        let buffer = cx.new(|_cx| {
+        let buffer = cx.new(|cx| {
             let buffer = Buffer::remote(
                 language::BufferId::new(1).unwrap(),
                 replica_id,
                 capability,
                 "",
+                cx.background_executor(),
             );
             buffer.set_language_registry(language_registry.clone());
             buffer

crates/auto_update/Cargo.toml πŸ”—

@@ -26,6 +26,7 @@ serde_json.workspace = true
 settings.workspace = true
 smol.workspace = true
 tempfile.workspace = true
+util.workspace = true
 workspace.workspace = true
 
 [target.'cfg(not(target_os = "windows"))'.dependencies]

crates/auto_update/src/auto_update.rs πŸ”—

@@ -962,7 +962,7 @@ pub async fn finalize_auto_update_on_quit() {
             .parent()
             .map(|p| p.join("tools").join("auto_update_helper.exe"))
     {
-        let mut command = smol::process::Command::new(helper);
+        let mut command = util::command::new_smol_command(helper);
         command.arg("--launch");
         command.arg("false");
         if let Ok(mut cmd) = command.spawn() {

crates/buffer_diff/src/buffer_diff.rs πŸ”—

@@ -1,6 +1,9 @@
 use futures::channel::oneshot;
 use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
-use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel};
+use gpui::{
+    App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task,
+    TaskLabel,
+};
 use language::{Language, LanguageRegistry};
 use rope::Rope;
 use std::{
@@ -191,7 +194,7 @@ impl BufferDiffSnapshot {
         let base_text_exists;
         let base_text_snapshot;
         if let Some(text) = &base_text {
-            let base_text_rope = Rope::from(text.as_str());
+            let base_text_rope = Rope::from_str(text.as_str(), cx.background_executor());
             base_text_pair = Some((text.clone(), base_text_rope.clone()));
             let snapshot =
                 language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx);
@@ -311,6 +314,7 @@ impl BufferDiffInner {
         hunks: &[DiffHunk],
         buffer: &text::BufferSnapshot,
         file_exists: bool,
+        cx: &BackgroundExecutor,
     ) -> Option<Rope> {
         let head_text = self
             .base_text_exists
@@ -505,7 +509,7 @@ impl BufferDiffInner {
         for (old_range, replacement_text) in edits {
             new_index_text.append(index_cursor.slice(old_range.start));
             index_cursor.seek_forward(old_range.end);
-            new_index_text.push(&replacement_text);
+            new_index_text.push(&replacement_text, cx);
         }
         new_index_text.append(index_cursor.suffix());
         Some(new_index_text)
@@ -962,6 +966,7 @@ impl BufferDiff {
             hunks,
             buffer,
             file_exists,
+            cx.background_executor(),
         );
 
         cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
@@ -1385,7 +1390,12 @@ mod tests {
         "
         .unindent();
 
-        let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
+        let mut buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            BufferId::new(1).unwrap(),
+            buffer_text,
+            cx.background_executor(),
+        );
         let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx);
         assert_hunks(
             diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer),
@@ -1394,7 +1404,7 @@ mod tests {
             &[(1..2, "two\n", "HELLO\n", DiffHunkStatus::modified_none())],
         );
 
-        buffer.edit([(0..0, "point five\n")]);
+        buffer.edit([(0..0, "point five\n")], cx.background_executor());
         diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx);
         assert_hunks(
             diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer),
@@ -1459,7 +1469,12 @@ mod tests {
         "
         .unindent();
 
-        let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
+        let buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            BufferId::new(1).unwrap(),
+            buffer_text,
+            cx.background_executor(),
+        );
         let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
         let mut uncommitted_diff =
             BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
@@ -1528,7 +1543,12 @@ mod tests {
         "
         .unindent();
 
-        let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
+        let buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            BufferId::new(1).unwrap(),
+            buffer_text,
+            cx.background_executor(),
+        );
         let diff = cx
             .update(|cx| {
                 BufferDiffSnapshot::new_with_base_text(
@@ -1791,7 +1811,12 @@ mod tests {
 
         for example in table {
             let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false);
-            let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
+            let buffer = Buffer::new(
+                ReplicaId::LOCAL,
+                BufferId::new(1).unwrap(),
+                buffer_text,
+                cx.background_executor(),
+            );
             let hunk_range =
                 buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end);
 
@@ -1868,6 +1893,7 @@ mod tests {
             ReplicaId::LOCAL,
             BufferId::new(1).unwrap(),
             buffer_text.clone(),
+            cx.background_executor(),
         );
         let unstaged = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
         let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
@@ -1941,7 +1967,12 @@ mod tests {
         "
         .unindent();
 
-        let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text_1);
+        let mut buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            BufferId::new(1).unwrap(),
+            buffer_text_1,
+            cx.background_executor(),
+        );
 
         let empty_diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx));
         let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
@@ -1961,6 +1992,7 @@ mod tests {
                 NINE
             "
             .unindent(),
+            cx.background_executor(),
         );
         let diff_2 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
         assert_eq!(None, diff_2.inner.compare(&diff_1.inner, &buffer));
@@ -1978,6 +2010,7 @@ mod tests {
                 NINE
             "
             .unindent(),
+            cx.background_executor(),
         );
         let diff_3 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
         let range = diff_3.inner.compare(&diff_2.inner, &buffer).unwrap();
@@ -1995,6 +2028,7 @@ mod tests {
                 NINE
             "
             .unindent(),
+            cx.background_executor(),
         );
         let diff_4 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
         let range = diff_4.inner.compare(&diff_3.inner, &buffer).unwrap();
@@ -2013,6 +2047,7 @@ mod tests {
                 NINE
             "
             .unindent(),
+            cx.background_executor(),
         );
         let diff_5 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text.clone(), cx);
         let range = diff_5.inner.compare(&diff_4.inner, &buffer).unwrap();
@@ -2031,6 +2066,7 @@ mod tests {
                 Β«nineΒ»
             "
             .unindent(),
+            cx.background_executor(),
         );
         let diff_6 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text, cx);
         let range = diff_6.inner.compare(&diff_5.inner, &buffer).unwrap();
@@ -2140,14 +2176,14 @@ mod tests {
         let working_copy = gen_working_copy(rng, &head_text);
         let working_copy = cx.new(|cx| {
             language::Buffer::local_normalized(
-                Rope::from(working_copy.as_str()),
+                Rope::from_str(working_copy.as_str(), cx.background_executor()),
                 text::LineEnding::default(),
                 cx,
             )
         });
         let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot());
         let mut index_text = if rng.random() {
-            Rope::from(head_text.as_str())
+            Rope::from_str(head_text.as_str(), cx.background_executor())
         } else {
             working_copy.as_rope().clone()
         };

crates/channel/src/channel_buffer.rs πŸ”—

@@ -70,6 +70,7 @@ impl ChannelBuffer {
                 ReplicaId::new(response.replica_id as u16),
                 capability,
                 base_text,
+                cx.background_executor(),
             )
         })?;
         buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;

crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs πŸ”—

@@ -182,8 +182,8 @@ pub fn build_prompt(
         }
 
         for related_file in &request.included_files {
-            writeln!(&mut prompt, "`````filename={}", related_file.path.display()).unwrap();
-            write_excerpts(
+            write_codeblock(
+                &related_file.path,
                 &related_file.excerpts,
                 if related_file.path == request.excerpt_path {
                     &insertions
@@ -194,7 +194,6 @@ pub fn build_prompt(
                 request.prompt_format == PromptFormat::NumLinesUniDiff,
                 &mut prompt,
             );
-            write!(&mut prompt, "`````\n\n").unwrap();
         }
     }
 
@@ -205,6 +204,25 @@ pub fn build_prompt(
     Ok((prompt, section_labels))
 }
 
+pub fn write_codeblock<'a>(
+    path: &Path,
+    excerpts: impl IntoIterator<Item = &'a Excerpt>,
+    sorted_insertions: &[(Point, &str)],
+    file_line_count: Line,
+    include_line_numbers: bool,
+    output: &'a mut String,
+) {
+    writeln!(output, "`````{}", path.display()).unwrap();
+    write_excerpts(
+        excerpts,
+        sorted_insertions,
+        file_line_count,
+        include_line_numbers,
+        output,
+    );
+    write!(output, "`````\n\n").unwrap();
+}
+
 pub fn write_excerpts<'a>(
     excerpts: impl IntoIterator<Item = &'a Excerpt>,
     sorted_insertions: &[(Point, &str)],
@@ -597,8 +615,7 @@ impl<'a> SyntaxBasedPrompt<'a> {
                 disjoint_snippets.push(current_snippet);
             }
 
-            // TODO: remove filename=?
-            writeln!(output, "`````filename={}", file_path.display()).ok();
+            writeln!(output, "`````path={}", file_path.display()).ok();
             let mut skipped_last_snippet = false;
             for (snippet, range) in disjoint_snippets {
                 let section_index = section_ranges.len();
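
To make the prompt framing concrete, here is a small self-contained sketch of the fence shape `write_codeblock` produces (the excerpt body below is a placeholder for what `write_excerpts` would actually emit): a five-backtick fence tagged with the file path, the excerpt text, then a closing fence followed by a blank line.

```rust
use std::fmt::Write as _;

// Sketch of the codeblock framing used above; `body` stands in for the
// excerpt text that `write_excerpts` would normally produce.
fn write_codeblock_sketch(path: &str, body: &str, output: &mut String) {
    writeln!(output, "`````{path}").unwrap();
    output.push_str(body);
    write!(output, "`````\n\n").unwrap();
}

fn main() {
    let mut prompt = String::new();
    write_codeblock_sketch("src/main.rs", "fn main() {}\n", &mut prompt);
    assert_eq!(prompt, "`````src/main.rs\nfn main() {}\n`````\n\n");
}
```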

crates/codestral/src/codestral.rs πŸ”—

@@ -66,6 +66,14 @@ impl CodestralCompletionProvider {
         Self::api_key(cx).is_some()
     }
 
+    /// This is so we can immediately show Codestral as a provider users can
+    /// switch to in the edit prediction menu, if the API key has been added.
+    pub fn ensure_api_key_loaded(http_client: Arc<dyn HttpClient>, cx: &mut App) {
+        MistralLanguageModelProvider::global(http_client, cx)
+            .load_codestral_api_key(cx)
+            .detach();
+    }
+
     fn api_key(cx: &App) -> Option<Arc<str>> {
         MistralLanguageModelProvider::try_global(cx)
             .and_then(|provider| provider.codestral_api_key(CODESTRAL_API_URL, cx))

crates/collab/src/db/queries/buffers.rs πŸ”—

@@ -701,12 +701,12 @@ impl Database {
             return Ok(());
         }
 
-        let mut text_buffer = text::Buffer::new(
+        let mut text_buffer = text::Buffer::new_slow(
             clock::ReplicaId::LOCAL,
             text::BufferId::new(1).unwrap(),
             base_text,
         );
-        text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire));
+        text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire), None);
 
         let base_text = text_buffer.text();
         let epoch = buffer.epoch + 1;

crates/collab/src/db/tests/buffer_tests.rs πŸ”—

@@ -74,11 +74,21 @@ async fn test_channel_buffers(db: &Arc<Database>) {
         ReplicaId::new(0),
         text::BufferId::new(1).unwrap(),
         "".to_string(),
+        &db.test_options.as_ref().unwrap().executor,
     );
     let operations = vec![
-        buffer_a.edit([(0..0, "hello world")]),
-        buffer_a.edit([(5..5, ", cruel")]),
-        buffer_a.edit([(0..5, "goodbye")]),
+        buffer_a.edit(
+            [(0..0, "hello world")],
+            &db.test_options.as_ref().unwrap().executor,
+        ),
+        buffer_a.edit(
+            [(5..5, ", cruel")],
+            &db.test_options.as_ref().unwrap().executor,
+        ),
+        buffer_a.edit(
+            [(0..5, "goodbye")],
+            &db.test_options.as_ref().unwrap().executor,
+        ),
         buffer_a.undo().unwrap().1,
     ];
     assert_eq!(buffer_a.text(), "hello, cruel world");
@@ -102,15 +112,19 @@ async fn test_channel_buffers(db: &Arc<Database>) {
         ReplicaId::new(0),
         text::BufferId::new(1).unwrap(),
         buffer_response_b.base_text,
+        &db.test_options.as_ref().unwrap().executor,
+    );
+    buffer_b.apply_ops(
+        buffer_response_b.operations.into_iter().map(|operation| {
+            let operation = proto::deserialize_operation(operation).unwrap();
+            if let language::Operation::Buffer(operation) = operation {
+                operation
+            } else {
+                unreachable!()
+            }
+        }),
+        None,
     );
-    buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
-        let operation = proto::deserialize_operation(operation).unwrap();
-        if let language::Operation::Buffer(operation) = operation {
-            operation
-        } else {
-            unreachable!()
-        }
-    }));
 
     assert_eq!(buffer_b.text(), "hello, cruel world");
 
@@ -247,6 +261,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
             ReplicaId::new(res.replica_id as u16),
             text::BufferId::new(1).unwrap(),
             "".to_string(),
+            &db.test_options.as_ref().unwrap().executor,
         ));
     }
 
@@ -255,9 +270,9 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         user_id,
         db,
         vec![
-            text_buffers[0].edit([(0..0, "a")]),
-            text_buffers[0].edit([(0..0, "b")]),
-            text_buffers[0].edit([(0..0, "c")]),
+            text_buffers[0].edit([(0..0, "a")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[0].edit([(0..0, "b")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[0].edit([(0..0, "c")], &db.test_options.as_ref().unwrap().executor),
         ],
     )
     .await;
@@ -267,9 +282,9 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         user_id,
         db,
         vec![
-            text_buffers[1].edit([(0..0, "d")]),
-            text_buffers[1].edit([(1..1, "e")]),
-            text_buffers[1].edit([(2..2, "f")]),
+            text_buffers[1].edit([(0..0, "d")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[1].edit([(1..1, "e")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[1].edit([(2..2, "f")], &db.test_options.as_ref().unwrap().executor),
         ],
     )
     .await;
@@ -286,14 +301,15 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         replica_id,
         text::BufferId::new(1).unwrap(),
         "def".to_string(),
+        &db.test_options.as_ref().unwrap().executor,
     );
     update_buffer(
         buffers[1].channel_id,
         user_id,
         db,
         vec![
-            text_buffers[1].edit([(0..0, "g")]),
-            text_buffers[1].edit([(0..0, "h")]),
+            text_buffers[1].edit([(0..0, "g")], &db.test_options.as_ref().unwrap().executor),
+            text_buffers[1].edit([(0..0, "h")], &db.test_options.as_ref().unwrap().executor),
         ],
     )
     .await;
@@ -302,7 +318,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
         buffers[2].channel_id,
         user_id,
         db,
-        vec![text_buffers[2].edit([(0..0, "i")])],
+        vec![text_buffers[2].edit([(0..0, "i")], &db.test_options.as_ref().unwrap().executor)],
     )
     .await;
 

crates/collab/src/tests/editor_tests.rs πŸ”—

@@ -39,6 +39,7 @@ use std::{
         Arc,
         atomic::{self, AtomicBool, AtomicUsize},
     },
+    time::Duration,
 };
 use text::Point;
 use util::{path, rel_path::rel_path, uri};
@@ -1817,14 +1818,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
                 settings.project.all_languages.defaults.inlay_hints =
                     Some(InlayHintSettingsContent {
                         enabled: Some(true),
-                        show_value_hints: Some(true),
-                        edit_debounce_ms: Some(0),
-                        scroll_debounce_ms: Some(0),
-                        show_type_hints: Some(true),
-                        show_parameter_hints: Some(false),
-                        show_other_hints: Some(true),
-                        show_background: Some(false),
-                        toggle_on_modifiers_press: None,
+                        ..InlayHintSettingsContent::default()
                     })
             });
         });
@@ -1834,15 +1828,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
             store.update_user_settings(cx, |settings| {
                 settings.project.all_languages.defaults.inlay_hints =
                     Some(InlayHintSettingsContent {
-                        show_value_hints: Some(true),
                         enabled: Some(true),
-                        edit_debounce_ms: Some(0),
-                        scroll_debounce_ms: Some(0),
-                        show_type_hints: Some(true),
-                        show_parameter_hints: Some(false),
-                        show_other_hints: Some(true),
-                        show_background: Some(false),
-                        toggle_on_modifiers_press: None,
+                        ..InlayHintSettingsContent::default()
                     })
             });
         });
@@ -1935,6 +1922,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     });
     let fake_language_server = fake_language_servers.next().await.unwrap();
     let editor_a = file_a.await.unwrap().downcast::<Editor>().unwrap();
+    executor.advance_clock(Duration::from_millis(100));
     executor.run_until_parked();
 
     let initial_edit = edits_made.load(atomic::Ordering::Acquire);
@@ -1955,6 +1943,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .downcast::<Editor>()
         .unwrap();
 
+    executor.advance_clock(Duration::from_millis(100));
     executor.run_until_parked();
     editor_b.update(cx_b, |editor, cx| {
         assert_eq!(
@@ -1973,6 +1962,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     });
     cx_b.focus(&editor_b);
 
+    executor.advance_clock(Duration::from_secs(1));
     executor.run_until_parked();
     editor_a.update(cx_a, |editor, cx| {
         assert_eq!(
@@ -1996,6 +1986,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     });
     cx_a.focus(&editor_a);
 
+    executor.advance_clock(Duration::from_secs(1));
     executor.run_until_parked();
     editor_a.update(cx_a, |editor, cx| {
         assert_eq!(
@@ -2017,6 +2008,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .into_response()
         .expect("inlay refresh request failed");
 
+    executor.advance_clock(Duration::from_secs(1));
     executor.run_until_parked();
     editor_a.update(cx_a, |editor, cx| {
         assert_eq!(

crates/collab/src/tests/integration_tests.rs πŸ”—

@@ -3694,7 +3694,7 @@ async fn test_buffer_reloading(
         assert_eq!(buf.line_ending(), LineEnding::Unix);
     });
 
-    let new_contents = Rope::from("d\ne\nf");
+    let new_contents = Rope::from_str_small("d\ne\nf");
     client_a
         .fs()
         .save(
@@ -4479,7 +4479,7 @@ async fn test_reloading_buffer_manually(
         .fs()
         .save(
             path!("/a/a.rs").as_ref(),
-            &Rope::from("let seven = 7;"),
+            &Rope::from_str_small("let seven = 7;"),
             LineEnding::Unix,
         )
         .await

crates/collab/src/tests/random_project_collaboration_tests.rs πŸ”—

@@ -27,6 +27,7 @@ use std::{
     rc::Rc,
     sync::Arc,
 };
+use text::Rope;
 use util::{
     ResultExt, path,
     paths::PathStyle,
@@ -938,7 +939,11 @@ impl RandomizedTest for ProjectCollaborationTest {
 
                     client
                         .fs()
-                        .save(&path, &content.as_str().into(), text::LineEnding::Unix)
+                        .save(
+                            &path,
+                            &Rope::from_str_small(content.as_str()),
+                            text::LineEnding::Unix,
+                        )
                         .await
                         .unwrap();
                 }

crates/collab_ui/src/collab_panel.rs πŸ”—

@@ -54,6 +54,10 @@ actions!(
         CollapseSelectedChannel,
         /// Expands the selected channel in the tree view.
         ExpandSelectedChannel,
+        /// Opens the meeting notes for the selected channel in the panel.
+        ///
+        /// Use `collab::OpenChannelNotes` to open the channel notes for the current call.
+        OpenSelectedChannelNotes,
         /// Starts moving a channel to a new location.
         StartMoveChannel,
         /// Moves the selected item to the current location.
@@ -1856,6 +1860,17 @@ impl CollabPanel {
         }
     }
 
+    fn open_selected_channel_notes(
+        &mut self,
+        _: &OpenSelectedChannelNotes,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(channel) = self.selected_channel() {
+            self.open_channel_notes(channel.id, window, cx);
+        }
+    }
+
     fn set_channel_visibility(
         &mut self,
         channel_id: ChannelId,
@@ -2976,6 +2991,7 @@ impl Render for CollabPanel {
             .on_action(cx.listener(CollabPanel::remove_selected_channel))
             .on_action(cx.listener(CollabPanel::show_inline_context_menu))
             .on_action(cx.listener(CollabPanel::rename_selected_channel))
+            .on_action(cx.listener(CollabPanel::open_selected_channel_notes))
             .on_action(cx.listener(CollabPanel::collapse_selected_channel))
             .on_action(cx.listener(CollabPanel::expand_selected_channel))
             .on_action(cx.listener(CollabPanel::start_move_selected_channel))

crates/dap_adapters/Cargo.toml πŸ”—

@@ -41,6 +41,10 @@ util.workspace = true
 
 [dev-dependencies]
 dap = { workspace = true, features = ["test-support"] }
+fs = { workspace = true, features = ["test-support"] }
 gpui = { workspace = true, features = ["test-support"] }
+http_client.workspace = true
+node_runtime.workspace = true
+settings = { workspace = true, features = ["test-support"] }
 task = { workspace = true, features = ["test-support"] }
 util = { workspace = true, features = ["test-support"] }

crates/dap_adapters/src/dap_adapters.rs πŸ”—

@@ -4,6 +4,8 @@ mod go;
 mod javascript;
 mod python;
 
+#[cfg(test)]
+use std::path::PathBuf;
 use std::sync::Arc;
 
 use anyhow::Result;
@@ -38,3 +40,65 @@ pub fn init(cx: &mut App) {
         }
     })
 }
+
+#[cfg(test)]
+mod test_mocks {
+    use super::*;
+
+    pub(crate) struct MockDelegate {
+        worktree_root: PathBuf,
+    }
+
+    impl MockDelegate {
+        pub(crate) fn new() -> Arc<dyn adapters::DapDelegate> {
+            Arc::new(Self {
+                worktree_root: PathBuf::from("/tmp/test"),
+            })
+        }
+    }
+
+    #[async_trait::async_trait]
+    impl adapters::DapDelegate for MockDelegate {
+        fn worktree_id(&self) -> settings::WorktreeId {
+            settings::WorktreeId::from_usize(0)
+        }
+
+        fn worktree_root_path(&self) -> &std::path::Path {
+            &self.worktree_root
+        }
+
+        fn http_client(&self) -> Arc<dyn http_client::HttpClient> {
+            unimplemented!("Not needed for tests")
+        }
+
+        fn node_runtime(&self) -> node_runtime::NodeRuntime {
+            unimplemented!("Not needed for tests")
+        }
+
+        fn toolchain_store(&self) -> Arc<dyn language::LanguageToolchainStore> {
+            unimplemented!("Not needed for tests")
+        }
+
+        fn fs(&self) -> Arc<dyn fs::Fs> {
+            unimplemented!("Not needed for tests")
+        }
+
+        fn output_to_console(&self, _msg: String) {}
+
+        async fn which(&self, _command: &std::ffi::OsStr) -> Option<PathBuf> {
+            None
+        }
+
+        async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result<String> {
+            Ok(String::new())
+        }
+
+        async fn shell_env(&self) -> collections::HashMap<String, String> {
+            collections::HashMap::default()
+        }
+
+        fn is_headless(&self) -> bool {
+            false
+        }
+    }
+}

crates/dap_adapters/src/python.rs πŸ”—

@@ -23,6 +23,11 @@ use std::{
 use util::command::new_smol_command;
 use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
 
+enum DebugpyLaunchMode<'a> {
+    Normal,
+    AttachWithConnect { host: Option<&'a str> },
+}
+
 #[derive(Default)]
 pub(crate) struct PythonDebugAdapter {
     base_venv_path: OnceCell<Result<Arc<Path>, String>>,
@@ -36,10 +41,11 @@ impl PythonDebugAdapter {
 
     const LANGUAGE_NAME: &'static str = "Python";
 
-    async fn generate_debugpy_arguments(
-        host: &Ipv4Addr,
+    async fn generate_debugpy_arguments<'a>(
+        host: &'a Ipv4Addr,
         port: u16,
-        user_installed_path: Option<&Path>,
+        launch_mode: DebugpyLaunchMode<'a>,
+        user_installed_path: Option<&'a Path>,
         user_args: Option<Vec<String>>,
     ) -> Result<Vec<String>> {
         let mut args = if let Some(user_installed_path) = user_installed_path {
@@ -62,7 +68,20 @@ impl PythonDebugAdapter {
         args.extend(if let Some(args) = user_args {
             args
         } else {
-            vec![format!("--host={}", host), format!("--port={}", port)]
+            match launch_mode {
+                DebugpyLaunchMode::Normal => {
+                    vec![format!("--host={}", host), format!("--port={}", port)]
+                }
+                DebugpyLaunchMode::AttachWithConnect { host } => {
+                    let mut args = vec!["connect".to_string()];
+
+                    if let Some(host) = host {
+                        args.push(format!("{host}:"));
+                    }
+                    args.push(format!("{port}"));
+                    args
+                }
+            }
         });
         Ok(args)
     }
@@ -315,7 +334,46 @@ impl PythonDebugAdapter {
         user_env: Option<HashMap<String, String>>,
         python_from_toolchain: Option<String>,
     ) -> Result<DebugAdapterBinary> {
-        let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
+        let mut tcp_connection = config.tcp_connection.clone().unwrap_or_default();
+
+        let (config_port, config_host) = config
+            .config
+            .get("connect")
+            .map(|value| {
+                (
+                    value
+                        .get("port")
+                        .and_then(|val| val.as_u64().map(|p| p as u16)),
+                    value.get("host").and_then(|val| val.as_str()),
+                )
+            })
+            .unwrap_or_else(|| {
+                (
+                    config
+                        .config
+                        .get("port")
+                        .and_then(|port| port.as_u64().map(|p| p as u16)),
+                    config.config.get("host").and_then(|host| host.as_str()),
+                )
+            });
+
+        let is_attach_with_connect = if config
+            .config
+            .get("request")
+            .is_some_and(|val| val.as_str().is_some_and(|request| request == "attach"))
+        {
+            if tcp_connection.host.is_some() && config_host.is_some() {
+                bail!("Cannot have two different hosts in debug configuration")
+            } else if tcp_connection.port.is_some() && config_port.is_some() {
+                bail!("Cannot have two different ports in debug configuration")
+            }
+
+            tcp_connection.port = config_port;
+            DebugpyLaunchMode::AttachWithConnect { host: config_host }
+        } else {
+            DebugpyLaunchMode::Normal
+        };
+
         let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
 
         let python_path = if let Some(toolchain) = python_from_toolchain {
@@ -330,6 +388,7 @@ impl PythonDebugAdapter {
         let arguments = Self::generate_debugpy_arguments(
             &host,
             port,
+            is_attach_with_connect,
             user_installed_path.as_deref(),
             user_args,
         )
@@ -765,29 +824,58 @@ impl DebugAdapter for PythonDebugAdapter {
                 .await;
         }
 
-        let base_path = config
-            .config
-            .get("cwd")
-            .and_then(|cwd| {
-                RelPath::new(
-                    cwd.as_str()
-                        .map(Path::new)?
-                        .strip_prefix(delegate.worktree_root_path())
-                        .ok()?,
-                    PathStyle::local(),
-                )
-                .ok()
+        let base_paths = ["cwd", "program", "module"]
+            .into_iter()
+            .filter_map(|key| {
+                config.config.get(key).and_then(|cwd| {
+                    RelPath::new(
+                        cwd.as_str()
+                            .map(Path::new)?
+                            .strip_prefix(delegate.worktree_root_path())
+                            .ok()?,
+                        PathStyle::local(),
+                    )
+                    .ok()
+                })
             })
-            .unwrap_or_else(|| RelPath::empty().into());
-        let toolchain = delegate
-            .toolchain_store()
-            .active_toolchain(
-                delegate.worktree_id(),
-                base_path.into_arc(),
-                language::LanguageName::new(Self::LANGUAGE_NAME),
-                cx,
+            .chain(
+                // Although Debugpy's wiki says absolute paths are required, it actually supports relative paths when cwd is passed in
+                // (which should always be the case, because Zed defaults cwd to the worktree root).
+                // So we also check whether these relative paths resolve to toolchains; otherwise they would be skipped,
+                // because the strip_prefix in the iteration above returns an error for them.
+                config
+                    .config
+                    .get("cwd")
+                    .map(|_| {
+                        ["program", "module"].into_iter().filter_map(|key| {
+                            config.config.get(key).and_then(|value| {
+                                let path = Path::new(value.as_str()?);
+                                RelPath::new(path, PathStyle::local()).ok()
+                            })
+                        })
+                    })
+                    .into_iter()
+                    .flatten(),
             )
-            .await;
+            .chain([RelPath::empty().into()]);
+
+        let mut toolchain = None;
+
+        for base_path in base_paths {
+            if let Some(found_toolchain) = delegate
+                .toolchain_store()
+                .active_toolchain(
+                    delegate.worktree_id(),
+                    base_path.into_arc(),
+                    language::LanguageName::new(Self::LANGUAGE_NAME),
+                    cx,
+                )
+                .await
+            {
+                toolchain = Some(found_toolchain);
+                break;
+            }
+        }
 
         self.fetch_debugpy_whl(toolchain.clone(), delegate)
             .await
@@ -824,7 +912,148 @@ mod tests {
     use util::path;
 
     use super::*;
-    use std::{net::Ipv4Addr, path::PathBuf};
+    use task::TcpArgumentsTemplate;
+
+    #[gpui::test]
+    async fn test_tcp_connection_conflict_with_connect_args() {
+        let adapter = PythonDebugAdapter {
+            base_venv_path: OnceCell::new(),
+            debugpy_whl_base_path: OnceCell::new(),
+        };
+
+        let config_with_port_conflict = json!({
+            "request": "attach",
+            "connect": {
+                "port": 5679
+            }
+        });
+
+        let tcp_connection = TcpArgumentsTemplate {
+            host: None,
+            port: Some(5678),
+            timeout: None,
+        };
+
+        let task_def = DebugTaskDefinition {
+            label: "test".into(),
+            adapter: PythonDebugAdapter::ADAPTER_NAME.into(),
+            config: config_with_port_conflict,
+            tcp_connection: Some(tcp_connection.clone()),
+        };
+
+        let result = adapter
+            .get_installed_binary(
+                &test_mocks::MockDelegate::new(),
+                &task_def,
+                None,
+                None,
+                None,
+                Some("python3".to_string()),
+            )
+            .await;
+
+        assert!(result.is_err());
+        assert!(
+            result
+                .unwrap_err()
+                .to_string()
+                .contains("Cannot have two different ports")
+        );
+
+        let host = Ipv4Addr::new(127, 0, 0, 1);
+        let config_with_host_conflict = json!({
+            "request": "attach",
+            "connect": {
+                "host": "192.168.1.1",
+                "port": 5678
+            }
+        });
+
+        let tcp_connection_with_host = TcpArgumentsTemplate {
+            host: Some(host),
+            port: None,
+            timeout: None,
+        };
+
+        let task_def_host = DebugTaskDefinition {
+            label: "test".into(),
+            adapter: PythonDebugAdapter::ADAPTER_NAME.into(),
+            config: config_with_host_conflict,
+            tcp_connection: Some(tcp_connection_with_host),
+        };
+
+        let result_host = adapter
+            .get_installed_binary(
+                &test_mocks::MockDelegate::new(),
+                &task_def_host,
+                None,
+                None,
+                None,
+                Some("python3".to_string()),
+            )
+            .await;
+
+        assert!(result_host.is_err());
+        assert!(
+            result_host
+                .unwrap_err()
+                .to_string()
+                .contains("Cannot have two different hosts")
+        );
+    }
+
+    #[gpui::test]
+    async fn test_attach_with_connect_mode_generates_correct_arguments() {
+        let host = Ipv4Addr::new(127, 0, 0, 1);
+        let port = 5678;
+
+        let args_without_host = PythonDebugAdapter::generate_debugpy_arguments(
+            &host,
+            port,
+            DebugpyLaunchMode::AttachWithConnect { host: None },
+            None,
+            None,
+        )
+        .await
+        .unwrap();
+
+        let expected_suffix = path!("debug_adapters/Debugpy/debugpy/adapter");
+        assert!(args_without_host[0].ends_with(expected_suffix));
+        assert_eq!(args_without_host[1], "connect");
+        assert_eq!(args_without_host[2], "5678");
+
+        let args_with_host = PythonDebugAdapter::generate_debugpy_arguments(
+            &host,
+            port,
+            DebugpyLaunchMode::AttachWithConnect {
+                host: Some("192.168.1.100"),
+            },
+            None,
+            None,
+        )
+        .await
+        .unwrap();
+
+        assert!(args_with_host[0].ends_with(expected_suffix));
+        assert_eq!(args_with_host[1], "connect");
+        assert_eq!(args_with_host[2], "192.168.1.100:");
+        assert_eq!(args_with_host[3], "5678");
+
+        let args_normal = PythonDebugAdapter::generate_debugpy_arguments(
+            &host,
+            port,
+            DebugpyLaunchMode::Normal,
+            None,
+            None,
+        )
+        .await
+        .unwrap();
+
+        assert!(args_normal[0].ends_with(expected_suffix));
+        assert_eq!(args_normal[1], "--host=127.0.0.1");
+        assert_eq!(args_normal[2], "--port=5678");
+        assert!(!args_normal.contains(&"connect".to_string()));
+    }
 
     #[gpui::test]
     async fn test_debugpy_install_path_cases() {
@@ -833,15 +1062,25 @@ mod tests {
 
         // Case 1: User-defined debugpy path (highest precedence)
         let user_path = PathBuf::from("/custom/path/to/debugpy/src/debugpy/adapter");
-        let user_args =
-            PythonDebugAdapter::generate_debugpy_arguments(&host, port, Some(&user_path), None)
-                .await
-                .unwrap();
+        let user_args = PythonDebugAdapter::generate_debugpy_arguments(
+            &host,
+            port,
+            DebugpyLaunchMode::Normal,
+            Some(&user_path),
+            None,
+        )
+        .await
+        .unwrap();
 
-        // Case 2: Venv-installed debugpy (uses -m debugpy.adapter)
-        let venv_args = PythonDebugAdapter::generate_debugpy_arguments(&host, port, None, None)
-            .await
-            .unwrap();
+        let venv_args = PythonDebugAdapter::generate_debugpy_arguments(
+            &host,
+            port,
+            DebugpyLaunchMode::Normal,
+            None,
+            None,
+        )
+        .await
+        .unwrap();
 
         assert_eq!(user_args[0], "/custom/path/to/debugpy/src/debugpy/adapter");
         assert_eq!(user_args[1], "--host=127.0.0.1");
@@ -856,6 +1095,7 @@ mod tests {
         let user_args = PythonDebugAdapter::generate_debugpy_arguments(
             &host,
             port,
+            DebugpyLaunchMode::Normal,
             Some(&user_path),
             Some(vec!["foo".into()]),
         )
@@ -864,6 +1104,7 @@ mod tests {
         let venv_args = PythonDebugAdapter::generate_debugpy_arguments(
             &host,
             port,
+            DebugpyLaunchMode::Normal,
             None,
             Some(vec!["foo".into()]),
         )
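
The argument shapes exercised by these tests can be summarized in a standalone sketch (not the adapter's real API; the user-installed-path and venv-resolution branches are omitted): normal mode passes `--host=`/`--port=` flags, while attach-with-connect switches to debugpy's `connect` form with an optional `host:` argument ahead of the port.

```rust
// Simplified mirror of DebugpyLaunchMode and the flag/`connect` argument split.
enum LaunchMode<'a> {
    Normal,
    AttachWithConnect { host: Option<&'a str> },
}

fn debugpy_args(host: &str, port: u16, mode: LaunchMode) -> Vec<String> {
    match mode {
        LaunchMode::Normal => vec![format!("--host={host}"), format!("--port={port}")],
        LaunchMode::AttachWithConnect { host } => {
            let mut args = vec!["connect".to_string()];
            if let Some(host) = host {
                args.push(format!("{host}:"));
            }
            args.push(format!("{port}"));
            args
        }
    }
}

fn main() {
    assert_eq!(
        debugpy_args("127.0.0.1", 5678, LaunchMode::Normal),
        vec!["--host=127.0.0.1", "--port=5678"]
    );
    assert_eq!(
        debugpy_args("127.0.0.1", 5678, LaunchMode::AttachWithConnect { host: Some("192.168.1.100") }),
        vec!["connect", "192.168.1.100:", "5678"]
    );
    assert_eq!(
        debugpy_args("127.0.0.1", 5678, LaunchMode::AttachWithConnect { host: None }),
        vec!["connect", "5678"]
    );
}
```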

crates/debugger_tools/src/dap_log.rs πŸ”—

@@ -1029,11 +1029,13 @@ impl SearchableItem for DapLogView {
         &mut self,
         index: usize,
         matches: &[Self::Match],
+        collapse: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.editor
-            .update(cx, |e, cx| e.activate_match(index, matches, window, cx))
+        self.editor.update(cx, |e, cx| {
+            e.activate_match(index, matches, collapse, window, cx)
+        })
     }
 
     fn select_matches(

crates/debugger_ui/src/session/running/breakpoint_list.rs πŸ”—

@@ -12,6 +12,7 @@ use gpui::{
     Action, AppContext, ClickEvent, Entity, FocusHandle, Focusable, MouseButton, ScrollStrategy,
     Task, UniformListScrollHandle, WeakEntity, actions, uniform_list,
 };
+use itertools::Itertools;
 use language::Point;
 use project::{
     Project,
@@ -24,7 +25,7 @@ use project::{
 };
 use ui::{
     Divider, DividerColor, FluentBuilder as _, Indicator, IntoElement, ListItem, Render,
-    StatefulInteractiveElement, Tooltip, WithScrollbar, prelude::*,
+    ScrollAxes, StatefulInteractiveElement, Tooltip, WithScrollbar, prelude::*,
 };
 use util::rel_path::RelPath;
 use workspace::Workspace;
@@ -55,6 +56,7 @@ pub(crate) struct BreakpointList {
     focus_handle: FocusHandle,
     scroll_handle: UniformListScrollHandle,
     selected_ix: Option<usize>,
+    max_width_index: Option<usize>,
     input: Entity<Editor>,
     strip_mode: Option<ActiveBreakpointStripMode>,
     serialize_exception_breakpoints_task: Option<Task<anyhow::Result<()>>>,
@@ -95,6 +97,7 @@ impl BreakpointList {
                 dap_store,
                 worktree_store,
                 breakpoints: Default::default(),
+                max_width_index: None,
                 workspace,
                 session,
                 focus_handle,
@@ -546,7 +549,7 @@ impl BreakpointList {
             .session
             .as_ref()
             .map(|session| SupportedBreakpointProperties::from(session.read(cx).capabilities()))
-            .unwrap_or_else(SupportedBreakpointProperties::empty);
+            .unwrap_or_else(SupportedBreakpointProperties::all);
         let strip_mode = self.strip_mode;
 
         uniform_list(
@@ -570,6 +573,8 @@ impl BreakpointList {
                     .collect()
             }),
         )
+        .with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained)
+        .with_width_from_item(self.max_width_index)
         .track_scroll(self.scroll_handle.clone())
         .flex_1()
     }
@@ -732,6 +737,26 @@ impl Render for BreakpointList {
                 .chain(exception_breakpoints),
         );
 
+        let text_pixels = ui::TextSize::Default.pixels(cx).to_f64() as f32;
+
+        self.max_width_index = self
+            .breakpoints
+            .iter()
+            .map(|entry| match &entry.kind {
+                BreakpointEntryKind::LineBreakpoint(line_bp) => {
+                    let name_and_line = format!("{}:{}", line_bp.name, line_bp.line);
+                    let dir_len = line_bp.dir.as_ref().map(|d| d.len()).unwrap_or(0);
+                    (name_and_line.len() + dir_len) as f32 * text_pixels
+                }
+                BreakpointEntryKind::ExceptionBreakpoint(exc_bp) => {
+                    exc_bp.data.label.len() as f32 * text_pixels
+                }
+                BreakpointEntryKind::DataBreakpoint(data_bp) => {
+                    data_bp.0.context.human_readable_label().len() as f32 * text_pixels
+                }
+            })
+            .position_max_by(|left, right| left.total_cmp(right));
+
         v_flex()
             .id("breakpoint-list")
             .key_context("BreakpointList")
@@ -749,7 +774,14 @@ impl Render for BreakpointList {
             .size_full()
             .pt_1()
             .child(self.render_list(cx))
-            .vertical_scrollbar_for(self.scroll_handle.clone(), window, cx)
+            .custom_scrollbars(
+                ui::Scrollbars::new(ScrollAxes::Both)
+                    .tracked_scroll_handle(self.scroll_handle.clone())
+                    .with_track_along(ScrollAxes::Both, cx.theme().colors().panel_background)
+                    .tracked_entity(cx.entity_id()),
+                window,
+                cx,
+            )
             .when_some(self.strip_mode, |this, _| {
                 this.child(Divider::horizontal().color(DividerColor::Border))
                     .child(
@@ -1376,8 +1408,10 @@ impl RenderOnce for BreakpointOptionsStrip {
         h_flex()
             .gap_px()
             .mr_3() // Space to avoid overlapping with the scrollbar
-            .child(
-                div()
+            .justify_end()
+            .when(has_logs || self.is_selected, |this| {
+                this.child(
+                    div()
                     .map(self.add_focus_styles(
                         ActiveBreakpointStripMode::Log,
                         supports_logs,
@@ -1406,45 +1440,46 @@ impl RenderOnce for BreakpointOptionsStrip {
                             )
                         }),
                     )
-                    .when(!has_logs && !self.is_selected, |this| this.invisible()),
-            )
-            .child(
-                div()
-                    .map(self.add_focus_styles(
-                        ActiveBreakpointStripMode::Condition,
-                        supports_condition,
-                        window,
-                        cx,
-                    ))
-                    .child(
-                        IconButton::new(
-                            SharedString::from(format!("{id}-condition-toggle")),
-                            IconName::SplitAlt,
-                        )
-                        .shape(ui::IconButtonShape::Square)
-                        .style(style_for_toggle(
+                )
+            })
+            .when(has_condition || self.is_selected, |this| {
+                this.child(
+                    div()
+                        .map(self.add_focus_styles(
                             ActiveBreakpointStripMode::Condition,
-                            has_condition,
+                            supports_condition,
+                            window,
+                            cx,
                         ))
-                        .icon_size(IconSize::Small)
-                        .icon_color(color_for_toggle(has_condition))
-                        .when(has_condition, |this| this.indicator(Indicator::dot().color(Color::Info)))
-                        .disabled(!supports_condition)
-                        .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Condition))
-                        .on_click(self.on_click_callback(ActiveBreakpointStripMode::Condition))
-                        .tooltip(|_window, cx|  {
-                            Tooltip::with_meta(
-                                "Set Condition",
-                                None,
-                                "Set condition to evaluate when a breakpoint is hit. Program execution will stop only when the condition is met.",
-                                cx,
+                        .child(
+                            IconButton::new(
+                                SharedString::from(format!("{id}-condition-toggle")),
+                                IconName::SplitAlt,
                             )
-                        }),
-                    )
-                    .when(!has_condition && !self.is_selected, |this| this.invisible()),
-            )
-            .child(
-                div()
+                            .shape(ui::IconButtonShape::Square)
+                            .style(style_for_toggle(
+                                ActiveBreakpointStripMode::Condition,
+                                has_condition,
+                            ))
+                            .icon_size(IconSize::Small)
+                            .icon_color(color_for_toggle(has_condition))
+                            .when(has_condition, |this| this.indicator(Indicator::dot().color(Color::Info)))
+                            .disabled(!supports_condition)
+                            .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Condition))
+                            .on_click(self.on_click_callback(ActiveBreakpointStripMode::Condition))
+                            .tooltip(|_window, cx|  {
+                                Tooltip::with_meta(
+                                    "Set Condition",
+                                    None,
+                                    "Set condition to evaluate when a breakpoint is hit. Program execution will stop only when the condition is met.",
+                                    cx,
+                                )
+                            }),
+                        )
+                )
+            })
+            .when(has_hit_condition || self.is_selected, |this| {
+                this.child(div()
                     .map(self.add_focus_styles(
                         ActiveBreakpointStripMode::HitCondition,
                         supports_hit_condition,
@@ -1475,10 +1510,8 @@ impl RenderOnce for BreakpointOptionsStrip {
                                 cx,
                             )
                         }),
-                    )
-                    .when(!has_hit_condition && !self.is_selected, |this| {
-                        this.invisible()
-                    }),
-            )
+                    ))
+
+            })
     }
 }
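
The `max_width_index` computation above (and the matching one in `variable_list.rs`) boils down to a width heuristic that can be sketched on its own: approximate each row's rendered width as character count times font size and remember the index of the widest row, which the uniform list then sizes itself from via `with_width_from_item`. The real code uses `Itertools::position_max_by`; this sketch sticks to std, and the row strings are simplified stand-ins for the breakpoint entries.

```rust
// Returns the index of the row with the largest estimated pixel width, if any.
fn widest_row_index(rows: &[&str], text_pixels: f32) -> Option<usize> {
    rows.iter()
        .map(|row| row.chars().count() as f32 * text_pixels)
        .enumerate()
        .max_by(|(_, a), (_, b)| a.total_cmp(b))
        .map(|(ix, _)| ix)
}

fn main() {
    let rows = ["main.rs:12", "breakpoint_list.rs:1408 crates/debugger_ui/src/session/running"];
    assert_eq!(widest_row_index(&rows, 14.0), Some(1));
    assert_eq!(widest_row_index(&[], 14.0), None);
}
```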

crates/debugger_ui/src/session/running/memory_view.rs πŸ”—

@@ -10,8 +10,9 @@ use std::{
 use editor::{Editor, EditorElement, EditorStyle};
 use gpui::{
     Action, Along, AppContext, Axis, DismissEvent, DragMoveEvent, Empty, Entity, FocusHandle,
-    Focusable, MouseButton, Point, ScrollStrategy, ScrollWheelEvent, Subscription, Task, TextStyle,
-    UniformList, UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list,
+    Focusable, ListHorizontalSizingBehavior, MouseButton, Point, ScrollStrategy, ScrollWheelEvent,
+    Subscription, Task, TextStyle, UniformList, UniformListScrollHandle, WeakEntity, actions,
+    anchored, deferred, uniform_list,
 };
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::debugger::{MemoryCell, dap_command::DataBreakpointContext, session::Session};
@@ -229,6 +230,7 @@ impl MemoryView {
             },
         )
         .track_scroll(view_state.scroll_handle)
+        .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained)
         .on_scroll_wheel(cx.listener(|this, evt: &ScrollWheelEvent, window, _| {
             let mut view_state = this.view_state();
             let delta = evt.delta.pixel_delta(window.line_height());
@@ -917,7 +919,17 @@ impl Render for MemoryView {
                         )
                         .with_priority(1)
                     }))
-                    .vertical_scrollbar_for(self.view_state_handle.clone(), window, cx),
+                    .custom_scrollbars(
+                        ui::Scrollbars::new(ui::ScrollAxes::Both)
+                            .tracked_scroll_handle(self.view_state_handle.clone())
+                            .with_track_along(
+                                ui::ScrollAxes::Both,
+                                cx.theme().colors().panel_background,
+                            )
+                            .tracked_entity(cx.entity_id()),
+                        window,
+                        cx,
+                    ),
             )
     }
 }

crates/debugger_ui/src/session/running/variable_list.rs πŸ”—

@@ -11,15 +11,18 @@ use gpui::{
     FocusHandle, Focusable, Hsla, MouseDownEvent, Point, Subscription, TextStyleRefinement,
     UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list,
 };
+use itertools::Itertools;
 use menu::{SelectFirst, SelectLast, SelectNext, SelectPrevious};
 use project::debugger::{
     dap_command::DataBreakpointContext,
     session::{Session, SessionEvent, Watcher},
 };
 use std::{collections::HashMap, ops::Range, sync::Arc};
-use ui::{ContextMenu, ListItem, ScrollableHandle, Tooltip, WithScrollbar, prelude::*};
+use ui::{ContextMenu, ListItem, ScrollAxes, ScrollableHandle, Tooltip, WithScrollbar, prelude::*};
 use util::{debug_panic, maybe};
 
+static INDENT_STEP_SIZE: Pixels = px(10.0);
+
 actions!(
     variable_list,
     [
@@ -185,6 +188,7 @@ struct VariableColor {
 
 pub struct VariableList {
     entries: Vec<ListEntry>,
+    max_width_index: Option<usize>,
     entry_states: HashMap<EntryPath, EntryState>,
     selected_stack_frame_id: Option<StackFrameId>,
     list_handle: UniformListScrollHandle,
@@ -243,6 +247,7 @@ impl VariableList {
             disabled: false,
             edited_path: None,
             entries: Default::default(),
+            max_width_index: None,
             entry_states: Default::default(),
             weak_running,
             memory_view,
@@ -368,6 +373,26 @@ impl VariableList {
         }
 
         self.entries = entries;
+
+        let text_pixels = ui::TextSize::Default.pixels(cx).to_f64() as f32;
+        let indent_size = INDENT_STEP_SIZE.to_f64() as f32;
+
+        self.max_width_index = self
+            .entries
+            .iter()
+            .map(|entry| match &entry.entry {
+                DapEntry::Scope(scope) => scope.name.len() as f32 * text_pixels,
+                DapEntry::Variable(variable) => {
+                    (variable.value.len() + variable.name.len()) as f32 * text_pixels
+                        + (entry.path.indices.len() as f32 * indent_size)
+                }
+                DapEntry::Watcher(watcher) => {
+                    (watcher.value.len() + watcher.expression.len()) as f32 * text_pixels
+                        + (entry.path.indices.len() as f32 * indent_size)
+                }
+            })
+            .position_max_by(|left, right| left.total_cmp(right));
+
         cx.notify();
     }
 
@@ -1244,7 +1269,7 @@ impl VariableList {
                 .disabled(self.disabled)
                 .selectable(false)
                 .indent_level(state.depth)
-                .indent_step_size(px(10.))
+                .indent_step_size(INDENT_STEP_SIZE)
                 .always_show_disclosure_icon(true)
                 .when(var_ref > 0, |list_item| {
                     list_item.toggle(state.is_expanded).on_toggle(cx.listener({
@@ -1445,7 +1470,7 @@ impl VariableList {
                 .disabled(self.disabled)
                 .selectable(false)
                 .indent_level(state.depth)
-                .indent_step_size(px(10.))
+                .indent_step_size(INDENT_STEP_SIZE)
                 .always_show_disclosure_icon(true)
                 .when(var_ref > 0, |list_item| {
                     list_item.toggle(state.is_expanded).on_toggle(cx.listener({
@@ -1507,7 +1532,6 @@ impl Render for VariableList {
             .key_context("VariableList")
             .id("variable-list")
             .group("variable-list")
-            .overflow_y_scroll()
             .size_full()
             .on_action(cx.listener(Self::select_first))
             .on_action(cx.listener(Self::select_last))
@@ -1533,6 +1557,9 @@ impl Render for VariableList {
                     }),
                 )
                 .track_scroll(self.list_handle.clone())
+                .with_width_from_item(self.max_width_index)
+                .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
+                .with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained)
                 .gap_1_5()
                 .size_full()
                 .flex_grow(),
@@ -1546,7 +1573,15 @@ impl Render for VariableList {
                 )
                 .with_priority(1)
             }))
-            .vertical_scrollbar_for(self.list_handle.clone(), window, cx)
+            .custom_scrollbars(
+                ui::Scrollbars::new(ScrollAxes::Both)
+                    .tracked_scroll_handle(self.list_handle.clone())
+                    .with_track_along(ScrollAxes::Both, cx.theme().colors().panel_background)
+                    .tracked_entity(cx.entity_id()),
+                window,
+                cx,
+            )
     }
 }
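
Note on the width computation above: to make horizontal scrolling useful, the variable list estimates each entry's rendered width (character count times the default text size, plus indent depth times INDENT_STEP_SIZE) and records the index of the widest entry, which is then handed to with_width_from_item so the unconstrained list sizes itself from that row. A condensed sketch, where estimated_width is a stand-in for the per-variant arithmetic shown in the diff:

    // rough width estimate per entry; position_max_by comes from itertools::Itertools,
    // and estimated_width stands in for the Scope/Variable/Watcher match above
    self.max_width_index = self
        .entries
        .iter()
        .map(|entry| estimated_width(entry, text_pixels, indent_size))
        .position_max_by(|left, right| left.total_cmp(right));

    // later, in render():
    uniform_list
        .with_width_from_item(self.max_width_index)
        .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
        .with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained)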
 

crates/diagnostics/src/diagnostics_tests.rs πŸ”—

@@ -877,7 +877,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
                             vec![Inlay::edit_prediction(
                                 post_inc(&mut next_inlay_id),
                                 snapshot.buffer_snapshot().anchor_before(position),
-                                Rope::from_iter(["Test inlay ", "next_inlay_id"]),
+                                Rope::from_iter_small(["Test inlay ", "next_inlay_id"]),
                             )],
                             cx,
                         );
@@ -2070,7 +2070,7 @@ fn random_lsp_diagnostic(
     const ERROR_MARGIN: usize = 10;
 
     let file_content = fs.read_file_sync(path).unwrap();
-    let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref());
+    let file_text = Rope::from_str_small(String::from_utf8_lossy(&file_content).as_ref());
 
     let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN));
     let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN));

crates/edit_prediction_button/src/edit_prediction_button.rs πŸ”—

@@ -1,5 +1,5 @@
 use anyhow::Result;
-use client::{UserStore, zed_urls};
+use client::{Client, UserStore, zed_urls};
 use cloud_llm_client::UsageLimit;
 use codestral::CodestralCompletionProvider;
 use copilot::{Copilot, Status};
@@ -13,7 +13,7 @@ use gpui::{
 };
 use indoc::indoc;
 use language::{
-    EditPredictionsMode, File, Language,
+    EditPredictionsMode, File, Language, Rope,
     language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings},
 };
 use project::DisableAiSettings;
@@ -192,6 +192,7 @@ impl Render for EditPredictionButton {
                                 Some(ContextMenu::build(window, cx, |menu, _, _| {
                                     let fs = fs.clone();
                                     let activate_url = activate_url.clone();
+
                                     menu.entry("Sign In", None, move |_, cx| {
                                         cx.open_url(activate_url.as_str())
                                     })
@@ -244,15 +245,8 @@ impl Render for EditPredictionButton {
                             } else {
                                 Some(ContextMenu::build(window, cx, |menu, _, _| {
                                     let fs = fs.clone();
-                                    menu.entry("Use Zed AI instead", None, move |_, cx| {
-                                        set_completion_provider(
-                                            fs.clone(),
-                                            cx,
-                                            EditPredictionProvider::Zed,
-                                        )
-                                    })
-                                    .separator()
-                                    .entry(
+
+                                    menu.entry(
                                         "Configure Codestral API Key",
                                         None,
                                         move |window, cx| {
@@ -262,6 +256,18 @@ impl Render for EditPredictionButton {
                                             );
                                         },
                                     )
+                                    .separator()
+                                    .entry(
+                                        "Use Zed AI instead",
+                                        None,
+                                        move |_, cx| {
+                                            set_completion_provider(
+                                                fs.clone(),
+                                                cx,
+                                                EditPredictionProvider::Zed,
+                                            )
+                                        },
+                                    )
                                 }))
                             }
                         })
@@ -412,6 +418,7 @@ impl EditPredictionButton {
         fs: Arc<dyn Fs>,
         user_store: Entity<UserStore>,
         popover_menu_handle: PopoverMenuHandle<ContextMenu>,
+        client: Arc<Client>,
         cx: &mut Context<Self>,
     ) -> Self {
         if let Some(copilot) = Copilot::global(cx) {
@@ -421,6 +428,8 @@ impl EditPredictionButton {
         cx.observe_global::<SettingsStore>(move |_, cx| cx.notify())
             .detach();
 
+        CodestralCompletionProvider::ensure_api_key_loaded(client.http_client(), cx);
+
         Self {
             editor_subscription: None,
             editor_enabled: None,
@@ -435,6 +444,89 @@ impl EditPredictionButton {
         }
     }
 
+    fn get_available_providers(&self, cx: &App) -> Vec<EditPredictionProvider> {
+        let mut providers = Vec::new();
+
+        providers.push(EditPredictionProvider::Zed);
+
+        if let Some(copilot) = Copilot::global(cx) {
+            if matches!(copilot.read(cx).status(), Status::Authorized) {
+                providers.push(EditPredictionProvider::Copilot);
+            }
+        }
+
+        if let Some(supermaven) = Supermaven::global(cx) {
+            if let Supermaven::Spawned(agent) = supermaven.read(cx) {
+                if matches!(agent.account_status, AccountStatus::Ready) {
+                    providers.push(EditPredictionProvider::Supermaven);
+                }
+            }
+        }
+
+        if CodestralCompletionProvider::has_api_key(cx) {
+            providers.push(EditPredictionProvider::Codestral);
+        }
+
+        providers
+    }
+
+    fn add_provider_switching_section(
+        &self,
+        mut menu: ContextMenu,
+        current_provider: EditPredictionProvider,
+        cx: &App,
+    ) -> ContextMenu {
+        let available_providers = self.get_available_providers(cx);
+
+        let other_providers: Vec<_> = available_providers
+            .into_iter()
+            .filter(|p| *p != current_provider && *p != EditPredictionProvider::None)
+            .collect();
+
+        if !other_providers.is_empty() {
+            menu = menu.separator().header("Switch Providers");
+
+            for provider in other_providers {
+                let fs = self.fs.clone();
+
+                menu = match provider {
+                    EditPredictionProvider::Zed => menu.item(
+                        ContextMenuEntry::new("Zed AI")
+                            .documentation_aside(
+                                DocumentationSide::Left,
+                                DocumentationEdge::Top,
+                                |_| {
+                                    Label::new("Zed's edit prediction is powered by Zeta, an open-source, dataset model.")
+                                        .into_any_element()
+                                },
+                            )
+                            .handler(move |_, cx| {
+                                set_completion_provider(fs.clone(), cx, provider);
+                            }),
+                    ),
+                    EditPredictionProvider::Copilot => {
+                        menu.entry("GitHub Copilot", None, move |_, cx| {
+                            set_completion_provider(fs.clone(), cx, provider);
+                        })
+                    }
+                    EditPredictionProvider::Supermaven => {
+                        menu.entry("Supermaven", None, move |_, cx| {
+                            set_completion_provider(fs.clone(), cx, provider);
+                        })
+                    }
+                    EditPredictionProvider::Codestral => {
+                        menu.entry("Codestral", None, move |_, cx| {
+                            set_completion_provider(fs.clone(), cx, provider);
+                        })
+                    }
+                    EditPredictionProvider::None => continue,
+                };
+            }
+        }
+
+        menu
+    }
+
     pub fn build_copilot_start_menu(
         &mut self,
         window: &mut Window,
@@ -572,8 +664,10 @@ impl EditPredictionButton {
         }
 
         menu = menu.separator().header("Privacy");
+
         if let Some(provider) = &self.edit_prediction_provider {
             let data_collection = provider.data_collection_state(cx);
+
             if data_collection.is_supported() {
                 let provider = provider.clone();
                 let enabled = data_collection.is_enabled();
@@ -691,7 +785,7 @@ impl EditPredictionButton {
                     }
                 }),
         ).item(
-            ContextMenuEntry::new("View Documentation")
+            ContextMenuEntry::new("View Docs")
                 .icon(IconName::FileGeneric)
                 .icon_color(Color::Muted)
                 .handler(move |_, cx| {
@@ -711,6 +805,7 @@ impl EditPredictionButton {
         if let Some(editor_focus_handle) = self.editor_focus_handle.clone() {
             menu = menu
                 .separator()
+                .header("Actions")
                 .entry(
                     "Predict Edit at Cursor",
                     Some(Box::new(ShowEditPrediction)),
@@ -721,7 +816,11 @@ impl EditPredictionButton {
                         }
                     },
                 )
-                .context(editor_focus_handle);
+                .context(editor_focus_handle)
+                .when(
+                    cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>(),
+                    |this| this.action("Rate Completions", RateCompletions.boxed_clone()),
+                );
         }
 
         menu
@@ -733,15 +832,11 @@ impl EditPredictionButton {
         cx: &mut Context<Self>,
     ) -> Entity<ContextMenu> {
         ContextMenu::build(window, cx, |menu, window, cx| {
-            self.build_language_settings_menu(menu, window, cx)
-                .separator()
-                .entry("Use Zed AI instead", None, {
-                    let fs = self.fs.clone();
-                    move |_window, cx| {
-                        set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed)
-                    }
-                })
-                .separator()
+            let menu = self.build_language_settings_menu(menu, window, cx);
+            let menu =
+                self.add_provider_switching_section(menu, EditPredictionProvider::Copilot, cx);
+
+            menu.separator()
                 .link(
                     "Go to Copilot Settings",
                     OpenBrowser {
@@ -759,8 +854,11 @@ impl EditPredictionButton {
         cx: &mut Context<Self>,
     ) -> Entity<ContextMenu> {
         ContextMenu::build(window, cx, |menu, window, cx| {
-            self.build_language_settings_menu(menu, window, cx)
-                .separator()
+            let menu = self.build_language_settings_menu(menu, window, cx);
+            let menu =
+                self.add_provider_switching_section(menu, EditPredictionProvider::Supermaven, cx);
+
+            menu.separator()
                 .action("Sign Out", supermaven::SignOut.boxed_clone())
         })
     }
@@ -770,14 +868,12 @@ impl EditPredictionButton {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Entity<ContextMenu> {
-        let fs = self.fs.clone();
         ContextMenu::build(window, cx, |menu, window, cx| {
-            self.build_language_settings_menu(menu, window, cx)
-                .separator()
-                .entry("Use Zed AI instead", None, move |_, cx| {
-                    set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed)
-                })
-                .separator()
+            let menu = self.build_language_settings_menu(menu, window, cx);
+            let menu =
+                self.add_provider_switching_section(menu, EditPredictionProvider::Codestral, cx);
+
+            menu.separator()
                 .entry("Configure Codestral API Key", None, move |window, cx| {
                     window.dispatch_action(zed_actions::agent::OpenSettings.boxed_clone(), cx);
                 })
@@ -872,10 +968,10 @@ impl EditPredictionButton {
                     .separator();
             }
 
-            self.build_language_settings_menu(menu, window, cx).when(
-                cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>(),
-                |this| this.action("Rate Completions", RateCompletions.boxed_clone()),
-            )
+            let menu = self.build_language_settings_menu(menu, window, cx);
+            let menu = self.add_provider_switching_section(menu, EditPredictionProvider::Zed, cx);
+
+            menu
         })
     }
 
@@ -960,8 +1056,11 @@ async fn open_disabled_globs_setting_in_editor(
 ) -> Result<()> {
     let settings_editor = workspace
         .update_in(cx, |_, window, cx| {
-            create_and_open_local_file(paths::settings_file(), window, cx, || {
-                settings::initial_user_settings_content().as_ref().into()
+            create_and_open_local_file(paths::settings_file(), window, cx, |cx| {
+                Rope::from_str(
+                    settings::initial_user_settings_content().as_ref(),
+                    cx.background_executor(),
+                )
             })
         })?
         .await?
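
Note on the menu changes above: each provider-specific menu now goes through the same helper instead of hard-coding a "Use Zed AI instead" entry. build_language_settings_menu produces the base menu, then add_provider_switching_section appends a "Switch Providers" header with one entry per other available provider (Zed, Copilot, Supermaven, Codestral). A condensed sketch of the composition, with the provider-specific tail entries elided:

    ContextMenu::build(window, cx, |menu, window, cx| {
        let menu = self.build_language_settings_menu(menu, window, cx);
        // appends "Switch Providers" entries for every configured provider except the current one
        let menu = self.add_provider_switching_section(menu, EditPredictionProvider::Copilot, cx);
        menu.separator()
        // ...followed by the provider-specific entries (settings links, sign out, etc.)
    })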

crates/editor/src/display_map.rs πŸ”—

@@ -1584,6 +1584,7 @@ pub mod tests {
     use lsp::LanguageServerId;
     use project::Project;
     use rand::{Rng, prelude::*};
+    use rope::Rope;
     use settings::{SettingsContent, SettingsStore};
     use smol::stream::StreamExt;
     use std::{env, sync::Arc};
@@ -2089,7 +2090,7 @@ pub mod tests {
                 vec![Inlay::edit_prediction(
                     0,
                     buffer_snapshot.anchor_after(0),
-                    "\n",
+                    Rope::from_str_small("\n"),
                 )],
                 cx,
             );

crates/editor/src/display_map/inlay_map.rs πŸ”—

@@ -700,16 +700,20 @@ impl InlayMap {
                     .collect::<String>();
 
                 let next_inlay = if i % 2 == 0 {
+                    use rope::Rope;
+
                     Inlay::mock_hint(
                         post_inc(next_inlay_id),
                         snapshot.buffer.anchor_at(position, bias),
-                        &text,
+                        Rope::from_str_small(&text),
                     )
                 } else {
+                    use rope::Rope;
+
                     Inlay::edit_prediction(
                         post_inc(next_inlay_id),
                         snapshot.buffer.anchor_at(position, bias),
-                        &text,
+                        Rope::from_str_small(&text),
                     )
                 };
                 let inlay_id = next_inlay.id;
@@ -1301,7 +1305,7 @@ mod tests {
             vec![Inlay::mock_hint(
                 post_inc(&mut next_inlay_id),
                 buffer.read(cx).snapshot(cx).anchor_after(3),
-                "|123|",
+                Rope::from_str_small("|123|"),
             )],
         );
         assert_eq!(inlay_snapshot.text(), "abc|123|defghi");
@@ -1378,12 +1382,12 @@ mod tests {
                 Inlay::mock_hint(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(3),
-                    "|123|",
+                    Rope::from_str_small("|123|"),
                 ),
                 Inlay::edit_prediction(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_after(3),
-                    "|456|",
+                    Rope::from_str_small("|456|"),
                 ),
             ],
         );
@@ -1593,17 +1597,17 @@ mod tests {
                 Inlay::mock_hint(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(0),
-                    "|123|\n",
+                    Rope::from_str_small("|123|\n"),
                 ),
                 Inlay::mock_hint(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(4),
-                    "|456|",
+                    Rope::from_str_small("|456|"),
                 ),
                 Inlay::edit_prediction(
                     post_inc(&mut next_inlay_id),
                     buffer.read(cx).snapshot(cx).anchor_before(7),
-                    "\n|567|\n",
+                    Rope::from_str_small("\n|567|\n"),
                 ),
             ],
         );
@@ -1677,9 +1681,14 @@ mod tests {
                     (offset, inlay.clone())
                 })
                 .collect::<Vec<_>>();
-            let mut expected_text = Rope::from(&buffer_snapshot.text());
+            let mut expected_text =
+                Rope::from_str(&buffer_snapshot.text(), cx.background_executor());
             for (offset, inlay) in inlays.iter().rev() {
-                expected_text.replace(*offset..*offset, &inlay.text().to_string());
+                expected_text.replace(
+                    *offset..*offset,
+                    &inlay.text().to_string(),
+                    cx.background_executor(),
+                );
             }
             assert_eq!(inlay_snapshot.text(), expected_text.to_string());
 
@@ -2067,7 +2076,7 @@ mod tests {
         let inlay = Inlay {
             id: InlayId::Hint(0),
             position,
-            content: InlayContent::Text(text::Rope::from(inlay_text)),
+            content: InlayContent::Text(text::Rope::from_str(inlay_text, cx.background_executor())),
         };
 
         let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
@@ -2181,7 +2190,10 @@ mod tests {
             let inlay = Inlay {
                 id: InlayId::Hint(0),
                 position,
-                content: InlayContent::Text(text::Rope::from(test_case.inlay_text)),
+                content: InlayContent::Text(text::Rope::from_str(
+                    test_case.inlay_text,
+                    cx.background_executor(),
+                )),
             };
 
             let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
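
Note on the Rope changes above: rope construction and editing now take a background executor (Rope::from_str(text, cx.background_executor()), replace(range, text, executor)), presumably so larger texts can be processed off the foreground thread, while the *_small variants (from_str_small, from_iter_small, push_small, push_front_small) cover short strings such as test fixtures and inlay labels without an executor. A minimal sketch of the two paths:

    use rope::Rope;

    // potentially large text: pass the background executor to the constructor and to edits
    let mut rope = Rope::from_str(&file_contents, cx.background_executor());
    rope.replace(0..0, "prefix ", cx.background_executor());

    // short, fixed strings (tests, hint labels): the small variants take no executor
    let label = Rope::from_str_small("|123|");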

crates/editor/src/display_map/tab_map.rs πŸ”—

@@ -1042,7 +1042,7 @@ mod tests {
         let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size);
         let tabs_snapshot = tab_map.set_max_expansion_column(32);
 
-        let text = text::Rope::from(tabs_snapshot.text().as_str());
+        let text = text::Rope::from_str(tabs_snapshot.text().as_str(), cx.background_executor());
         log::info!(
             "TabMap text (tab size: {}): {:?}",
             tab_size,

crates/editor/src/display_map/wrap_map.rs πŸ”—

@@ -568,18 +568,15 @@ impl WrapSnapshot {
             let mut old_start = old_cursor.start().output.lines;
             old_start += tab_edit.old.start.0 - old_cursor.start().input.lines;
 
-            // todo(lw): Should these be seek_forward?
-            old_cursor.seek(&tab_edit.old.end, Bias::Right);
+            old_cursor.seek_forward(&tab_edit.old.end, Bias::Right);
             let mut old_end = old_cursor.start().output.lines;
             old_end += tab_edit.old.end.0 - old_cursor.start().input.lines;
 
-            // todo(lw): Should these be seek_forward?
             new_cursor.seek(&tab_edit.new.start, Bias::Right);
             let mut new_start = new_cursor.start().output.lines;
             new_start += tab_edit.new.start.0 - new_cursor.start().input.lines;
 
-            // todo(lw): Should these be seek_forward?
-            new_cursor.seek(&tab_edit.new.end, Bias::Right);
+            new_cursor.seek_forward(&tab_edit.new.end, Bias::Right);
             let mut new_end = new_cursor.start().output.lines;
             new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
 
@@ -866,7 +863,7 @@ impl WrapSnapshot {
                 }
             }
 
-            let text = language::Rope::from(self.text().as_str());
+            let text = language::Rope::from_str_small(self.text().as_str());
             let mut input_buffer_rows = self.tab_snapshot.rows(0);
             let mut expected_buffer_rows = Vec::new();
             let mut prev_tab_row = 0;
@@ -1416,9 +1413,10 @@ mod tests {
             }
         }
 
-        let mut initial_text = Rope::from(initial_snapshot.text().as_str());
+        let mut initial_text =
+            Rope::from_str(initial_snapshot.text().as_str(), cx.background_executor());
         for (snapshot, patch) in edits {
-            let snapshot_text = Rope::from(snapshot.text().as_str());
+            let snapshot_text = Rope::from_str(snapshot.text().as_str(), cx.background_executor());
             for edit in &patch {
                 let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
                 let old_end = initial_text.point_to_offset(cmp::min(
@@ -1434,7 +1432,7 @@ mod tests {
                     .chunks_in_range(new_start..new_end)
                     .collect::<String>();
 
-                initial_text.replace(old_start..old_end, &new_text);
+                initial_text.replace(old_start..old_end, &new_text, cx.background_executor());
             }
             assert_eq!(initial_text.to_string(), snapshot_text.to_string());
         }

crates/editor/src/editor.rs πŸ”—

@@ -1071,7 +1071,6 @@ pub struct Editor {
     searchable: bool,
     cursor_shape: CursorShape,
     current_line_highlight: Option<CurrentLineHighlight>,
-    collapse_matches: bool,
     autoindent_mode: Option<AutoindentMode>,
     workspace: Option<(WeakEntity<Workspace>, Option<WorkspaceId>)>,
     input_enabled: bool,
@@ -1835,9 +1834,15 @@ impl Editor {
                     project::Event::RefreshCodeLens => {
                         // we always query lens with actions, without storing them, always refreshing them
                     }
-                    project::Event::RefreshInlayHints(server_id) => {
+                    project::Event::RefreshInlayHints {
+                        server_id,
+                        request_id,
+                    } => {
                         editor.refresh_inlay_hints(
-                            InlayHintRefreshReason::RefreshRequested(*server_id),
+                            InlayHintRefreshReason::RefreshRequested {
+                                server_id: *server_id,
+                                request_id: *request_id,
+                            },
                             cx,
                         );
                     }
@@ -2121,7 +2126,7 @@ impl Editor {
                 .unwrap_or_default(),
             current_line_highlight: None,
             autoindent_mode: Some(AutoindentMode::EachLine),
-            collapse_matches: false,
+
             workspace: None,
             input_enabled: !is_minimap,
             use_modal_editing: full_mode,
@@ -2280,7 +2285,7 @@ impl Editor {
                     );
                 }
                 EditorEvent::Edited { .. } => {
-                    if !vim_enabled(cx) {
+                    if vim_flavor(cx).is_none() {
                         let display_map = editor.display_snapshot(cx);
                         let selections = editor.selections.all_adjusted_display(&display_map);
                         let pop_state = editor
@@ -2896,12 +2901,12 @@ impl Editor {
         self.current_line_highlight = current_line_highlight;
     }
 
-    pub fn set_collapse_matches(&mut self, collapse_matches: bool) {
-        self.collapse_matches = collapse_matches;
-    }
-
-    pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
-        if self.collapse_matches {
+    pub fn range_for_match<T: std::marker::Copy>(
+        &self,
+        range: &Range<T>,
+        collapse: bool,
+    ) -> Range<T> {
+        if collapse {
             return range.start..range.start;
         }
         range.clone()
@@ -7869,7 +7874,7 @@ impl Editor {
                         let inlay = Inlay::edit_prediction(
                             post_inc(&mut self.next_inlay_id),
                             range.start,
-                            new_text.as_str(),
+                            Rope::from_str_small(new_text.as_str()),
                         );
                         inlay_ids.push(inlay.id);
                         inlays.push(inlay);
@@ -16670,7 +16675,7 @@ impl Editor {
 
                 editor.update_in(cx, |editor, window, cx| {
                     let range = target_range.to_point(target_buffer.read(cx));
-                    let range = editor.range_for_match(&range);
+                    let range = editor.range_for_match(&range, false);
                     let range = collapse_multiline_range(range);
 
                     if !split
@@ -21474,7 +21479,7 @@ impl Editor {
             .and_then(|e| e.to_str())
             .map(|a| a.to_string()));
 
-        let vim_mode = vim_enabled(cx);
+        let vim_mode = vim_flavor(cx).is_some();
 
         let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider;
         let copilot_enabled = edit_predictions_provider
@@ -22105,10 +22110,26 @@ fn edit_for_markdown_paste<'a>(
     (range, new_text)
 }
 
-fn vim_enabled(cx: &App) -> bool {
-    vim_mode_setting::VimModeSetting::try_get(cx)
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum VimFlavor {
+    Vim,
+    Helix,
+}
+
+pub fn vim_flavor(cx: &App) -> Option<VimFlavor> {
+    if vim_mode_setting::HelixModeSetting::try_get(cx)
+        .map(|helix_mode| helix_mode.0)
+        .unwrap_or(false)
+    {
+        Some(VimFlavor::Helix)
+    } else if vim_mode_setting::VimModeSetting::try_get(cx)
         .map(|vim_mode| vim_mode.0)
         .unwrap_or(false)
+    {
+        Some(VimFlavor::Vim)
+    } else {
+        None // neither vim nor helix mode
+    }
 }
 
 fn process_completion_for_edit(
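
Note on the vim_flavor change above: vim_enabled(cx) -> bool is replaced by vim_flavor(cx) -> Option<VimFlavor>, so call sites can distinguish Helix from plain Vim while the existing boolean checks stay one-liners. Typical migrations, matching the call sites in this diff:

    // old: if !vim_enabled(cx) { ... }
    if vim_flavor(cx).is_none() { /* non-modal editing path */ }

    // old: let vim_mode = vim_enabled(cx);
    let vim_mode = vim_flavor(cx).is_some();

    // newly possible: branch on the specific flavor where behavior differs
    match vim_flavor(cx) {
        Some(VimFlavor::Helix) => { /* helix-specific behavior */ }
        Some(VimFlavor::Vim) => { /* vim behavior */ }
        None => { /* plain editor behavior */ }
    }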

crates/editor/src/element.rs πŸ”—

@@ -5114,19 +5114,21 @@ impl EditorElement {
                 cx,
             )
         });
-        let Some((position, hover_popovers)) = hover_popovers else {
+        let Some((popover_position, hover_popovers)) = hover_popovers else {
             return;
         };
 
         // This is safe because we check on layout whether the required row is available
-        let hovered_row_layout =
-            &line_layouts[position.row().minus(visible_display_row_range.start) as usize];
+        let hovered_row_layout = &line_layouts[popover_position
+            .row()
+            .minus(visible_display_row_range.start)
+            as usize];
 
         // Compute Hovered Point
-        let x = hovered_row_layout.x_for_index(position.column() as usize)
+        let x = hovered_row_layout.x_for_index(popover_position.column() as usize)
             - Pixels::from(scroll_pixel_position.x);
         let y = Pixels::from(
-            position.row().as_f64() * ScrollPixelOffset::from(line_height)
+            popover_position.row().as_f64() * ScrollPixelOffset::from(line_height)
                 - scroll_pixel_position.y,
         );
         let hovered_point = content_origin + point(x, y);

crates/editor/src/git/blame.rs πŸ”—

@@ -602,6 +602,7 @@ impl GitBlame {
     }
 
     fn regenerate_on_edit(&mut self, cx: &mut Context<Self>) {
+        // todo(lw): hot foreground spawn
         self.regenerate_on_edit_task = cx.spawn(async move |this, cx| {
             cx.background_executor()
                 .timer(REGENERATE_ON_EDIT_DEBOUNCE_INTERVAL)
@@ -1114,18 +1115,19 @@ mod tests {
 
         let fs = FakeFs::new(cx.executor());
         let buffer_initial_text_len = rng.random_range(5..15);
-        let mut buffer_initial_text = Rope::from(
+        let mut buffer_initial_text = Rope::from_str(
             RandomCharIter::new(&mut rng)
                 .take(buffer_initial_text_len)
                 .collect::<String>()
                 .as_str(),
+            cx.background_executor(),
         );
 
         let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5);
         newline_ixs.sort_unstable();
         for newline_ix in newline_ixs.into_iter().rev() {
             let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right);
-            buffer_initial_text.replace(newline_ix..newline_ix, "\n");
+            buffer_initial_text.replace(newline_ix..newline_ix, "\n", cx.background_executor());
         }
         log::info!("initial buffer text: {:?}", buffer_initial_text);
 

crates/editor/src/hover_popover.rs πŸ”—

@@ -797,23 +797,22 @@ impl HoverState {
                 })
             })?;
         let mut point = anchor.to_display_point(&snapshot.display_snapshot);
-
         // Clamp the point within the visible rows in case the popup source spans multiple lines
-        if point.row() < visible_rows.start {
-            point = crate::movement::down_by_rows(
+        if visible_rows.end <= point.row() {
+            point = crate::movement::up_by_rows(
                 &snapshot.display_snapshot,
                 point,
-                (visible_rows.start - point.row()).0,
+                1 + (point.row() - visible_rows.end).0,
                 text::SelectionGoal::None,
                 true,
                 text_layout_details,
             )
             .0;
-        } else if visible_rows.end <= point.row() {
-            point = crate::movement::up_by_rows(
+        } else if point.row() < visible_rows.start {
+            point = crate::movement::down_by_rows(
                 &snapshot.display_snapshot,
                 point,
-                (visible_rows.end - point.row()).0,
+                (visible_rows.start - point.row()).0,
                 text::SelectionGoal::None,
                 true,
                 text_layout_details,
@@ -821,6 +820,11 @@ impl HoverState {
             .0;
         }
 
+        if !visible_rows.contains(&point.row()) {
+            log::error!("Hover popover point out of bounds after moving");
+            return None;
+        }
+
         let mut elements = Vec::new();
 
         if let Some(diagnostic_popover) = self.diagnostic_popover.as_ref() {

crates/editor/src/inlays.rs πŸ”—

@@ -59,10 +59,10 @@ impl Inlay {
     pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self {
         let mut text = hint.text();
         if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') {
-            text.push(" ");
+            text.push_small(" ");
         }
         if hint.padding_left && text.chars_at(0).next() != Some(' ') {
-            text.push_front(" ");
+            text.push_front_small(" ");
         }
         Self {
             id,
@@ -72,11 +72,11 @@ impl Inlay {
     }
 
     #[cfg(any(test, feature = "test-support"))]
-    pub fn mock_hint(id: usize, position: Anchor, text: impl Into<Rope>) -> Self {
+    pub fn mock_hint(id: usize, position: Anchor, text: Rope) -> Self {
         Self {
             id: InlayId::Hint(id),
             position,
-            content: InlayContent::Text(text.into()),
+            content: InlayContent::Text(text),
         }
     }
 
@@ -88,19 +88,19 @@ impl Inlay {
         }
     }
 
-    pub fn edit_prediction<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
+    pub fn edit_prediction(id: usize, position: Anchor, text: Rope) -> Self {
         Self {
             id: InlayId::EditPrediction(id),
             position,
-            content: InlayContent::Text(text.into()),
+            content: InlayContent::Text(text),
         }
     }
 
-    pub fn debugger<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
+    pub fn debugger(id: usize, position: Anchor, text: Rope) -> Self {
         Self {
             id: InlayId::DebuggerValue(id),
             position,
-            content: InlayContent::Text(text.into()),
+            content: InlayContent::Text(text),
         }
     }
 
@@ -108,7 +108,7 @@ impl Inlay {
         static COLOR_TEXT: OnceLock<Rope> = OnceLock::new();
         match &self.content {
             InlayContent::Text(text) => text,
-            InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("β—Ό")),
+            InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from_str_small("β—Ό")),
         }
     }
 

crates/editor/src/inlays/inlay_hints.rs πŸ”—

@@ -1,5 +1,4 @@
 use std::{
-    collections::hash_map,
     ops::{ControlFlow, Range},
     time::Duration,
 };
@@ -49,8 +48,8 @@ pub struct LspInlayHintData {
     allowed_hint_kinds: HashSet<Option<InlayHintKind>>,
     invalidate_debounce: Option<Duration>,
     append_debounce: Option<Duration>,
-    hint_refresh_tasks: HashMap<BufferId, HashMap<Vec<Range<BufferRow>>, Vec<Task<()>>>>,
-    hint_chunk_fetched: HashMap<BufferId, (Global, HashSet<Range<BufferRow>>)>,
+    hint_refresh_tasks: HashMap<BufferId, Vec<Task<()>>>,
+    hint_chunk_fetching: HashMap<BufferId, (Global, HashSet<Range<BufferRow>>)>,
     invalidate_hints_for_buffers: HashSet<BufferId>,
     pub added_hints: HashMap<InlayId, Option<InlayHintKind>>,
 }
@@ -63,7 +62,7 @@ impl LspInlayHintData {
             enabled_in_settings: settings.enabled,
             hint_refresh_tasks: HashMap::default(),
             added_hints: HashMap::default(),
-            hint_chunk_fetched: HashMap::default(),
+            hint_chunk_fetching: HashMap::default(),
             invalidate_hints_for_buffers: HashSet::default(),
             invalidate_debounce: debounce_value(settings.edit_debounce_ms),
             append_debounce: debounce_value(settings.scroll_debounce_ms),
@@ -99,9 +98,8 @@ impl LspInlayHintData {
 
     pub fn clear(&mut self) {
         self.hint_refresh_tasks.clear();
-        self.hint_chunk_fetched.clear();
+        self.hint_chunk_fetching.clear();
         self.added_hints.clear();
-        self.invalidate_hints_for_buffers.clear();
     }
 
     /// Checks inlay hint settings for enabled hint kinds and general enabled state.
@@ -199,7 +197,7 @@ impl LspInlayHintData {
     ) {
         for buffer_id in removed_buffer_ids {
             self.hint_refresh_tasks.remove(buffer_id);
-            self.hint_chunk_fetched.remove(buffer_id);
+            self.hint_chunk_fetching.remove(buffer_id);
         }
     }
 }
@@ -211,7 +209,10 @@ pub enum InlayHintRefreshReason {
     SettingsChange(InlayHintSettings),
     NewLinesShown,
     BufferEdited(BufferId),
-    RefreshRequested(LanguageServerId),
+    RefreshRequested {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     ExcerptsRemoved(Vec<ExcerptId>),
 }
 
@@ -296,7 +297,7 @@ impl Editor {
             | InlayHintRefreshReason::Toggle(_)
             | InlayHintRefreshReason::SettingsChange(_) => true,
             InlayHintRefreshReason::NewLinesShown
-            | InlayHintRefreshReason::RefreshRequested(_)
+            | InlayHintRefreshReason::RefreshRequested { .. }
             | InlayHintRefreshReason::ExcerptsRemoved(_) => false,
             InlayHintRefreshReason::BufferEdited(buffer_id) => {
                 let Some(affected_language) = self
@@ -370,48 +371,45 @@ impl Editor {
             let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else {
                 continue;
             };
-            let fetched_tasks = inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default();
+
+            let (fetched_for_version, fetched_chunks) = inlay_hints
+                .hint_chunk_fetching
+                .entry(buffer_id)
+                .or_default();
             if visible_excerpts
                 .buffer_version
-                .changed_since(&fetched_tasks.0)
+                .changed_since(fetched_for_version)
             {
-                fetched_tasks.1.clear();
-                fetched_tasks.0 = visible_excerpts.buffer_version.clone();
+                *fetched_for_version = visible_excerpts.buffer_version.clone();
+                fetched_chunks.clear();
                 inlay_hints.hint_refresh_tasks.remove(&buffer_id);
             }
 
-            let applicable_chunks =
-                semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx);
+            let known_chunks = if ignore_previous_fetches {
+                None
+            } else {
+                Some((fetched_for_version.clone(), fetched_chunks.clone()))
+            };
 
-            match inlay_hints
+            let mut applicable_chunks =
+                semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx);
+            applicable_chunks.retain(|chunk| fetched_chunks.insert(chunk.clone()));
+            if applicable_chunks.is_empty() && !ignore_previous_fetches {
+                continue;
+            }
+            inlay_hints
                 .hint_refresh_tasks
                 .entry(buffer_id)
                 .or_default()
-                .entry(applicable_chunks)
-            {
-                hash_map::Entry::Occupied(mut o) => {
-                    if invalidate_cache.should_invalidate() || ignore_previous_fetches {
-                        o.get_mut().push(spawn_editor_hints_refresh(
-                            buffer_id,
-                            invalidate_cache,
-                            ignore_previous_fetches,
-                            debounce,
-                            visible_excerpts,
-                            cx,
-                        ));
-                    }
-                }
-                hash_map::Entry::Vacant(v) => {
-                    v.insert(Vec::new()).push(spawn_editor_hints_refresh(
-                        buffer_id,
-                        invalidate_cache,
-                        ignore_previous_fetches,
-                        debounce,
-                        visible_excerpts,
-                        cx,
-                    ));
-                }
-            }
+                .push(spawn_editor_hints_refresh(
+                    buffer_id,
+                    invalidate_cache,
+                    debounce,
+                    visible_excerpts,
+                    known_chunks,
+                    applicable_chunks,
+                    cx,
+                ));
         }
     }
 
@@ -506,9 +504,13 @@ impl Editor {
             }
             InlayHintRefreshReason::NewLinesShown => InvalidationStrategy::None,
             InlayHintRefreshReason::BufferEdited(_) => InvalidationStrategy::BufferEdited,
-            InlayHintRefreshReason::RefreshRequested(server_id) => {
-                InvalidationStrategy::RefreshRequested(*server_id)
-            }
+            InlayHintRefreshReason::RefreshRequested {
+                server_id,
+                request_id,
+            } => InvalidationStrategy::RefreshRequested {
+                server_id: *server_id,
+                request_id: *request_id,
+            },
         };
 
         match &mut self.inlay_hints {
@@ -718,44 +720,29 @@ impl Editor {
     fn inlay_hints_for_buffer(
         &mut self,
         invalidate_cache: InvalidationStrategy,
-        ignore_previous_fetches: bool,
         buffer_excerpts: VisibleExcerpts,
+        known_chunks: Option<(Global, HashSet<Range<BufferRow>>)>,
         cx: &mut Context<Self>,
     ) -> Option<Vec<Task<(Range<BufferRow>, anyhow::Result<CacheInlayHints>)>>> {
         let semantics_provider = self.semantics_provider()?;
-        let inlay_hints = self.inlay_hints.as_mut()?;
-        let buffer_id = buffer_excerpts.buffer.read(cx).remote_id();
 
         let new_hint_tasks = semantics_provider
             .inlay_hints(
                 invalidate_cache,
                 buffer_excerpts.buffer,
                 buffer_excerpts.ranges,
-                inlay_hints
-                    .hint_chunk_fetched
-                    .get(&buffer_id)
-                    .filter(|_| !ignore_previous_fetches && !invalidate_cache.should_invalidate())
-                    .cloned(),
+                known_chunks,
                 cx,
             )
             .unwrap_or_default();
 
-        let (known_version, known_chunks) =
-            inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default();
-        if buffer_excerpts.buffer_version.changed_since(known_version) {
-            known_chunks.clear();
-            *known_version = buffer_excerpts.buffer_version;
-        }
-
-        let mut hint_tasks = Vec::new();
+        let mut hint_tasks = None;
         for (row_range, new_hints_task) in new_hint_tasks {
-            let inserted = known_chunks.insert(row_range.clone());
-            if inserted || ignore_previous_fetches || invalidate_cache.should_invalidate() {
-                hint_tasks.push(cx.spawn(async move |_, _| (row_range, new_hints_task.await)));
-            }
+            hint_tasks
+                .get_or_insert_with(Vec::new)
+                .push(cx.spawn(async move |_, _| (row_range, new_hints_task.await)));
         }
-
-        Some(hint_tasks)
+        hint_tasks
     }
 
     fn apply_fetched_hints(
@@ -793,20 +780,28 @@ impl Editor {
         let excerpts = self.buffer.read(cx).excerpt_ids();
         let hints_to_insert = new_hints
             .into_iter()
-            .filter_map(|(chunk_range, hints_result)| match hints_result {
-                Ok(new_hints) => Some(new_hints),
-                Err(e) => {
-                    log::error!(
-                        "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}"
-                    );
-                    if let Some((for_version, chunks_fetched)) =
-                        inlay_hints.hint_chunk_fetched.get_mut(&buffer_id)
-                    {
-                        if for_version == &query_version {
-                            chunks_fetched.remove(&chunk_range);
+            .filter_map(|(chunk_range, hints_result)| {
+                let chunks_fetched = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id);
+                match hints_result {
+                    Ok(new_hints) => {
+                        if new_hints.is_empty() {
+                            if let Some((_, chunks_fetched)) = chunks_fetched {
+                                chunks_fetched.remove(&chunk_range);
+                            }
                         }
+                        Some(new_hints)
+                    }
+                    Err(e) => {
+                        log::error!(
+                            "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}"
+                        );
+                        if let Some((for_version, chunks_fetched)) = chunks_fetched {
+                            if for_version == &query_version {
+                                chunks_fetched.remove(&chunk_range);
+                            }
+                        }
+                        None
                     }
-                    None
                 }
             })
             .flat_map(|hints| hints.into_values())
@@ -856,9 +851,10 @@ struct VisibleExcerpts {
 fn spawn_editor_hints_refresh(
     buffer_id: BufferId,
     invalidate_cache: InvalidationStrategy,
-    ignore_previous_fetches: bool,
     debounce: Option<Duration>,
     buffer_excerpts: VisibleExcerpts,
+    known_chunks: Option<(Global, HashSet<Range<BufferRow>>)>,
+    applicable_chunks: Vec<Range<BufferRow>>,
     cx: &mut Context<'_, Editor>,
 ) -> Task<()> {
     cx.spawn(async move |editor, cx| {
@@ -869,12 +865,7 @@ fn spawn_editor_hints_refresh(
         let query_version = buffer_excerpts.buffer_version.clone();
         let Some(hint_tasks) = editor
             .update(cx, |editor, cx| {
-                editor.inlay_hints_for_buffer(
-                    invalidate_cache,
-                    ignore_previous_fetches,
-                    buffer_excerpts,
-                    cx,
-                )
+                editor.inlay_hints_for_buffer(invalidate_cache, buffer_excerpts, known_chunks, cx)
             })
             .ok()
         else {
@@ -882,6 +873,19 @@ fn spawn_editor_hints_refresh(
         };
         let hint_tasks = hint_tasks.unwrap_or_default();
         if hint_tasks.is_empty() {
+            editor
+                .update(cx, |editor, _| {
+                    if let Some((_, hint_chunk_fetching)) = editor
+                        .inlay_hints
+                        .as_mut()
+                        .and_then(|inlay_hints| inlay_hints.hint_chunk_fetching.get_mut(&buffer_id))
+                    {
+                        for applicable_chunks in &applicable_chunks {
+                            hint_chunk_fetching.remove(applicable_chunks);
+                        }
+                    }
+                })
+                .ok();
             return;
         }
         let new_hints = join_all(hint_tasks).await;
@@ -1102,7 +1106,10 @@ pub mod tests {
         editor
             .update(cx, |editor, _window, cx| {
                 editor.refresh_inlay_hints(
-                    InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()),
+                    InlayHintRefreshReason::RefreshRequested {
+                        server_id: fake_server.server.server_id(),
+                        request_id: Some(1),
+                    },
                     cx,
                 );
             })
@@ -1958,15 +1965,8 @@ pub mod tests {
     async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) {
         init_test(cx, |settings| {
             settings.defaults.inlay_hints = Some(InlayHintSettingsContent {
-                show_value_hints: Some(true),
                 enabled: Some(true),
-                edit_debounce_ms: Some(0),
-                scroll_debounce_ms: Some(0),
-                show_type_hints: Some(true),
-                show_parameter_hints: Some(true),
-                show_other_hints: Some(true),
-                show_background: Some(false),
-                toggle_on_modifiers_press: None,
+                ..InlayHintSettingsContent::default()
             })
         });
 
@@ -2044,6 +2044,7 @@ pub mod tests {
             cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx));
         cx.executor().run_until_parked();
         let _fake_server = fake_servers.next().await.unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
         cx.executor().run_until_parked();
 
         let ranges = lsp_request_ranges
@@ -2129,6 +2130,7 @@ pub mod tests {
                 );
             })
             .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
         cx.executor().run_until_parked();
         editor.update(cx, |_, _, _| {
             let ranges = lsp_request_ranges
@@ -2145,6 +2147,7 @@ pub mod tests {
                 editor.handle_input("++++more text++++", window, cx);
             })
             .unwrap();
+        cx.executor().advance_clock(Duration::from_secs(1));
         cx.executor().run_until_parked();
         editor.update(cx, |editor, _window, cx| {
             let mut ranges = lsp_request_ranges.lock().drain(..).collect::<Vec<_>>();
@@ -3887,7 +3890,10 @@ let c = 3;"#
         editor
             .update(cx, |editor, _, cx| {
                 editor.refresh_inlay_hints(
-                    InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()),
+                    InlayHintRefreshReason::RefreshRequested {
+                        server_id: fake_server.server.server_id(),
+                        request_id: Some(1),
+                    },
                     cx,
                 );
             })
@@ -4022,7 +4028,7 @@ let c = 3;"#
         let mut all_fetched_hints = Vec::new();
         for buffer in editor.buffer.read(cx).all_buffers() {
             lsp_store.update(cx, |lsp_store, cx| {
-                let hints = &lsp_store.latest_lsp_data(&buffer, cx).inlay_hints();
+                let hints = lsp_store.latest_lsp_data(&buffer, cx).inlay_hints();
                 all_cached_labels.extend(hints.all_cached_hints().into_iter().map(|hint| {
                     let mut label = hint.text().to_string();
                     if hint.padding_left {
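
Note on the refresh bookkeeping above: the per-buffer state collapses into hint_chunk_fetching: HashMap<BufferId, (Global, HashSet<Range<BufferRow>>)>, and the dedup logic is small enough to restate. A condensed sketch of the flow, with the surrounding editor state assumed:

    let (fetched_for_version, fetched_chunks) = inlay_hints
        .hint_chunk_fetching
        .entry(buffer_id)
        .or_default();

    // a newer buffer version invalidates everything fetched so far
    if buffer_version.changed_since(fetched_for_version) {
        *fetched_for_version = buffer_version.clone();
        fetched_chunks.clear();
    }

    // keep only chunks not fetched yet; HashSet::insert returns false for duplicates
    applicable_chunks.retain(|chunk| fetched_chunks.insert(chunk.clone()));

    // if a chunk's request later fails or yields no hints, it is removed from
    // fetched_chunks again so the next refresh can retry it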

crates/editor/src/items.rs πŸ”—

@@ -1587,11 +1587,12 @@ impl SearchableItem for Editor {
         &mut self,
         index: usize,
         matches: &[Range<Anchor>],
+        collapse: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
         self.unfold_ranges(&[matches[index].clone()], false, true, cx);
-        let range = self.range_for_match(&matches[index]);
+        let range = self.range_for_match(&matches[index], collapse);
         self.change_selections(Default::default(), window, cx, |s| {
             s.select_ranges([range]);
         })

crates/editor/src/linked_editing_ranges.rs πŸ”—

@@ -1,5 +1,5 @@
 use collections::HashMap;
-use gpui::{Context, Window};
+use gpui::{AppContext, Context, Window};
 use itertools::Itertools;
 use std::{ops::Range, time::Duration};
 use text::{AnchorRangeExt, BufferId, ToPoint};
@@ -59,8 +59,9 @@ pub(super) fn refresh_linked_ranges(
         let mut applicable_selections = Vec::new();
         editor
             .update(cx, |editor, cx| {
-                let selections = editor.selections.all::<usize>(&editor.display_snapshot(cx));
-                let snapshot = editor.buffer.read(cx).snapshot(cx);
+                let display_snapshot = editor.display_snapshot(cx);
+                let selections = editor.selections.all::<usize>(&display_snapshot);
+                let snapshot = display_snapshot.buffer_snapshot();
                 let buffer = editor.buffer.read(cx);
                 for selection in selections {
                     let cursor_position = selection.head();
@@ -90,14 +91,16 @@ pub(super) fn refresh_linked_ranges(
         let highlights = project
             .update(cx, |project, cx| {
                 let mut linked_edits_tasks = vec![];
-
                 for (buffer, start, end) in &applicable_selections {
-                    let snapshot = buffer.read(cx).snapshot();
-                    let buffer_id = buffer.read(cx).remote_id();
-
                     let linked_edits_task = project.linked_edits(buffer, *start, cx);
-                    let highlights = move || async move {
+                    let cx = cx.to_async();
+                    let highlights = async move {
                         let edits = linked_edits_task.await.log_err()?;
+                        let snapshot = cx
+                            .read_entity(&buffer, |buffer, _| buffer.snapshot())
+                            .ok()?;
+                        let buffer_id = snapshot.remote_id();
+
                         // Find the range containing our current selection.
                         // We might not find one, because the selection contains both the start and end of the contained range
                         // (think of selecting <`html>foo`</html> - even though there's a matching closing tag, the selection goes beyond the range of the opening tag)
@@ -128,7 +131,7 @@ pub(super) fn refresh_linked_ranges(
                         siblings.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0, &snapshot));
                         Some((buffer_id, siblings))
                     };
-                    linked_edits_tasks.push(highlights());
+                    linked_edits_tasks.push(highlights);
                 }
                 linked_edits_tasks
             })
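
Note on the change above: the linked-ranges refresh now defers reading the buffer snapshot until the async task actually runs, instead of capturing a snapshot per selection up front. A condensed sketch of the pattern, with the project/editor wiring assumed:

    let linked_edits_task = project.linked_edits(buffer, *start, cx);
    let cx = cx.to_async();
    let highlights = async move {
        let edits = linked_edits_task.await.log_err()?;
        // read the buffer only once the edits are available
        let snapshot = cx
            .read_entity(&buffer, |buffer, _| buffer.snapshot())
            .ok()?;
        let buffer_id = snapshot.remote_id();
        // ...compute and sort sibling ranges against snapshot...
        Some((buffer_id, siblings))
    };
    linked_edits_tasks.push(highlights);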

crates/editor/src/movement.rs πŸ”—

@@ -878,6 +878,7 @@ mod tests {
     use gpui::{AppContext as _, font, px};
     use language::Capability;
     use project::{Project, project_settings::DiagnosticSeverity};
+    use rope::Rope;
     use settings::SettingsStore;
     use util::post_inc;
 
@@ -1024,22 +1025,22 @@ mod tests {
                     Inlay::edit_prediction(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_before(offset),
-                        "test",
+                        Rope::from_str_small("test"),
                     ),
                     Inlay::edit_prediction(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_after(offset),
-                        "test",
+                        Rope::from_str_small("test"),
                     ),
                     Inlay::mock_hint(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_before(offset),
-                        "test",
+                        Rope::from_str_small("test"),
                     ),
                     Inlay::mock_hint(
                         post_inc(&mut id),
                         buffer_snapshot.anchor_after(offset),
-                        "test",
+                        Rope::from_str_small("test"),
                     ),
                 ]
             })

crates/editor/src/signature_help.rs πŸ”—

@@ -193,7 +193,7 @@ impl Editor {
 
                         if let Some(language) = language {
                             for signature in &mut signature_help.signatures {
-                                let text = Rope::from(signature.label.as_ref());
+                                let text = Rope::from_str_small(signature.label.as_ref());
                                 let highlights = language
                                     .highlight_text(&text, 0..signature.label.len())
                                     .into_iter()

crates/extension_cli/src/main.rs πŸ”—

@@ -145,6 +145,10 @@ fn extension_provides(manifest: &ExtensionManifest) -> BTreeSet<ExtensionProvide
         provides.insert(ExtensionProvides::ContextServers);
     }
 
+    if !manifest.agent_servers.is_empty() {
+        provides.insert(ExtensionProvides::AgentServers);
+    }
+
     if manifest.snippets.is_some() {
         provides.insert(ExtensionProvides::Snippets);
     }

crates/extension_host/src/extension_host.rs πŸ”—

@@ -360,7 +360,7 @@ impl ExtensionStore {
                         }
                         extension_id = reload_rx.next() => {
                             let Some(extension_id) = extension_id else { break; };
-                            this.update( cx, |this, _| {
+                            this.update(cx, |this, _| {
                                 this.modified_extensions.extend(extension_id);
                             })?;
                             index_changed = true;
@@ -608,7 +608,7 @@ impl ExtensionStore {
                     .extension_index
                     .extensions
                     .contains_key(extension_id.as_ref());
-                !is_already_installed
+                !is_already_installed && !SUPPRESSED_EXTENSIONS.contains(&extension_id.as_ref())
             })
             .cloned()
             .collect::<Vec<_>>();
@@ -1468,6 +1468,7 @@ impl ExtensionStore {
         let extensions_dir = self.installed_dir.clone();
         let index_path = self.index_path.clone();
         let proxy = self.proxy.clone();
+        let executor = cx.background_executor().clone();
         cx.background_spawn(async move {
             let start_time = Instant::now();
             let mut index = ExtensionIndex::default();
@@ -1501,10 +1502,14 @@ impl ExtensionStore {
             }
 
             if let Ok(index_json) = serde_json::to_string_pretty(&index) {
-                fs.save(&index_path, &index_json.as_str().into(), Default::default())
-                    .await
-                    .context("failed to save extension index")
-                    .log_err();
+                fs.save(
+                    &index_path,
+                    &Rope::from_str(&index_json, &executor),
+                    Default::default(),
+                )
+                .await
+                .context("failed to save extension index")
+                .log_err();
             }
 
             log::info!("rebuilt extension index in {:?}", start_time.elapsed());
@@ -1671,7 +1676,7 @@ impl ExtensionStore {
                 let manifest_toml = toml::to_string(&loaded_extension.manifest)?;
                 fs.save(
                     &tmp_dir.join(EXTENSION_TOML),
-                    &Rope::from(manifest_toml),
+                    &Rope::from_str_small(&manifest_toml),
                     language::LineEnding::Unix,
                 )
                 .await?;

crates/extensions_ui/src/extensions_ui.rs πŸ”—

@@ -225,6 +225,9 @@ impl ExtensionFilter {
 
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
 enum Feature {
+    AgentClaude,
+    AgentCodex,
+    AgentGemini,
     ExtensionRuff,
     ExtensionTailwind,
     Git,
@@ -244,6 +247,9 @@ fn keywords_by_feature() -> &'static BTreeMap<Feature, Vec<&'static str>> {
     static KEYWORDS_BY_FEATURE: OnceLock<BTreeMap<Feature, Vec<&'static str>>> = OnceLock::new();
     KEYWORDS_BY_FEATURE.get_or_init(|| {
         BTreeMap::from_iter([
+            (Feature::AgentClaude, vec!["claude", "claude code"]),
+            (Feature::AgentCodex, vec!["codex", "codex cli"]),
+            (Feature::AgentGemini, vec!["gemini", "gemini cli"]),
             (Feature::ExtensionRuff, vec!["ruff"]),
             (Feature::ExtensionTailwind, vec!["tail", "tailwind"]),
             (Feature::Git, vec!["git"]),
@@ -799,25 +805,22 @@ impl ExtensionsPage {
             )
             .child(
                 h_flex()
-                    .gap_2()
+                    .gap_1()
                     .justify_between()
                     .child(
-                        h_flex()
-                            .gap_1()
-                            .child(
-                                Icon::new(IconName::Person)
-                                    .size(IconSize::XSmall)
-                                    .color(Color::Muted),
-                            )
-                            .child(
-                                Label::new(extension.manifest.authors.join(", "))
-                                    .size(LabelSize::Small)
-                                    .color(Color::Muted)
-                                    .truncate(),
-                            ),
+                        Icon::new(IconName::Person)
+                            .size(IconSize::XSmall)
+                            .color(Color::Muted),
+                    )
+                    .child(
+                        Label::new(extension.manifest.authors.join(", "))
+                            .size(LabelSize::Small)
+                            .color(Color::Muted)
+                            .truncate(),
                     )
                     .child(
                         h_flex()
+                            .ml_auto()
                             .gap_1()
                             .child(
                                 IconButton::new(
@@ -1422,6 +1425,24 @@ impl ExtensionsPage {
 
         for feature in &self.upsells {
             let banner = match feature {
+                Feature::AgentClaude => self.render_feature_upsell_banner(
+                    "Claude Code support is built-in to Zed!".into(),
+                    "https://zed.dev/docs/ai/external-agents#claude-code".into(),
+                    false,
+                    cx,
+                ),
+                Feature::AgentCodex => self.render_feature_upsell_banner(
+                    "Codex CLI support is built-in to Zed!".into(),
+                    "https://zed.dev/docs/ai/external-agents#codex-cli".into(),
+                    false,
+                    cx,
+                ),
+                Feature::AgentGemini => self.render_feature_upsell_banner(
+                    "Gemini CLI support is built-in to Zed!".into(),
+                    "https://zed.dev/docs/ai/external-agents#gemini-cli".into(),
+                    false,
+                    cx,
+                ),
                 Feature::ExtensionRuff => self.render_feature_upsell_banner(
                     "Ruff (linter for Python) support is built-in to Zed!".into(),
                     "https://zed.dev/docs/languages/python#code-formatting--linting".into(),

crates/file_finder/src/open_path_prompt.rs πŸ”—

@@ -711,7 +711,9 @@ impl PickerDelegate for OpenPathDelegate {
 
         match &self.directory_state {
             DirectoryState::List { parent_path, .. } => {
-                let (label, indices) = if *parent_path == self.prompt_root {
+                let (label, indices) = if is_current_dir_candidate {
+                    ("open this directory".to_string(), vec![])
+                } else if *parent_path == self.prompt_root {
                     match_positions.iter_mut().for_each(|position| {
                         *position += self.prompt_root.len();
                     });
@@ -719,8 +721,6 @@ impl PickerDelegate for OpenPathDelegate {
                         format!("{}{}", self.prompt_root, candidate.path.string),
                         match_positions,
                     )
-                } else if is_current_dir_candidate {
-                    ("open this directory".to_string(), vec![])
                 } else {
                     (candidate.path.string, match_positions)
                 };

crates/fs/src/fs.rs πŸ”—

@@ -377,7 +377,7 @@ impl Fs for RealFs {
 
         #[cfg(windows)]
         if smol::fs::metadata(&target).await?.is_dir() {
-            let status = smol::process::Command::new("cmd")
+            let status = new_smol_command("cmd")
                 .args(["/C", "mklink", "/J"])
                 .args([path, target.as_path()])
                 .status()

crates/git_hosting_providers/Cargo.toml πŸ”—

@@ -23,6 +23,7 @@ serde.workspace = true
 serde_json.workspace = true
 settings.workspace = true
 url.workspace = true
+urlencoding.workspace = true
 util.workspace = true
 
 [dev-dependencies]

crates/git_hosting_providers/src/providers/gitee.rs πŸ”—

@@ -1,5 +1,11 @@
-use std::str::FromStr;
-
+use std::{str::FromStr, sync::Arc};
+
+use anyhow::{Context as _, Result, bail};
+use async_trait::async_trait;
+use futures::AsyncReadExt;
+use gpui::SharedString;
+use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
+use serde::Deserialize;
 use url::Url;
 
 use git::{
@@ -9,6 +15,55 @@ use git::{
 
 pub struct Gitee;
 
+#[derive(Debug, Deserialize)]
+struct CommitDetails {
+    author: Option<Author>,
+}
+
+#[derive(Debug, Deserialize)]
+struct Author {
+    avatar_url: String,
+}
+
+impl Gitee {
+    async fn fetch_gitee_commit_author(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: &str,
+        client: &Arc<dyn HttpClient>,
+    ) -> Result<Option<Author>> {
+        let url = format!("https://gitee.com/api/v5/repos/{repo_owner}/{repo}/commits/{commit}");
+
+        let request = Request::get(&url)
+            .header("Content-Type", "application/json")
+            .follow_redirects(http_client::RedirectPolicy::FollowAll);
+
+        let mut response = client
+            .send(request.body(AsyncBody::default())?)
+            .await
+            .with_context(|| format!("error fetching Gitee commit details at {:?}", url))?;
+
+        let mut body = Vec::new();
+        response.body_mut().read_to_end(&mut body).await?;
+
+        if response.status().is_client_error() {
+            let text = String::from_utf8_lossy(body.as_slice());
+            bail!(
+                "status error {}, response: {text:?}",
+                response.status().as_u16()
+            );
+        }
+
+        let body_str = std::str::from_utf8(&body)?;
+
+        serde_json::from_str::<CommitDetails>(body_str)
+            .map(|commit| commit.author)
+            .context("failed to deserialize Gitee commit details")
+    }
+}
+
+#[async_trait]
 impl GitHostingProvider for Gitee {
     fn name(&self) -> String {
         "Gitee".to_string()
@@ -19,7 +74,7 @@ impl GitHostingProvider for Gitee {
     }
 
     fn supports_avatars(&self) -> bool {
-        false
+        true
     }
 
     fn format_line_number(&self, line: u32) -> String {
@@ -80,6 +135,26 @@ impl GitHostingProvider for Gitee {
         );
         permalink
     }
+
+    async fn commit_author_avatar_url(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: SharedString,
+        http_client: Arc<dyn HttpClient>,
+    ) -> Result<Option<Url>> {
+        let commit = commit.to_string();
+        let avatar_url = self
+            .fetch_gitee_commit_author(repo_owner, repo, &commit, &http_client)
+            .await?
+            .map(|author| -> Result<Url, url::ParseError> {
+                let mut url = Url::parse(&author.avatar_url)?;
+                url.set_query(Some("width=128"));
+                Ok(url)
+            })
+            .transpose()?;
+        Ok(avatar_url)
+    }
 }
 
 #[cfg(test)]
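
Outside the provider plumbing, the only transformation the Gitee avatar URL gets is a `width=128` size hint. A hedged, standalone sketch of that step with the `url` crate; the function name and the sample URL are illustrative, only the query value comes from the diff.

```rust
use url::Url;

// Append the size hint to an avatar URL, as the provider does after
// fetching commit details. Returns a parse error for malformed input.
fn sized_avatar_url(avatar_url: &str) -> Result<Url, url::ParseError> {
    let mut url = Url::parse(avatar_url)?;
    // `width=128` matches what the diff sets on the returned URL.
    url.set_query(Some("width=128"));
    Ok(url)
}

fn main() -> Result<(), url::ParseError> {
    // Hypothetical avatar URL, used only to exercise the helper.
    let url = sized_avatar_url("https://foruda.gitee.com/avatar/123/456.png")?;
    assert_eq!(
        url.as_str(),
        "https://foruda.gitee.com/avatar/123/456.png?width=128"
    );
    Ok(())
}
```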

crates/git_hosting_providers/src/providers/gitlab.rs πŸ”—

@@ -1,6 +1,11 @@
-use std::str::FromStr;
-
-use anyhow::{Result, bail};
+use std::{str::FromStr, sync::Arc};
+
+use anyhow::{Context as _, Result, bail};
+use async_trait::async_trait;
+use futures::AsyncReadExt;
+use gpui::SharedString;
+use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
+use serde::Deserialize;
 use url::Url;
 
 use git::{
@@ -10,6 +15,16 @@ use git::{
 
 use crate::get_host_from_git_remote_url;
 
+#[derive(Debug, Deserialize)]
+struct CommitDetails {
+    author_email: String,
+}
+
+#[derive(Debug, Deserialize)]
+struct AvatarInfo {
+    avatar_url: String,
+}
+
 #[derive(Debug)]
 pub struct Gitlab {
     name: String,
@@ -46,8 +61,79 @@ impl Gitlab {
             Url::parse(&format!("https://{}", host))?,
         ))
     }
+
+    async fn fetch_gitlab_commit_author(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: &str,
+        client: &Arc<dyn HttpClient>,
+    ) -> Result<Option<AvatarInfo>> {
+        let Some(host) = self.base_url.host_str() else {
+            bail!("failed to get host from gitlab base url");
+        };
+        let project_path = format!("{}/{}", repo_owner, repo);
+        let project_path_encoded = urlencoding::encode(&project_path);
+        let url = format!(
+            "https://{host}/api/v4/projects/{project_path_encoded}/repository/commits/{commit}"
+        );
+
+        let request = Request::get(&url)
+            .header("Content-Type", "application/json")
+            .follow_redirects(http_client::RedirectPolicy::FollowAll);
+
+        let mut response = client
+            .send(request.body(AsyncBody::default())?)
+            .await
+            .with_context(|| format!("error fetching GitLab commit details at {:?}", url))?;
+
+        let mut body = Vec::new();
+        response.body_mut().read_to_end(&mut body).await?;
+
+        if response.status().is_client_error() {
+            let text = String::from_utf8_lossy(body.as_slice());
+            bail!(
+                "status error {}, response: {text:?}",
+                response.status().as_u16()
+            );
+        }
+
+        let body_str = std::str::from_utf8(&body)?;
+
+        let author_email = serde_json::from_str::<CommitDetails>(body_str)
+            .map(|commit| commit.author_email)
+            .context("failed to deserialize GitLab commit details")?;
+
+        let avatar_info_url = format!("https://{host}/api/v4/avatar?email={author_email}");
+
+        let request = Request::get(&avatar_info_url)
+            .header("Content-Type", "application/json")
+            .follow_redirects(http_client::RedirectPolicy::FollowAll);
+
+        let mut response = client
+            .send(request.body(AsyncBody::default())?)
+            .await
+            .with_context(|| format!("error fetching GitLab avatar info at {:?}", url))?;
+
+        let mut body = Vec::new();
+        response.body_mut().read_to_end(&mut body).await?;
+
+        if response.status().is_client_error() {
+            let text = String::from_utf8_lossy(body.as_slice());
+            bail!(
+                "status error {}, response: {text:?}",
+                response.status().as_u16()
+            );
+        }
+
+        let body_str = std::str::from_utf8(&body)?;
+
+        serde_json::from_str::<Option<AvatarInfo>>(body_str)
+            .context("failed to deserialize GitLab avatar info")
+    }
 }
 
+#[async_trait]
 impl GitHostingProvider for Gitlab {
     fn name(&self) -> String {
         self.name.clone()
@@ -58,7 +144,7 @@ impl GitHostingProvider for Gitlab {
     }
 
     fn supports_avatars(&self) -> bool {
-        false
+        true
     }
 
     fn format_line_number(&self, line: u32) -> String {
@@ -122,6 +208,39 @@ impl GitHostingProvider for Gitlab {
         );
         permalink
     }
+
+    async fn commit_author_avatar_url(
+        &self,
+        repo_owner: &str,
+        repo: &str,
+        commit: SharedString,
+        http_client: Arc<dyn HttpClient>,
+    ) -> Result<Option<Url>> {
+        let commit = commit.to_string();
+        let avatar_url = self
+            .fetch_gitlab_commit_author(repo_owner, repo, &commit, &http_client)
+            .await?
+            .map(|author| -> Result<Url, url::ParseError> {
+                let mut url = Url::parse(&author.avatar_url)?;
+                if let Some(host) = url.host_str() {
+                    let size_query = if host.contains("gravatar") || host.contains("libravatar") {
+                        Some("s=128")
+                    } else if self
+                        .base_url
+                        .host_str()
+                        .is_some_and(|base_host| host.contains(base_host))
+                    {
+                        Some("width=128")
+                    } else {
+                        None
+                    };
+                    url.set_query(size_query);
+                }
+                Ok(url)
+            })
+            .transpose()?;
+        Ok(avatar_url)
+    }
 }
 
 #[cfg(test)]
@@ -134,8 +253,8 @@ mod tests {
     #[test]
     fn test_invalid_self_hosted_remote_url() {
         let remote_url = "https://gitlab.com/zed-industries/zed.git";
-        let github = Gitlab::from_remote_url(remote_url);
-        assert!(github.is_err());
+        let gitlab = Gitlab::from_remote_url(remote_url);
+        assert!(gitlab.is_err());
     }
 
     #[test]
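
Two pure pieces of the new GitLab lookup can be sketched on their own: the project path is URL-encoded into the v4 commits endpoint (via the `urlencoding` crate this diff adds to `Cargo.toml`), and the avatar URL gets a size query chosen by host. The function names below are illustrative; the hosts checked and the `s=128` / `width=128` values mirror the diff.

```rust
use url::Url;

// Build the GitLab v4 commits endpoint for a project, URL-encoding the
// "owner/repo" path segment as the diff does with `urlencoding::encode`.
fn commit_details_url(host: &str, repo_owner: &str, repo: &str, commit: &str) -> String {
    let project_path = format!("{repo_owner}/{repo}");
    let project_path_encoded = urlencoding::encode(&project_path);
    format!("https://{host}/api/v4/projects/{project_path_encoded}/repository/commits/{commit}")
}

// Pick a size query for the avatar URL: Gravatar/Libravatar take `s=`,
// the GitLab host itself takes `width=`, anything else is left untouched.
fn apply_size_query(mut url: Url, base_host: &str) -> Url {
    if let Some(host) = url.host_str() {
        let size_query = if host.contains("gravatar") || host.contains("libravatar") {
            Some("s=128")
        } else if host.contains(base_host) {
            Some("width=128")
        } else {
            None
        };
        url.set_query(size_query);
    }
    url
}

fn main() {
    let url = commit_details_url("gitlab.example.com", "group", "repo", "abc123");
    assert_eq!(
        url,
        "https://gitlab.example.com/api/v4/projects/group%2Frepo/repository/commits/abc123"
    );

    let avatar = Url::parse("https://secure.gravatar.com/avatar/deadbeef").unwrap();
    let sized = apply_size_query(avatar, "gitlab.example.com");
    assert_eq!(sized.query(), Some("s=128"));
}
```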

crates/git_ui/src/commit_view.rs πŸ”—

@@ -170,7 +170,10 @@ impl CommitView {
                     ReplicaId::LOCAL,
                     cx.entity_id().as_non_zero_u64().into(),
                     LineEnding::default(),
-                    format_commit(&commit, stash.is_some()).into(),
+                    Rope::from_str(
+                        &format_commit(&commit, stash.is_some()),
+                        cx.background_executor(),
+                    ),
                 );
                 metadata_buffer_id = Some(buffer.remote_id());
                 Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite)
@@ -336,7 +339,7 @@ async fn build_buffer(
 ) -> Result<Entity<Buffer>> {
     let line_ending = LineEnding::detect(&text);
     LineEnding::normalize(&mut text);
-    let text = Rope::from(text);
+    let text = Rope::from_str(&text, cx.background_executor());
     let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?;
     let language = if let Some(language) = language {
         language_registry
@@ -376,7 +379,7 @@ async fn build_buffer_diff(
     let base_buffer = cx
         .update(|cx| {
             Buffer::build_snapshot(
-                old_text.as_deref().unwrap_or("").into(),
+                Rope::from_str(old_text.as_deref().unwrap_or(""), cx.background_executor()),
                 buffer.language().cloned(),
                 Some(language_registry.clone()),
                 cx,

crates/git_ui/src/file_diff_view.rs πŸ”—

@@ -359,6 +359,7 @@ mod tests {
     use super::*;
     use editor::test::editor_test_context::assert_state_with_diff;
     use gpui::TestAppContext;
+    use language::Rope;
     use project::{FakeFs, Fs, Project};
     use settings::SettingsStore;
     use std::path::PathBuf;
@@ -429,7 +430,7 @@ mod tests {
         // Modify the new file on disk
         fs.save(
             path!("/test/new_file.txt").as_ref(),
-            &unindent(
+            &Rope::from_str_small(&unindent(
                 "
                 new line 1
                 line 2
@@ -437,8 +438,7 @@ mod tests {
                 line 4
                 new line 5
                 ",
-            )
-            .into(),
+            )),
             Default::default(),
         )
         .await
@@ -465,15 +465,14 @@ mod tests {
         // Modify the old file on disk
         fs.save(
             path!("/test/old_file.txt").as_ref(),
-            &unindent(
+            &Rope::from_str_small(&unindent(
                 "
                 new line 1
                 line 2
                 old line 3
                 line 4
                 ",
-            )
-            .into(),
+            )),
             Default::default(),
         )
         .await

crates/git_ui/src/git_panel.rs πŸ”—

@@ -58,8 +58,8 @@ use std::{collections::HashSet, sync::Arc, time::Duration, usize};
 use strum::{IntoEnumIterator, VariantNames};
 use time::OffsetDateTime;
 use ui::{
-    Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IconPosition, Label, LabelSize,
-    PopoverMenu, ScrollAxes, Scrollbars, SplitButton, Tooltip, WithScrollbar, prelude::*,
+    ButtonLike, Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, PopoverMenu, ScrollAxes,
+    Scrollbars, SplitButton, Tooltip, WithScrollbar, prelude::*,
 };
 use util::paths::PathStyle;
 use util::{ResultExt, TryFutureExt, maybe};
@@ -286,6 +286,12 @@ struct PendingOperation {
     op_id: usize,
 }
 
+impl PendingOperation {
+    fn contains_path(&self, path: &RepoPath) -> bool {
+        self.entries.iter().any(|p| &p.repo_path == path)
+    }
+}
+
 pub struct GitPanel {
     pub(crate) active_repository: Option<Entity<Repository>>,
     pub(crate) commit_editor: Entity<Editor>,
@@ -1240,19 +1246,21 @@ impl GitPanel {
         };
         let (stage, repo_paths) = match entry {
             GitListEntry::Status(status_entry) => {
-                if status_entry.status.staging().is_fully_staged() {
+                let repo_paths = vec![status_entry.clone()];
+                let stage = if let Some(status) = self.entry_staging(&status_entry) {
+                    !status.is_fully_staged()
+                } else if status_entry.status.staging().is_fully_staged() {
                     if let Some(op) = self.bulk_staging.clone()
                         && op.anchor == status_entry.repo_path
                     {
                         self.bulk_staging = None;
                     }
-
-                    (false, vec![status_entry.clone()])
+                    false
                 } else {
                     self.set_bulk_staging_anchor(status_entry.repo_path.clone(), cx);
-
-                    (true, vec![status_entry.clone()])
-                }
+                    true
+                };
+                (stage, repo_paths)
             }
             GitListEntry::Header(section) => {
                 let goal_staged_state = !self.header_state(section.header).selected();
@@ -2677,10 +2685,7 @@ impl GitPanel {
             if self.pending.iter().any(|pending| {
                 pending.target_status == TargetStatus::Reverted
                     && !pending.finished
-                    && pending
-                        .entries
-                        .iter()
-                        .any(|pending| pending.repo_path == entry.repo_path)
+                    && pending.contains_path(&entry.repo_path)
             }) {
                 continue;
             }
@@ -2731,10 +2736,7 @@ impl GitPanel {
                 last_pending_staged = pending.entries.first().cloned();
             }
             if let Some(single_staged) = &single_staged_entry
-                && pending
-                    .entries
-                    .iter()
-                    .any(|entry| entry.repo_path == single_staged.repo_path)
+                && pending.contains_path(&single_staged.repo_path)
             {
                 pending_status_for_single_staged = Some(pending.target_status);
             }
@@ -2797,7 +2799,7 @@ impl GitPanel {
             && let Some(index) = bulk_staging_anchor_new_index
             && let Some(entry) = self.entries.get(index)
             && let Some(entry) = entry.status_entry()
-            && self.entry_staging(entry) == StageStatus::Staged
+            && self.entry_staging(entry).unwrap_or(entry.staging) == StageStatus::Staged
         {
             self.bulk_staging = bulk_staging;
         }
@@ -2845,39 +2847,47 @@ impl GitPanel {
             self.entry_count += 1;
             if repo.had_conflict_on_last_merge_head_change(&status_entry.repo_path) {
                 self.conflicted_count += 1;
-                if self.entry_staging(status_entry).has_staged() {
+                if self
+                    .entry_staging(status_entry)
+                    .unwrap_or(status_entry.staging)
+                    .has_staged()
+                {
                     self.conflicted_staged_count += 1;
                 }
             } else if status_entry.status.is_created() {
                 self.new_count += 1;
-                if self.entry_staging(status_entry).has_staged() {
+                if self
+                    .entry_staging(status_entry)
+                    .unwrap_or(status_entry.staging)
+                    .has_staged()
+                {
                     self.new_staged_count += 1;
                 }
             } else {
                 self.tracked_count += 1;
-                if self.entry_staging(status_entry).has_staged() {
+                if self
+                    .entry_staging(status_entry)
+                    .unwrap_or(status_entry.staging)
+                    .has_staged()
+                {
                     self.tracked_staged_count += 1;
                 }
             }
         }
     }
 
-    fn entry_staging(&self, entry: &GitStatusEntry) -> StageStatus {
+    fn entry_staging(&self, entry: &GitStatusEntry) -> Option<StageStatus> {
         for pending in self.pending.iter().rev() {
-            if pending
-                .entries
-                .iter()
-                .any(|pending_entry| pending_entry.repo_path == entry.repo_path)
-            {
+            if pending.contains_path(&entry.repo_path) {
                 match pending.target_status {
-                    TargetStatus::Staged => return StageStatus::Staged,
-                    TargetStatus::Unstaged => return StageStatus::Unstaged,
+                    TargetStatus::Staged => return Some(StageStatus::Staged),
+                    TargetStatus::Unstaged => return Some(StageStatus::Unstaged),
                     TargetStatus::Reverted => continue,
                     TargetStatus::Unchanged => continue,
                 }
             }
         }
-        entry.staging
+        None
     }
 
     pub(crate) fn has_staged_changes(&self) -> bool {
@@ -3495,6 +3505,12 @@ impl GitPanel {
         let amend = self.amend_pending();
         let signoff = self.signoff_enabled;
 
+        let label_color = if self.pending_commit.is_some() {
+            Color::Disabled
+        } else {
+            Color::Default
+        };
+
         div()
             .id("commit-wrapper")
             .on_hover(cx.listener(move |this, hovered, _, cx| {
@@ -3503,14 +3519,15 @@ impl GitPanel {
                 cx.notify()
             }))
             .child(SplitButton::new(
-                ui::ButtonLike::new_rounded_left(ElementId::Name(
+                ButtonLike::new_rounded_left(ElementId::Name(
                     format!("split-button-left-{}", title).into(),
                 ))
-                .layer(ui::ElevationIndex::ModalSurface)
-                .size(ui::ButtonSize::Compact)
+                .layer(ElevationIndex::ModalSurface)
+                .size(ButtonSize::Compact)
                 .child(
-                    div()
-                        .child(Label::new(title).size(LabelSize::Small))
+                    Label::new(title)
+                        .size(LabelSize::Small)
+                        .color(label_color)
                         .mr_0p5(),
                 )
                 .on_click({
@@ -3710,7 +3727,8 @@ impl GitPanel {
         let ix = self.entry_by_path(&repo_path, cx)?;
         let entry = self.entries.get(ix)?;
 
-        let entry_staging = self.entry_staging(entry.status_entry()?);
+        let status = entry.status_entry()?;
+        let entry_staging = self.entry_staging(status).unwrap_or(status.staging);
 
         let checkbox = Checkbox::new("stage-file", entry_staging.as_bool().into())
             .disabled(!self.has_write_access(cx))
@@ -4004,8 +4022,8 @@ impl GitPanel {
         let checkbox_id: ElementId =
             ElementId::Name(format!("entry_{}_{}_checkbox", display_name, ix).into());
 
-        let entry_staging = self.entry_staging(entry);
-        let mut is_staged: ToggleState = self.entry_staging(entry).as_bool().into();
+        let entry_staging = self.entry_staging(entry).unwrap_or(entry.staging);
+        let mut is_staged: ToggleState = entry_staging.as_bool().into();
         if self.show_placeholders && !self.has_staged_changes() && !entry.status.is_created() {
             is_staged = ToggleState::Selected;
         }
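
The shape of the `entry_staging` change is easier to see in isolation: it now returns `Some` only when a pending operation overrides the entry's status, and every caller falls back to the entry's own staging with `unwrap_or`. A std-only sketch with simplified stand-in types; nothing here is the panel's real data model.

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum StageStatus {
    Staged,
    Unstaged,
}

#[derive(Clone, Copy)]
enum TargetStatus {
    Staged,
    Unstaged,
    Reverted,
}

// Simplified stand-ins for the panel's pending operations and entries.
struct PendingOp {
    paths: Vec<String>,
    target: TargetStatus,
}

impl PendingOp {
    // Mirrors the `contains_path` helper introduced in the diff.
    fn contains_path(&self, path: &str) -> bool {
        self.paths.iter().any(|p| p == path)
    }
}

struct Entry {
    path: String,
    staging: StageStatus,
}

// Returns Some only when a pending operation overrides the entry's status;
// callers fall back to the entry's own state with `unwrap_or`.
fn entry_staging(pending: &[PendingOp], entry: &Entry) -> Option<StageStatus> {
    for op in pending.iter().rev() {
        if op.contains_path(&entry.path) {
            match op.target {
                TargetStatus::Staged => return Some(StageStatus::Staged),
                TargetStatus::Unstaged => return Some(StageStatus::Unstaged),
                TargetStatus::Reverted => continue,
            }
        }
    }
    None
}

fn main() {
    let entry = Entry { path: "src/main.rs".into(), staging: StageStatus::Unstaged };
    let pending = vec![PendingOp { paths: vec!["src/main.rs".into()], target: TargetStatus::Staged }];

    // A pending op wins; with no pending op the entry's own staging is used.
    assert_eq!(entry_staging(&pending, &entry).unwrap_or(entry.staging), StageStatus::Staged);
    assert_eq!(entry_staging(&[], &entry).unwrap_or(entry.staging), StageStatus::Unstaged);
}
```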

crates/git_ui/src/stash_picker.rs πŸ”—

@@ -5,16 +5,14 @@ use git::stash::StashEntry;
 use gpui::{
     Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
     InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render,
-    SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, svg,
+    SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems,
 };
 use picker::{Picker, PickerDelegate};
 use project::git_store::{Repository, RepositoryEvent};
 use std::sync::Arc;
 use time::{OffsetDateTime, UtcOffset};
 use time_format;
-use ui::{
-    ButtonLike, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*,
-};
+use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*};
 use util::ResultExt;
 use workspace::notifications::DetachAndPromptErr;
 use workspace::{ModalView, Workspace};
@@ -434,7 +432,7 @@ impl PickerDelegate for StashListDelegate {
         ix: usize,
         selected: bool,
         _window: &mut Window,
-        cx: &mut Context<Picker<Self>>,
+        _cx: &mut Context<Picker<Self>>,
     ) -> Option<Self::ListItem> {
         let entry_match = &self.matches[ix];
 
@@ -446,23 +444,14 @@ impl PickerDelegate for StashListDelegate {
             .into_any_element();
 
         let branch_name = entry_match.entry.branch.clone().unwrap_or_default();
-        let branch_label = h_flex()
+        let branch_info = h_flex()
             .gap_1p5()
             .w_full()
             .child(
-                h_flex()
-                    .gap_0p5()
-                    .child(
-                        Icon::new(IconName::GitBranch)
-                            .color(Color::Muted)
-                            .size(IconSize::Small),
-                    )
-                    .child(
-                        Label::new(branch_name)
-                            .truncate()
-                            .color(Color::Muted)
-                            .size(LabelSize::Small),
-                    ),
+                Label::new(branch_name)
+                    .truncate()
+                    .color(Color::Muted)
+                    .size(LabelSize::Small),
             )
             .child(
                 Label::new("β€’")
@@ -476,42 +465,12 @@ impl PickerDelegate for StashListDelegate {
                     .size(LabelSize::Small),
             );
 
-        let show_button = div()
-            .group("show-button-hover")
-            .child(
-                ButtonLike::new("show-button")
-                    .child(
-                        svg()
-                            .size(IconSize::Medium.rems())
-                            .flex_none()
-                            .path(IconName::Eye.path())
-                            .text_color(Color::Default.color(cx))
-                            .group_hover("show-button-hover", |this| {
-                                this.text_color(Color::Accent.color(cx))
-                            })
-                            .hover(|this| this.text_color(Color::Accent.color(cx))),
-                    )
-                    .tooltip(Tooltip::for_action_title("Show Stash", &ShowStashItem))
-                    .on_click(cx.listener(move |picker, _, window, cx| {
-                        cx.stop_propagation();
-                        picker.delegate.show_stash_at(ix, window, cx);
-                    })),
-            )
-            .into_any_element();
-
         Some(
             ListItem::new(SharedString::from(format!("stash-{ix}")))
                 .inset(true)
                 .spacing(ListItemSpacing::Sparse)
                 .toggle_state(selected)
-                .end_slot(show_button)
-                .child(
-                    v_flex()
-                        .w_full()
-                        .overflow_hidden()
-                        .child(stash_label)
-                        .child(branch_label.into_element()),
-                )
+                .child(v_flex().w_full().child(stash_label).child(branch_info))
                 .tooltip(Tooltip::text(format!(
                     "stash@{{{}}}",
                     entry_match.entry.index
@@ -535,15 +494,35 @@ impl PickerDelegate for StashListDelegate {
                 .border_t_1()
                 .border_color(cx.theme().colors().border_variant)
                 .child(
-                    Button::new("apply-stash", "Apply")
+                    Button::new("drop-stash", "Drop")
                         .key_binding(
-                            KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx)
-                                .map(|kb| kb.size(rems_from_px(12.))),
+                            KeyBinding::for_action_in(
+                                &stash_picker::DropStashItem,
+                                &focus_handle,
+                                cx,
+                            )
+                            .map(|kb| kb.size(rems_from_px(12.))),
                         )
                         .on_click(|_, window, cx| {
-                            window.dispatch_action(menu::Confirm.boxed_clone(), cx)
+                            window.dispatch_action(stash_picker::DropStashItem.boxed_clone(), cx)
                         }),
                 )
+                .child(
+                    Button::new("view-stash", "View")
+                        .key_binding(
+                            KeyBinding::for_action_in(
+                                &stash_picker::ShowStashItem,
+                                &focus_handle,
+                                cx,
+                            )
+                            .map(|kb| kb.size(rems_from_px(12.))),
+                        )
+                        .on_click(cx.listener(move |picker, _, window, cx| {
+                            cx.stop_propagation();
+                            let selected_ix = picker.delegate.selected_index();
+                            picker.delegate.show_stash_at(selected_ix, window, cx);
+                        })),
+                )
                 .child(
                     Button::new("pop-stash", "Pop")
                         .key_binding(
@@ -555,17 +534,13 @@ impl PickerDelegate for StashListDelegate {
                         }),
                 )
                 .child(
-                    Button::new("drop-stash", "Drop")
+                    Button::new("apply-stash", "Apply")
                         .key_binding(
-                            KeyBinding::for_action_in(
-                                &stash_picker::DropStashItem,
-                                &focus_handle,
-                                cx,
-                            )
-                            .map(|kb| kb.size(rems_from_px(12.))),
+                            KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx)
+                                .map(|kb| kb.size(rems_from_px(12.))),
                         )
                         .on_click(|_, window, cx| {
-                            window.dispatch_action(stash_picker::DropStashItem.boxed_clone(), cx)
+                            window.dispatch_action(menu::Confirm.boxed_clone(), cx)
                         }),
                 )
                 .into_any(),

crates/go_to_line/src/cursor_position.rs πŸ”—

@@ -1,4 +1,4 @@
-use editor::{Editor, MultiBufferSnapshot};
+use editor::{Editor, EditorEvent, MultiBufferSnapshot};
 use gpui::{App, Entity, FocusHandle, Focusable, Styled, Subscription, Task, WeakEntity};
 use settings::Settings;
 use std::{fmt::Write, num::NonZeroU32, time::Duration};
@@ -81,7 +81,7 @@ impl CursorPosition {
 
     fn update_position(
         &mut self,
-        editor: Entity<Editor>,
+        editor: &Entity<Editor>,
         debounce: Option<Duration>,
         window: &mut Window,
         cx: &mut Context<Self>,
@@ -269,19 +269,21 @@ impl StatusItemView for CursorPosition {
         cx: &mut Context<Self>,
     ) {
         if let Some(editor) = active_pane_item.and_then(|item| item.act_as::<Editor>(cx)) {
-            self._observe_active_editor =
-                Some(
-                    cx.observe_in(&editor, window, |cursor_position, editor, window, cx| {
-                        Self::update_position(
-                            cursor_position,
-                            editor,
-                            Some(UPDATE_DEBOUNCE),
-                            window,
-                            cx,
-                        )
-                    }),
-                );
-            self.update_position(editor, None, window, cx);
+            self._observe_active_editor = Some(cx.subscribe_in(
+                &editor,
+                window,
+                |cursor_position, editor, event, window, cx| match event {
+                    EditorEvent::SelectionsChanged { .. } => Self::update_position(
+                        cursor_position,
+                        editor,
+                        Some(UPDATE_DEBOUNCE),
+                        window,
+                        cx,
+                    ),
+                    _ => {}
+                },
+            ));
+            self.update_position(&editor, None, window, cx);
         } else {
             self.position = None;
             self._observe_active_editor = None;

crates/gpui/src/app/async_context.rs πŸ”—

@@ -176,7 +176,7 @@ impl AsyncApp {
         lock.open_window(options, build_root_view)
     }
 
-    /// Schedule a future to be polled in the background.
+    /// Schedule a future to be polled in the foreground.
     #[track_caller]
     pub fn spawn<AsyncFn, R>(&self, f: AsyncFn) -> Task<R>
     where
@@ -260,6 +260,19 @@ impl AsyncApp {
     }
 }
 
+impl sum_tree::BackgroundSpawn for BackgroundExecutor {
+    type Task<R>
+        = Task<R>
+    where
+        R: Send + Sync;
+    fn background_spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Self::Task<R>
+    where
+        R: Send + Sync + 'static,
+    {
+        self.spawn(future)
+    }
+}
+
 /// A cloneable, owned handle to the application context,
 /// composed with the window associated with the current task.
 #[derive(Clone, Deref, DerefMut)]
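
The new `sum_tree::BackgroundSpawn` impl leans on a generic associated type so the task type can vary with the future's output. A self-contained sketch of that trait shape, assuming a simplified trait definition (only the impl is visible in this diff) and using the `futures` crate's `block_on` for a toy inline executor.

```rust
use std::future::{ready, Future, Ready};

// Simplified stand-in for `sum_tree::BackgroundSpawn`: the associated task
// type is generic over the future's output, which is what the GAT buys us.
trait BackgroundSpawn {
    type Task<R>: Future<Output = R>
    where
        R: Send + Sync;

    fn background_spawn<R>(
        &self,
        future: impl Future<Output = R> + Send + 'static,
    ) -> Self::Task<R>
    where
        R: Send + Sync + 'static;
}

struct InlineExecutor;

impl BackgroundSpawn for InlineExecutor {
    type Task<R>
        = Ready<R>
    where
        R: Send + Sync;

    fn background_spawn<R>(
        &self,
        future: impl Future<Output = R> + Send + 'static,
    ) -> Self::Task<R>
    where
        R: Send + Sync + 'static,
    {
        // A toy executor: drive the future to completion right here and hand
        // back an already-resolved task. gpui's impl spawns on its background
        // executor instead.
        ready(futures::executor::block_on(future))
    }
}

fn main() {
    let task = InlineExecutor.background_spawn(async { 2 + 2 });
    assert_eq!(futures::executor::block_on(task), 4);
}
```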

crates/gpui/src/app/test_context.rs πŸ”—

@@ -393,6 +393,11 @@ impl TestAppContext {
         }
     }
 
+    /// Returns the background executor for this context.
+    pub fn background_executor(&self) -> &BackgroundExecutor {
+        &self.background_executor
+    }
+
     /// Wait until there are no more pending tasks.
     pub fn run_until_parked(&mut self) {
         self.background_executor.run_until_parked()

crates/gpui/src/executor.rs πŸ”—

@@ -342,7 +342,7 @@ impl BackgroundExecutor {
     /// for all of them to complete before returning.
     pub async fn scoped<'scope, F>(&self, scheduler: F)
     where
-        F: FnOnce(&mut Scope<'scope>),
+        F: for<'a> FnOnce(&'a mut Scope<'scope>),
     {
         let mut scope = Scope::new(self.clone());
         (scheduler)(&mut scope);
@@ -479,7 +479,6 @@ impl ForegroundExecutor {
     }
 
     /// Enqueues the given Task to run on the main thread at some point in the future.
-    #[track_caller]
     pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
     where
         R: 'static,

crates/gpui/src/window.rs πŸ”—

@@ -4326,10 +4326,10 @@ impl Window {
     }
 
     /// Returns a generic event listener that invokes the given listener with the view and context associated with the given view handle.
-    pub fn listener_for<V: Render, E>(
+    pub fn listener_for<T: 'static, E>(
         &self,
-        view: &Entity<V>,
-        f: impl Fn(&mut V, &E, &mut Window, &mut Context<V>) + 'static,
+        view: &Entity<T>,
+        f: impl Fn(&mut T, &E, &mut Window, &mut Context<T>) + 'static,
     ) -> impl Fn(&E, &mut Window, &mut App) + 'static {
         let view = view.downgrade();
         move |e: &E, window: &mut Window, cx: &mut App| {

crates/keymap_editor/src/keymap_editor.rs πŸ”—

@@ -22,7 +22,7 @@ use gpui::{
     ScrollWheelEvent, Stateful, StyledText, Subscription, Task, TextStyleRefinement, WeakEntity,
     actions, anchored, deferred, div,
 };
-use language::{Language, LanguageConfig, ToOffset as _};
+use language::{Language, LanguageConfig, Rope, ToOffset as _};
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::{CompletionDisplayOptions, Project};
 use settings::{
@@ -2119,7 +2119,7 @@ impl RenderOnce for SyntaxHighlightedText {
 
         let highlights = self
             .language
-            .highlight_text(&text.as_ref().into(), 0..text.len());
+            .highlight_text(&Rope::from_str_small(text.as_ref()), 0..text.len());
         let mut runs = Vec::with_capacity(highlights.len());
         let mut offset = 0;
 

crates/language/src/buffer.rs πŸ”—

@@ -24,8 +24,8 @@ use collections::HashMap;
 use fs::MTime;
 use futures::channel::oneshot;
 use gpui::{
-    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
-    Task, TaskLabel, TextStyle,
+    App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
+    SharedString, StyledText, Task, TaskLabel, TextStyle,
 };
 
 use lsp::{LanguageServerId, NumberOrString};
@@ -883,6 +883,7 @@ impl Buffer {
                 ReplicaId::LOCAL,
                 cx.entity_id().as_non_zero_u64().into(),
                 base_text.into(),
+                &cx.background_executor(),
             ),
             None,
             Capability::ReadWrite,
@@ -913,9 +914,10 @@ impl Buffer {
         replica_id: ReplicaId,
         capability: Capability,
         base_text: impl Into<String>,
+        cx: &BackgroundExecutor,
     ) -> Self {
         Self::build(
-            TextBuffer::new(replica_id, remote_id, base_text.into()),
+            TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
             None,
             capability,
         )
@@ -928,9 +930,10 @@ impl Buffer {
         capability: Capability,
         message: proto::BufferState,
         file: Option<Arc<dyn File>>,
+        cx: &BackgroundExecutor,
     ) -> Result<Self> {
         let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
-        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
+        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
         let mut this = Self::build(buffer, file, capability);
         this.text.set_line_ending(proto::deserialize_line_ending(
             rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
@@ -1210,13 +1213,14 @@ impl Buffer {
         let old_snapshot = self.text.snapshot();
         let mut branch_buffer = self.text.branch();
         let mut syntax_snapshot = self.syntax_map.lock().snapshot();
+        let executor = cx.background_executor().clone();
         cx.background_spawn(async move {
             if !edits.is_empty() {
                 if let Some(language) = language.clone() {
                     syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                 }
 
-                branch_buffer.edit(edits.iter().cloned());
+                branch_buffer.edit(edits.iter().cloned(), &executor);
                 let snapshot = branch_buffer.snapshot();
                 syntax_snapshot.interpolate(&snapshot);
 
@@ -1645,21 +1649,24 @@ impl Buffer {
                 self.reparse = None;
             }
             Err(parse_task) => {
+                // todo(lw): hot foreground spawn
                 self.reparse = Some(cx.spawn(async move |this, cx| {
-                    let new_syntax_map = parse_task.await;
+                    let new_syntax_map = cx.background_spawn(parse_task).await;
                     this.update(cx, move |this, cx| {
-                        let grammar_changed =
+                        let grammar_changed = || {
                             this.language.as_ref().is_none_or(|current_language| {
                                 !Arc::ptr_eq(&language, current_language)
-                            });
-                        let language_registry_changed = new_syntax_map
-                            .contains_unknown_injections()
-                            && language_registry.is_some_and(|registry| {
-                                registry.version() != new_syntax_map.language_registry_version()
-                            });
-                        let parse_again = language_registry_changed
-                            || grammar_changed
-                            || this.version.changed_since(&parsed_version);
+                            })
+                        };
+                        let language_registry_changed = || {
+                            new_syntax_map.contains_unknown_injections()
+                                && language_registry.is_some_and(|registry| {
+                                    registry.version() != new_syntax_map.language_registry_version()
+                                })
+                        };
+                        let parse_again = this.version.changed_since(&parsed_version)
+                            || language_registry_changed()
+                            || grammar_changed();
                         this.did_finish_parsing(new_syntax_map, cx);
                         this.reparse = None;
                         if parse_again {
@@ -2430,7 +2437,9 @@ impl Buffer {
         let autoindent_request = autoindent_mode
             .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
 
-        let edit_operation = self.text.edit(edits.iter().cloned());
+        let edit_operation = self
+            .text
+            .edit(edits.iter().cloned(), cx.background_executor());
         let edit_id = edit_operation.timestamp();
 
         if let Some((before_edit, mode)) = autoindent_request {
@@ -2661,7 +2670,8 @@ impl Buffer {
         for operation in buffer_ops.iter() {
             self.send_operation(Operation::Buffer(operation.clone()), false, cx);
         }
-        self.text.apply_ops(buffer_ops);
+        self.text
+            .apply_ops(buffer_ops, Some(cx.background_executor()));
         self.deferred_ops.insert(deferred_ops);
         self.flush_deferred_ops(cx);
         self.did_edit(&old_version, was_dirty, cx);

crates/language/src/buffer_tests.rs πŸ”—

@@ -75,6 +75,7 @@ fn test_set_line_ending(cx: &mut TestAppContext) {
             Capability::ReadWrite,
             base.read(cx).to_proto(cx),
             None,
+            cx.background_executor(),
         )
         .unwrap()
     });
@@ -255,14 +256,18 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) {
             .is_none()
     );
     assert!(
-        cx.read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx))
-            .is_none()
+        cx.read(|cx| languages.language_for_file(
+            &file("the/script"),
+            Some(&Rope::from_str("nothing", cx.background_executor())),
+            cx
+        ))
+        .is_none()
     );
 
     assert_eq!(
         cx.read(|cx| languages.language_for_file(
             &file("the/script"),
-            Some(&"#!/bin/env node".into()),
+            Some(&Rope::from_str("#!/bin/env node", cx.background_executor())),
             cx
         ))
         .unwrap()
@@ -406,6 +411,7 @@ fn test_edit_events(cx: &mut gpui::App) {
             ReplicaId::new(1),
             Capability::ReadWrite,
             "abcdef",
+            cx.background_executor(),
         )
     });
     let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
@@ -2781,8 +2787,14 @@ fn test_serialization(cx: &mut gpui::App) {
         .background_executor()
         .block(buffer1.read(cx).serialize_ops(None, cx));
     let buffer2 = cx.new(|cx| {
-        let mut buffer =
-            Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap();
+        let mut buffer = Buffer::from_proto(
+            ReplicaId::new(1),
+            Capability::ReadWrite,
+            state,
+            None,
+            cx.background_executor(),
+        )
+        .unwrap();
         buffer.apply_ops(
             ops.into_iter()
                 .map(|op| proto::deserialize_operation(op).unwrap()),
@@ -2806,6 +2818,7 @@ fn test_branch_and_merge(cx: &mut TestAppContext) {
             Capability::ReadWrite,
             base.read(cx).to_proto(cx),
             None,
+            cx.background_executor(),
         )
         .unwrap()
     });
@@ -3120,9 +3133,14 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
             let ops = cx
                 .background_executor()
                 .block(base_buffer.read(cx).serialize_ops(None, cx));
-            let mut buffer =
-                Buffer::from_proto(ReplicaId::new(i as u16), Capability::ReadWrite, state, None)
-                    .unwrap();
+            let mut buffer = Buffer::from_proto(
+                ReplicaId::new(i as u16),
+                Capability::ReadWrite,
+                state,
+                None,
+                cx.background_executor(),
+            )
+            .unwrap();
             buffer.apply_ops(
                 ops.into_iter()
                     .map(|op| proto::deserialize_operation(op).unwrap()),
@@ -3251,6 +3269,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
                         Capability::ReadWrite,
                         old_buffer_state,
                         None,
+                        cx.background_executor(),
                     )
                     .unwrap();
                     new_buffer.apply_ops(
@@ -3414,7 +3433,7 @@ fn test_contiguous_ranges() {
 }
 
 #[gpui::test(iterations = 500)]
-fn test_trailing_whitespace_ranges(mut rng: StdRng) {
+fn test_trailing_whitespace_ranges(mut rng: StdRng, cx: &mut TestAppContext) {
     // Generate a random multi-line string containing
     // some lines with trailing whitespace.
     let mut text = String::new();
@@ -3438,7 +3457,7 @@ fn test_trailing_whitespace_ranges(mut rng: StdRng) {
         _ => {}
     }
 
-    let rope = Rope::from(text.as_str());
+    let rope = Rope::from_str(text.as_str(), cx.background_executor());
     let actual_ranges = trailing_whitespace_ranges(&rope);
     let expected_ranges = TRAILING_WHITESPACE_REGEX
         .find_iter(&text)

crates/language/src/syntax_map/syntax_map_tests.rs πŸ”—

@@ -100,6 +100,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) {
             }
         "#
         .unindent(),
+        cx.background_executor(),
     );
 
     let mut syntax_map = SyntaxMap::new(&buffer);
@@ -147,7 +148,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) {
 
     // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
     let macro_name_range = range_for_text(&buffer, "vec!");
-    buffer.edit([(macro_name_range, "&")]);
+    buffer.edit([(macro_name_range, "&")], cx.background_executor());
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(language.clone(), &buffer);
 
@@ -199,6 +200,7 @@ fn test_dynamic_language_injection(cx: &mut App) {
             ```
         "#
         .unindent(),
+        cx.background_executor(),
     );
 
     let mut syntax_map = SyntaxMap::new(&buffer);
@@ -218,7 +220,10 @@ fn test_dynamic_language_injection(cx: &mut App) {
 
     // Replace `rs` with a path to ending in `.rb` in code block.
     let macro_name_range = range_for_text(&buffer, "rs");
-    buffer.edit([(macro_name_range, "foo/bar/baz.rb")]);
+    buffer.edit(
+        [(macro_name_range, "foo/bar/baz.rb")],
+        cx.background_executor(),
+    );
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(markdown.clone(), &buffer);
     syntax_map.reparse(markdown_inline.clone(), &buffer);
@@ -235,7 +240,7 @@ fn test_dynamic_language_injection(cx: &mut App) {
 
     // Replace Ruby with a language that hasn't been loaded yet.
     let macro_name_range = range_for_text(&buffer, "foo/bar/baz.rb");
-    buffer.edit([(macro_name_range, "html")]);
+    buffer.edit([(macro_name_range, "html")], cx.background_executor());
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(markdown.clone(), &buffer);
     syntax_map.reparse(markdown_inline.clone(), &buffer);
@@ -811,7 +816,12 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) {
     .unindent();
 
     let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text);
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        text,
+        cx.background_executor(),
+    );
 
     let mut syntax_map = SyntaxMap::new(&buffer);
     syntax_map.set_language_registry(registry.clone());
@@ -859,7 +869,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) {
     .unindent();
 
     log::info!("editing");
-    buffer.edit_via_marked_text(&text);
+    buffer.edit_via_marked_text(&text, cx.background_executor());
     syntax_map.interpolate(&buffer);
     syntax_map.reparse(language, &buffer);
 
@@ -903,7 +913,7 @@ fn test_random_syntax_map_edits_rust_macros(rng: StdRng, cx: &mut App) {
     let language = Arc::new(rust_lang());
     registry.add(language.clone());
 
-    test_random_edits(text, registry, language, rng);
+    test_random_edits(text, registry, language, rng, cx);
 }
 
 #[gpui::test(iterations = 50)]
@@ -932,7 +942,7 @@ fn test_random_syntax_map_edits_with_erb(rng: StdRng, cx: &mut App) {
     registry.add(Arc::new(ruby_lang()));
     registry.add(Arc::new(html_lang()));
 
-    test_random_edits(text, registry, language, rng);
+    test_random_edits(text, registry, language, rng, cx);
 }
 
 #[gpui::test(iterations = 50)]
@@ -965,7 +975,7 @@ fn test_random_syntax_map_edits_with_heex(rng: StdRng, cx: &mut App) {
     registry.add(Arc::new(heex_lang()));
     registry.add(Arc::new(html_lang()));
 
-    test_random_edits(text, registry, language, rng);
+    test_random_edits(text, registry, language, rng, cx);
 }
 
 fn test_random_edits(
@@ -973,12 +983,18 @@ fn test_random_edits(
     registry: Arc<LanguageRegistry>,
     language: Arc<Language>,
     mut rng: StdRng,
+    cx: &mut App,
 ) {
     let operations = env::var("OPERATIONS")
         .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
         .unwrap_or(10);
 
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text);
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        text,
+        cx.background_executor(),
+    );
 
     let mut syntax_map = SyntaxMap::new(&buffer);
     syntax_map.set_language_registry(registry.clone());
@@ -993,7 +1009,7 @@ fn test_random_edits(
         let prev_buffer = buffer.snapshot();
         let prev_syntax_map = syntax_map.snapshot();
 
-        buffer.randomly_edit(&mut rng, 3);
+        buffer.randomly_edit(&mut rng, 3, cx.background_executor());
         log::info!("text:\n{}", buffer.text());
 
         syntax_map.interpolate(&buffer);
@@ -1159,7 +1175,12 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf
         .now_or_never()
         .unwrap()
         .unwrap();
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "",
+        cx.background_executor(),
+    );
 
     let mut mutated_syntax_map = SyntaxMap::new(&buffer);
     mutated_syntax_map.set_language_registry(registry.clone());
@@ -1168,7 +1189,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf
     for (i, marked_string) in steps.iter().enumerate() {
         let marked_string = marked_string.unindent();
         log::info!("incremental parse {i}: {marked_string:?}");
-        buffer.edit_via_marked_text(&marked_string);
+        buffer.edit_via_marked_text(&marked_string, cx.background_executor());
 
         // Reparse the syntax map
         mutated_syntax_map.interpolate(&buffer);

crates/language_extension/src/extension_lsp_adapter.rs πŸ”—

@@ -11,7 +11,7 @@ use futures::{Future, FutureExt, future::join_all};
 use gpui::{App, AppContext, AsyncApp, Task};
 use language::{
     BinaryStatus, CodeLabel, DynLspInstaller, HighlightId, Language, LanguageName, LspAdapter,
-    LspAdapterDelegate, Toolchain,
+    LspAdapterDelegate, Rope, Toolchain,
 };
 use lsp::{
     CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName,
@@ -403,7 +403,10 @@ fn labels_from_extension(
             let runs = if label.code.is_empty() {
                 Vec::new()
             } else {
-                language.highlight_text(&label.code.as_str().into(), 0..label.code.len())
+                language.highlight_text(
+                    &Rope::from_str_small(label.code.as_str()),
+                    0..label.code.len(),
+                )
             };
             build_code_label(&label, &runs, language)
         })

crates/language_models/src/provider/bedrock.rs πŸ”—

@@ -1221,7 +1221,6 @@ impl Render for ConfigurationView {
                     )
             )
             .child(self.render_static_credentials_ui())
-            .child(self.region_editor.clone())
             .child(
                 Label::new(
                     format!("You can also assign the {ZED_BEDROCK_ACCESS_KEY_ID_VAR}, {ZED_BEDROCK_SECRET_ACCESS_KEY_VAR} AND {ZED_BEDROCK_REGION_VAR} environment variables and restart Zed."),

crates/language_models/src/provider/copilot_chat.rs πŸ”—

@@ -1377,11 +1377,12 @@ impl Render for ConfigurationView {
 
                         v_flex().gap_2().child(Label::new(LABEL)).child(
                             Button::new("sign_in", "Sign in to use GitHub Copilot")
+                                .full_width()
+                                .style(ButtonStyle::Outlined)
                                 .icon_color(Color::Muted)
                                 .icon(IconName::Github)
                                 .icon_position(IconPosition::Start)
-                                .icon_size(IconSize::Medium)
-                                .full_width()
+                                .icon_size(IconSize::Small)
                                 .on_click(|_, window, cx| copilot::initiate_sign_in(window, cx)),
                         )
                     }

crates/language_models/src/provider/mistral.rs πŸ”—

@@ -753,9 +753,9 @@ struct ConfigurationView {
 impl ConfigurationView {
     fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
         let api_key_editor =
-            cx.new(|cx| InputField::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2"));
+            cx.new(|cx| InputField::new(window, cx, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"));
         let codestral_api_key_editor =
-            cx.new(|cx| InputField::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2"));
+            cx.new(|cx| InputField::new(window, cx, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"));
 
         cx.observe(&state, |_, _, cx| {
             cx.notify();

crates/language_models/src/provider/ollama.rs πŸ”—

@@ -906,6 +906,16 @@ impl Render for ConfigurationView {
                                             .child(Icon::new(IconName::Check).color(Color::Success))
                                             .child(Label::new("Connected"))
                                             .into_any_element(),
+                                    )
+                                    .child(
+                                        IconButton::new("refresh-models", IconName::RotateCcw)
+                                            .tooltip(Tooltip::text("Refresh models"))
+                                            .on_click(cx.listener(|this, _, _, cx| {
+                                                this.state.update(cx, |state, _| {
+                                                    state.fetched_models.clear();
+                                                });
+                                                this.retry_connection(cx);
+                                            })),
                                     ),
                             )
                         } else {

crates/language_tools/src/lsp_log_view.rs πŸ”—

@@ -812,11 +812,13 @@ impl SearchableItem for LspLogView {
         &mut self,
         index: usize,
         matches: &[Self::Match],
+        collapse: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.editor
-            .update(cx, |e, cx| e.activate_match(index, matches, window, cx))
+        self.editor.update(cx, |e, cx| {
+            e.activate_match(index, matches, collapse, window, cx)
+        })
     }
 
     fn select_matches(

crates/languages/src/c.rs πŸ”—

@@ -166,19 +166,30 @@ impl super::LspAdapter for CLspAdapter {
             None => "",
         };
 
-        let label = completion
+        let mut label = completion
             .label
             .strip_prefix('β€’')
             .unwrap_or(&completion.label)
             .trim()
-            .to_owned()
-            + label_detail;
+            .to_owned();
+
+        if !label_detail.is_empty() {
+            let should_add_space = match completion.kind {
+                Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD) => false,
+                _ => true,
+            };
+
+            if should_add_space && !label.ends_with(' ') && !label_detail.starts_with(' ') {
+                label.push(' ');
+            }
+            label.push_str(label_detail);
+        }
 
         match completion.kind {
             Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => {
                 let detail = completion.detail.as_ref().unwrap();
                 let text = format!("{} {}", detail, label);
-                let source = Rope::from(format!("struct S {{ {} }}", text).as_str());
+                let source = Rope::from_str_small(format!("struct S {{ {} }}", text).as_str());
                 let runs = language.highlight_text(&source, 11..11 + text.len());
                 let filter_range = completion
                     .filter_text
@@ -195,7 +206,8 @@ impl super::LspAdapter for CLspAdapter {
             {
                 let detail = completion.detail.as_ref().unwrap();
                 let text = format!("{} {}", detail, label);
-                let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len());
+                let runs =
+                    language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len());
                 let filter_range = completion
                     .filter_text
                     .as_deref()
@@ -211,7 +223,8 @@ impl super::LspAdapter for CLspAdapter {
             {
                 let detail = completion.detail.as_ref().unwrap();
                 let text = format!("{} {}", detail, label);
-                let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len());
+                let runs =
+                    language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len());
                 let filter_range = completion
                     .filter_text
                     .as_deref()
@@ -315,7 +328,7 @@ impl super::LspAdapter for CLspAdapter {
         Some(CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&text.as_str().into(), display_range),
+            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
         ))
     }
 

crates/languages/src/go.rs πŸ”—

@@ -221,7 +221,7 @@ impl LspAdapter for GoLspAdapter {
         match completion.kind.zip(completion.detail.as_ref()) {
             Some((lsp::CompletionItemKind::MODULE, detail)) => {
                 let text = format!("{label} {detail}");
-                let source = Rope::from(format!("import {text}").as_str());
+                let source = Rope::from_str_small(format!("import {text}").as_str());
                 let runs = language.highlight_text(&source, 7..7 + text[name_offset..].len());
                 let filter_range = completion
                     .filter_text
@@ -238,8 +238,9 @@ impl LspAdapter for GoLspAdapter {
                 detail,
             )) => {
                 let text = format!("{label} {detail}");
-                let source =
-                    Rope::from(format!("var {} {}", &text[name_offset..], detail).as_str());
+                let source = Rope::from_str_small(
+                    format!("var {} {}", &text[name_offset..], detail).as_str(),
+                );
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 4..4 + text[name_offset..].len()),
@@ -256,7 +257,8 @@ impl LspAdapter for GoLspAdapter {
             }
             Some((lsp::CompletionItemKind::STRUCT, _)) => {
                 let text = format!("{label} struct {{}}");
-                let source = Rope::from(format!("type {}", &text[name_offset..]).as_str());
+                let source =
+                    Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str());
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 5..5 + text[name_offset..].len()),
@@ -273,7 +275,8 @@ impl LspAdapter for GoLspAdapter {
             }
             Some((lsp::CompletionItemKind::INTERFACE, _)) => {
                 let text = format!("{label} interface {{}}");
-                let source = Rope::from(format!("type {}", &text[name_offset..]).as_str());
+                let source =
+                    Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str());
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 5..5 + text[name_offset..].len()),
@@ -290,8 +293,9 @@ impl LspAdapter for GoLspAdapter {
             }
             Some((lsp::CompletionItemKind::FIELD, detail)) => {
                 let text = format!("{label} {detail}");
-                let source =
-                    Rope::from(format!("type T struct {{ {} }}", &text[name_offset..]).as_str());
+                let source = Rope::from_str_small(
+                    format!("type T struct {{ {} }}", &text[name_offset..]).as_str(),
+                );
                 let runs = adjust_runs(
                     name_offset,
                     language.highlight_text(&source, 16..16 + text[name_offset..].len()),
@@ -309,7 +313,9 @@ impl LspAdapter for GoLspAdapter {
             Some((lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD, detail)) => {
                 if let Some(signature) = detail.strip_prefix("func") {
                     let text = format!("{label}{signature}");
-                    let source = Rope::from(format!("func {} {{}}", &text[name_offset..]).as_str());
+                    let source = Rope::from_str_small(
+                        format!("func {} {{}}", &text[name_offset..]).as_str(),
+                    );
                     let runs = adjust_runs(
                         name_offset,
                         language.highlight_text(&source, 5..5 + text[name_offset..].len()),
@@ -385,7 +391,7 @@ impl LspAdapter for GoLspAdapter {
         Some(CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&text.as_str().into(), display_range),
+            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
         ))
     }
 

crates/languages/src/python.rs πŸ”—

@@ -19,6 +19,7 @@ use pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind};
 use pet_virtualenv::is_virtualenv_dir;
 use project::Fs;
 use project::lsp_store::language_server_settings;
+use rope::Rope;
 use serde::{Deserialize, Serialize};
 use serde_json::{Value, json};
 use smol::lock::OnceCell;
@@ -466,7 +467,7 @@ impl LspAdapter for PyrightLspAdapter {
         Some(language::CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&text.as_str().into(), display_range),
+            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
         ))
     }
 
@@ -1210,7 +1211,7 @@ impl ToolchainLister for PythonToolchainProvider {
                 activation_script.extend(match shell {
                     ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")),
                     ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")),
-                    ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")),
+                    ShellKind::Nushell => Some(format!("^\"{pyenv}\" shell - nu {version}")),
                     ShellKind::PowerShell => None,
                     ShellKind::Csh => None,
                     ShellKind::Tcsh => None,
@@ -1511,7 +1512,7 @@ impl LspAdapter for PyLspAdapter {
         Some(language::CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&text.as_str().into(), display_range),
+            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
         ))
     }
 
@@ -1800,7 +1801,7 @@ impl LspAdapter for BasedPyrightLspAdapter {
         Some(language::CodeLabel::new(
             text[display_range.clone()].to_string(),
             filter_range,
-            language.highlight_text(&text.as_str().into(), display_range),
+            language.highlight_text(&Rope::from_str_small(text.as_str()), display_range),
         ))
     }
 

crates/languages/src/rust.rs πŸ”—

@@ -252,7 +252,7 @@ impl LspAdapter for RustLspAdapter {
                 let name = &completion.label;
                 let text = format!("{name}: {signature}");
                 let prefix = "struct S { ";
-                let source = Rope::from_iter([prefix, &text, " }"]);
+                let source = Rope::from_iter_small([prefix, &text, " }"]);
                 let runs =
                     language.highlight_text(&source, prefix.len()..prefix.len() + text.len());
                 mk_label(text, &|| 0..completion.label.len(), runs)
@@ -264,7 +264,7 @@ impl LspAdapter for RustLspAdapter {
                 let name = &completion.label;
                 let text = format!("{name}: {signature}",);
                 let prefix = "let ";
-                let source = Rope::from_iter([prefix, &text, " = ();"]);
+                let source = Rope::from_iter_small([prefix, &text, " = ();"]);
                 let runs =
                     language.highlight_text(&source, prefix.len()..prefix.len() + text.len());
                 mk_label(text, &|| 0..completion.label.len(), runs)
@@ -302,7 +302,7 @@ impl LspAdapter for RustLspAdapter {
                     .filter(|it| it.contains(&label))
                     .and_then(|it| Some((it, FULL_SIGNATURE_REGEX.find(it)?)))
                 {
-                    let source = Rope::from(function_signature);
+                    let source = Rope::from_str_small(function_signature);
                     let runs = language.highlight_text(&source, 0..function_signature.len());
                     mk_label(
                         function_signature.to_owned(),
@@ -311,7 +311,7 @@ impl LspAdapter for RustLspAdapter {
                     )
                 } else if let Some((prefix, suffix)) = fn_prefixed {
                     let text = format!("{label}{suffix}");
-                    let source = Rope::from_iter([prefix, " ", &text, " {}"]);
+                    let source = Rope::from_iter_small([prefix, " ", &text, " {}"]);
                     let run_start = prefix.len() + 1;
                     let runs = language.highlight_text(&source, run_start..run_start + text.len());
                     mk_label(text, &|| 0..label.len(), runs)
@@ -322,7 +322,7 @@ impl LspAdapter for RustLspAdapter {
                 {
                     let text = completion.label.clone();
                     let len = text.len();
-                    let source = Rope::from(text.as_str());
+                    let source = Rope::from_str_small(text.as_str());
                     let runs = language.highlight_text(&source, 0..len);
                     mk_label(text, &|| 0..completion.label.len(), runs)
                 } else if detail_left.is_none() {
@@ -399,7 +399,10 @@ impl LspAdapter for RustLspAdapter {
         Some(CodeLabel::new(
             format!("{prefix}{name}"),
             filter_range,
-            language.highlight_text(&Rope::from_iter([prefix, name, suffix]), display_range),
+            language.highlight_text(
+                &Rope::from_iter_small([prefix, name, suffix]),
+                display_range,
+            ),
         ))
     }
 

crates/markdown/src/markdown.rs πŸ”—

@@ -1558,7 +1558,9 @@ impl MarkdownElementBuilder {
 
         if let Some(Some(language)) = self.code_block_stack.last() {
             let mut offset = 0;
-            for (range, highlight_id) in language.highlight_text(&Rope::from(text), 0..text.len()) {
+            for (range, highlight_id) in
+                language.highlight_text(&Rope::from_str_small(text), 0..text.len())
+            {
                 if range.start > offset {
                     self.pending_line
                         .runs

crates/markdown_preview/src/markdown_parser.rs πŸ”—

@@ -779,7 +779,7 @@ impl<'a> MarkdownParser<'a> {
 
         let highlights = if let Some(language) = &language {
             if let Some(registry) = &self.language_registry {
-                let rope: language::Rope = code.as_str().into();
+                let rope = language::Rope::from_str_small(code.as_str());
                 registry
                     .language_for_name_or_extension(language)
                     .await

crates/multi_buffer/src/multi_buffer_tests.rs πŸ”—

@@ -1,6 +1,6 @@
 use super::*;
 use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
-use gpui::{App, TestAppContext};
+use gpui::{App, BackgroundExecutor, TestAppContext};
 use indoc::indoc;
 use language::{Buffer, Rope};
 use parking_lot::RwLock;
@@ -79,9 +79,14 @@ fn test_remote(cx: &mut App) {
         let ops = cx
             .background_executor()
             .block(host_buffer.read(cx).serialize_ops(None, cx));
-        let mut buffer =
-            Buffer::from_proto(ReplicaId::REMOTE_SERVER, Capability::ReadWrite, state, None)
-                .unwrap();
+        let mut buffer = Buffer::from_proto(
+            ReplicaId::REMOTE_SERVER,
+            Capability::ReadWrite,
+            state,
+            None,
+            cx.background_executor(),
+        )
+        .unwrap();
         buffer.apply_ops(
             ops.into_iter()
                 .map(|op| language::proto::deserialize_operation(op).unwrap()),
@@ -1224,7 +1229,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot);
+    assert_line_indents(&snapshot, cx.background_executor());
 
     multibuffer.update(cx, |multibuffer, cx| {
         multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx)
@@ -1248,7 +1253,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot);
+    assert_line_indents(&snapshot, cx.background_executor());
 
     // Expand the first diff hunk
     multibuffer.update(cx, |multibuffer, cx| {
@@ -1300,7 +1305,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot);
+    assert_line_indents(&snapshot, cx.background_executor());
 
     // Edit the buffer before the first hunk
     buffer.update(cx, |buffer, cx| {
@@ -1342,7 +1347,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_chunks_in_ranges(&snapshot);
     assert_consistent_line_numbers(&snapshot);
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot);
+    assert_line_indents(&snapshot, cx.background_executor());
 
     // Recalculate the diff, changing the first diff hunk.
     diff.update(cx, |diff, cx| {
@@ -2067,7 +2072,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
     }
 
     assert_position_translation(&snapshot);
-    assert_line_indents(&snapshot);
+    assert_line_indents(&snapshot, cx.background_executor());
 
     assert_eq!(
         snapshot
@@ -2118,7 +2123,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
         ),
     );
 
-    assert_line_indents(&snapshot);
+    assert_line_indents(&snapshot, cx.background_executor());
 }
 
 /// A naive implementation of a multi-buffer that does not maintain
@@ -2888,7 +2893,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
             );
         }
 
-        let text_rope = Rope::from(expected_text.as_str());
+        let text_rope = Rope::from_str(expected_text.as_str(), cx.background_executor());
         for _ in 0..10 {
             let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right);
             let start_ix = text_rope.clip_offset(rng.random_range(0..=end_ix), Bias::Left);
@@ -3512,7 +3517,7 @@ fn assert_consistent_line_numbers(snapshot: &MultiBufferSnapshot) {
 
 #[track_caller]
 fn assert_position_translation(snapshot: &MultiBufferSnapshot) {
-    let text = Rope::from(snapshot.text());
+    let text = Rope::from_str_small(&snapshot.text());
 
     let mut left_anchors = Vec::new();
     let mut right_anchors = Vec::new();
@@ -3636,10 +3641,10 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) {
     }
 }
 
-fn assert_line_indents(snapshot: &MultiBufferSnapshot) {
+fn assert_line_indents(snapshot: &MultiBufferSnapshot, executor: &BackgroundExecutor) {
     let max_row = snapshot.max_point().row;
     let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id();
-    let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text());
+    let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text(), executor);
     let mut line_indents = text
         .line_indents_in_row_range(0..max_row + 1)
         .collect::<Vec<_>>();

crates/multi_buffer/src/path_key.rs πŸ”—

@@ -1,7 +1,7 @@
 use std::{mem, ops::Range, sync::Arc};
 
 use collections::HashSet;
-use gpui::{App, AppContext, Context, Entity, Task};
+use gpui::{App, AppContext, Context, Entity};
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot};
 use rope::Point;
@@ -117,12 +117,14 @@ impl MultiBuffer {
         buffer: Entity<Buffer>,
         ranges: Vec<Range<text::Anchor>>,
         context_line_count: u32,
-        cx: &mut Context<Self>,
-    ) -> Task<Vec<Range<Anchor>>> {
+        cx: &Context<Self>,
+    ) -> impl Future<Output = Vec<Range<Anchor>>> + use<> {
         let buffer_snapshot = buffer.read(cx).snapshot();
-        cx.spawn(async move |multi_buffer, cx| {
+        let multi_buffer = cx.weak_entity();
+        let mut app = cx.to_async();
+        async move {
             let snapshot = buffer_snapshot.clone();
-            let (excerpt_ranges, new, counts) = cx
+            let (excerpt_ranges, new, counts) = app
                 .background_spawn(async move {
                     let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot));
                     let excerpt_ranges =
@@ -133,7 +135,7 @@ impl MultiBuffer {
                 .await;
 
             multi_buffer
-                .update(cx, move |multi_buffer, cx| {
+                .update(&mut app, move |multi_buffer, cx| {
                     let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path(
                         path_key,
                         buffer,
@@ -147,7 +149,7 @@ impl MultiBuffer {
                 })
                 .ok()
                 .unwrap_or_default()
-        })
+        }
     }
 
     pub(super) fn expand_excerpts_with_paths(

crates/outline_panel/src/outline_panel.rs πŸ”—

@@ -1635,56 +1635,79 @@ impl OutlinePanel {
         let Some(active_editor) = self.active_editor() else {
             return;
         };
-        let mut buffers_to_unfold = HashSet::default();
-        let expanded_entries =
-            self.fs_entries
-                .iter()
-                .fold(HashSet::default(), |mut entries, fs_entry| {
-                    match fs_entry {
-                        FsEntry::ExternalFile(external_file) => {
-                            buffers_to_unfold.insert(external_file.buffer_id);
-                            entries.insert(CollapsedEntry::ExternalFile(external_file.buffer_id));
-                            entries.extend(
-                                self.excerpts
-                                    .get(&external_file.buffer_id)
-                                    .into_iter()
-                                    .flat_map(|excerpts| {
-                                        excerpts.keys().map(|excerpt_id| {
-                                            CollapsedEntry::Excerpt(
-                                                external_file.buffer_id,
-                                                *excerpt_id,
-                                            )
-                                        })
-                                    }),
-                            );
-                        }
-                        FsEntry::Directory(directory) => {
-                            entries.insert(CollapsedEntry::Dir(
-                                directory.worktree_id,
-                                directory.entry.id,
+
+        let mut to_uncollapse: HashSet<CollapsedEntry> = HashSet::default();
+        let mut buffers_to_unfold: HashSet<BufferId> = HashSet::default();
+
+        for fs_entry in &self.fs_entries {
+            match fs_entry {
+                FsEntry::File(FsEntryFile {
+                    worktree_id,
+                    buffer_id,
+                    ..
+                }) => {
+                    to_uncollapse.insert(CollapsedEntry::File(*worktree_id, *buffer_id));
+                    buffers_to_unfold.insert(*buffer_id);
+                }
+                FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => {
+                    to_uncollapse.insert(CollapsedEntry::ExternalFile(*buffer_id));
+                    buffers_to_unfold.insert(*buffer_id);
+                }
+                FsEntry::Directory(FsEntryDirectory {
+                    worktree_id, entry, ..
+                }) => {
+                    to_uncollapse.insert(CollapsedEntry::Dir(*worktree_id, entry.id));
+                }
+            }
+        }
+
+        for (&buffer_id, excerpts) in &self.excerpts {
+            for (&excerpt_id, excerpt) in excerpts {
+                match &excerpt.outlines {
+                    ExcerptOutlines::Outlines(outlines) => {
+                        for outline in outlines {
+                            to_uncollapse.insert(CollapsedEntry::Outline(
+                                buffer_id,
+                                excerpt_id,
+                                outline.range.clone(),
                             ));
                         }
-                        FsEntry::File(file) => {
-                            buffers_to_unfold.insert(file.buffer_id);
-                            entries.insert(CollapsedEntry::File(file.worktree_id, file.buffer_id));
-                            entries.extend(
-                                self.excerpts.get(&file.buffer_id).into_iter().flat_map(
-                                    |excerpts| {
-                                        excerpts.keys().map(|excerpt_id| {
-                                            CollapsedEntry::Excerpt(file.buffer_id, *excerpt_id)
-                                        })
-                                    },
-                                ),
-                            );
+                    }
+                    ExcerptOutlines::Invalidated(outlines) => {
+                        for outline in outlines {
+                            to_uncollapse.insert(CollapsedEntry::Outline(
+                                buffer_id,
+                                excerpt_id,
+                                outline.range.clone(),
+                            ));
                         }
-                    };
-                    entries
-                });
+                    }
+                    ExcerptOutlines::NotFetched => {}
+                }
+                to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id));
+            }
+        }
+
+        for cached in &self.cached_entries {
+            if let PanelEntry::FoldedDirs(FoldedDirsEntry {
+                worktree_id,
+                entries,
+                ..
+            }) = &cached.entry
+            {
+                if let Some(last) = entries.last() {
+                    to_uncollapse.insert(CollapsedEntry::Dir(*worktree_id, last.id));
+                }
+            }
+        }
+
         self.collapsed_entries
-            .retain(|entry| !expanded_entries.contains(entry));
+            .retain(|entry| !to_uncollapse.contains(entry));
+
         active_editor.update(cx, |editor, cx| {
             buffers_to_unfold.retain(|buffer_id| editor.is_buffer_folded(*buffer_id, cx));
         });
+
         if buffers_to_unfold.is_empty() {
             self.update_cached_entries(None, window, cx);
         } else {
@@ -1703,37 +1726,44 @@ impl OutlinePanel {
             return;
         };
         let mut buffers_to_fold = HashSet::default();
-        let new_entries = self
-            .cached_entries
-            .iter()
-            .flat_map(|cached_entry| match &cached_entry.entry {
-                PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
-                    worktree_id, entry, ..
-                })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)),
-                PanelEntry::Fs(FsEntry::File(FsEntryFile {
-                    worktree_id,
-                    buffer_id,
-                    ..
-                })) => {
-                    buffers_to_fold.insert(*buffer_id);
-                    Some(CollapsedEntry::File(*worktree_id, *buffer_id))
-                }
-                PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
-                    buffers_to_fold.insert(external_file.buffer_id);
-                    Some(CollapsedEntry::ExternalFile(external_file.buffer_id))
-                }
-                PanelEntry::FoldedDirs(FoldedDirsEntry {
-                    worktree_id,
-                    entries,
-                    ..
-                }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)),
-                PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
-                    Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
-                }
-                PanelEntry::Search(_) | PanelEntry::Outline(..) => None,
-            })
-            .collect::<Vec<_>>();
-        self.collapsed_entries.extend(new_entries);
+        self.collapsed_entries
+            .extend(self.cached_entries.iter().filter_map(
+                |cached_entry| match &cached_entry.entry {
+                    PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory {
+                        worktree_id,
+                        entry,
+                        ..
+                    })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)),
+                    PanelEntry::Fs(FsEntry::File(FsEntryFile {
+                        worktree_id,
+                        buffer_id,
+                        ..
+                    })) => {
+                        buffers_to_fold.insert(*buffer_id);
+                        Some(CollapsedEntry::File(*worktree_id, *buffer_id))
+                    }
+                    PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
+                        buffers_to_fold.insert(external_file.buffer_id);
+                        Some(CollapsedEntry::ExternalFile(external_file.buffer_id))
+                    }
+                    PanelEntry::FoldedDirs(FoldedDirsEntry {
+                        worktree_id,
+                        entries,
+                        ..
+                    }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)),
+                    PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
+                        Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
+                    }
+                    PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
+                        Some(CollapsedEntry::Outline(
+                            outline.buffer_id,
+                            outline.excerpt_id,
+                            outline.outline.range.clone(),
+                        ))
+                    }
+                    PanelEntry::Search(_) => None,
+                },
+            ));
 
         active_editor.update(cx, |editor, cx| {
             buffers_to_fold.retain(|buffer_id| !editor.is_buffer_folded(*buffer_id, cx));
@@ -6592,6 +6622,60 @@ outline: struct OutlineEntryExcerpt
       search: {{ "something": "static" }}
   src/
     app/(site)/
+    components/
+      ErrorBoundary.tsx  <==== selected
+        search: static"#
+                )
+            );
+        });
+
+        outline_panel.update_in(cx, |outline_panel, window, cx| {
+            outline_panel.collapse_all_entries(&CollapseAllEntries, window, cx);
+        });
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
+        cx.run_until_parked();
+        outline_panel.update(cx, |outline_panel, cx| {
+            assert_eq!(
+                display_entries(
+                    &project,
+                    &snapshot(outline_panel, cx),
+                    &outline_panel.cached_entries,
+                    outline_panel.selected_entry(),
+                    cx,
+                ),
+                format!(r#"frontend-project/"#)
+            );
+        });
+
+        outline_panel.update_in(cx, |outline_panel, window, cx| {
+            outline_panel.expand_all_entries(&ExpandAllEntries, window, cx);
+        });
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
+        cx.run_until_parked();
+        outline_panel.update(cx, |outline_panel, cx| {
+            assert_eq!(
+                display_entries(
+                    &project,
+                    &snapshot(outline_panel, cx),
+                    &outline_panel.cached_entries,
+                    outline_panel.selected_entry(),
+                    cx,
+                ),
+                format!(
+                    r#"frontend-project/
+  public/lottie/
+    syntax-tree.json
+      search: {{ "something": "static" }}
+  src/
+    app/(site)/
+      (about)/jobs/[slug]/
+        page.tsx
+          search: static
+      (blog)/post/[slug]/
+        page.tsx
+          search: static
     components/
       ErrorBoundary.tsx  <==== selected
         search: static"#
@@ -7510,4 +7594,237 @@ outline: fn main()"
             );
         });
     }
+
+    #[gpui::test]
+    async fn test_outline_expand_collapse_all(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            "/test",
+            json!({
+                "src": {
+                    "lib.rs": indoc!("
+                            mod outer {
+                                pub struct OuterStruct {
+                                    field: String,
+                                }
+                                impl OuterStruct {
+                                    pub fn new() -> Self {
+                                        Self { field: String::new() }
+                                    }
+                                    pub fn method(&self) {
+                                        println!(\"{}\", self.field);
+                                    }
+                                }
+                                mod inner {
+                                    pub fn inner_function() {
+                                        let x = 42;
+                                        println!(\"{}\", x);
+                                    }
+                                    pub struct InnerStruct {
+                                        value: i32,
+                                    }
+                                }
+                            }
+                            fn main() {
+                                let s = outer::OuterStruct::new();
+                                s.method();
+                            }
+                        "),
+                }
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+        project.read_with(cx, |project, _| {
+            project.languages().add(Arc::new(
+                rust_lang()
+                    .with_outline_query(
+                        r#"
+                            (struct_item
+                                (visibility_modifier)? @context
+                                "struct" @context
+                                name: (_) @name) @item
+                            (impl_item
+                                "impl" @context
+                                trait: (_)? @context
+                                "for"? @context
+                                type: (_) @context
+                                body: (_)) @item
+                            (function_item
+                                (visibility_modifier)? @context
+                                "fn" @context
+                                name: (_) @name
+                                parameters: (_) @context) @item
+                            (mod_item
+                                (visibility_modifier)? @context
+                                "mod" @context
+                                name: (_) @name) @item
+                            (enum_item
+                                (visibility_modifier)? @context
+                                "enum" @context
+                                name: (_) @name) @item
+                            (field_declaration
+                                (visibility_modifier)? @context
+                                name: (_) @name
+                                ":" @context
+                                type: (_) @context) @item
+                            "#,
+                    )
+                    .unwrap(),
+            ))
+        });
+        let workspace = add_outline_panel(&project, cx).await;
+        let cx = &mut VisualTestContext::from_window(*workspace, cx);
+        let outline_panel = outline_panel(&workspace, cx);
+
+        outline_panel.update_in(cx, |outline_panel, window, cx| {
+            outline_panel.set_active(true, window, cx)
+        });
+
+        workspace
+            .update(cx, |workspace, window, cx| {
+                workspace.open_abs_path(
+                    PathBuf::from("/test/src/lib.rs"),
+                    OpenOptions {
+                        visible: Some(OpenVisible::All),
+                        ..Default::default()
+                    },
+                    window,
+                    cx,
+                )
+            })
+            .unwrap()
+            .await
+            .unwrap();
+
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(500));
+        cx.run_until_parked();
+
+        // Force another update cycle to ensure outlines are fetched
+        outline_panel.update_in(cx, |panel, window, cx| {
+            panel.update_non_fs_items(window, cx);
+            panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx);
+        });
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(500));
+        cx.run_until_parked();
+
+        outline_panel.update(cx, |outline_panel, cx| {
+            assert_eq!(
+                display_entries(
+                    &project,
+                    &snapshot(outline_panel, cx),
+                    &outline_panel.cached_entries,
+                    outline_panel.selected_entry(),
+                    cx,
+                ),
+                indoc!(
+                    "
+outline: mod outer  <==== selected
+  outline: pub struct OuterStruct
+    outline: field: String
+  outline: impl OuterStruct
+    outline: pub fn new()
+    outline: pub fn method(&self)
+  outline: mod inner
+    outline: pub fn inner_function()
+    outline: pub struct InnerStruct
+      outline: value: i32
+outline: fn main()"
+                )
+            );
+        });
+
+        let _parent_outline = outline_panel
+            .read_with(cx, |panel, _cx| {
+                panel
+                    .cached_entries
+                    .iter()
+                    .find_map(|entry| match &entry.entry {
+                        PanelEntry::Outline(OutlineEntry::Outline(outline))
+                            if panel
+                                .outline_children_cache
+                                .get(&outline.buffer_id)
+                                .and_then(|children_map| {
+                                    let key =
+                                        (outline.outline.range.clone(), outline.outline.depth);
+                                    children_map.get(&key)
+                                })
+                                .copied()
+                                .unwrap_or(false) =>
+                        {
+                            Some(entry.entry.clone())
+                        }
+                        _ => None,
+                    })
+            })
+            .expect("Should find an outline with children");
+
+        // Collapse all entries
+        outline_panel.update_in(cx, |panel, window, cx| {
+            panel.collapse_all_entries(&CollapseAllEntries, window, cx);
+        });
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
+        cx.run_until_parked();
+
+        let expected_collapsed_output = indoc!(
+            "
+        outline: mod outer  <==== selected
+        outline: fn main()"
+        );
+
+        outline_panel.update(cx, |panel, cx| {
+            assert_eq! {
+                display_entries(
+                    &project,
+                    &snapshot(panel, cx),
+                    &panel.cached_entries,
+                    panel.selected_entry(),
+                    cx,
+                ),
+                expected_collapsed_output
+            };
+        });
+
+        // Expand all entries
+        outline_panel.update_in(cx, |panel, window, cx| {
+            panel.expand_all_entries(&ExpandAllEntries, window, cx);
+        });
+        cx.executor()
+            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
+        cx.run_until_parked();
+
+        let expected_expanded_output = indoc!(
+            "
+        outline: mod outer  <==== selected
+          outline: pub struct OuterStruct
+            outline: field: String
+          outline: impl OuterStruct
+            outline: pub fn new()
+            outline: pub fn method(&self)
+          outline: mod inner
+            outline: pub fn inner_function()
+            outline: pub struct InnerStruct
+              outline: value: i32
+        outline: fn main()"
+        );
+
+        outline_panel.update(cx, |panel, cx| {
+            assert_eq! {
+                display_entries(
+                    &project,
+                    &snapshot(panel, cx),
+                    &panel.cached_entries,
+                    panel.selected_entry(),
+                    cx,
+                ),
+                expected_expanded_output
+            };
+        });
+    }
 }

crates/picker/src/picker.rs πŸ”—

@@ -314,7 +314,7 @@ impl<D: PickerDelegate> Picker<D> {
             confirm_on_update: None,
             width: None,
             widest_item: None,
-            max_height: Some(rems(18.).into()),
+            max_height: Some(rems(24.).into()),
             show_scrollbar: false,
             is_modal: true,
         };

crates/project/src/agent_server_store.rs πŸ”—

@@ -1638,7 +1638,9 @@ impl BuiltinAgentServerSettings {
 impl From<settings::BuiltinAgentServerSettings> for BuiltinAgentServerSettings {
     fn from(value: settings::BuiltinAgentServerSettings) -> Self {
         BuiltinAgentServerSettings {
-            path: value.path,
+            path: value
+                .path
+                .map(|p| PathBuf::from(shellexpand::tilde(&p.to_string_lossy()).as_ref())),
             args: value.args,
             env: value.env,
             ignore_system_version: value.ignore_system_version,
@@ -1673,7 +1675,7 @@ impl From<settings::CustomAgentServerSettings> for CustomAgentServerSettings {
     fn from(value: settings::CustomAgentServerSettings) -> Self {
         CustomAgentServerSettings {
             command: AgentServerCommand {
-                path: value.path,
+                path: PathBuf::from(shellexpand::tilde(&value.path.to_string_lossy()).as_ref()),
                 args: value.args,
                 env: value.env,
             },
@@ -1893,4 +1895,40 @@ mod extension_agent_tests {
         let target = manifest_entry.targets.get("linux-x86_64").unwrap();
         assert_eq!(target.cmd, "./release-agent");
     }
+
+    #[test]
+    fn test_tilde_expansion_in_settings() {
+        let settings = settings::BuiltinAgentServerSettings {
+            path: Some(PathBuf::from("~/bin/agent")),
+            args: Some(vec!["--flag".into()]),
+            env: None,
+            ignore_system_version: None,
+            default_mode: None,
+        };
+
+        let BuiltinAgentServerSettings { path, .. } = settings.into();
+
+        let path = path.unwrap();
+        assert!(
+            !path.to_string_lossy().starts_with("~"),
+            "Tilde should be expanded for builtin agent path"
+        );
+
+        let settings = settings::CustomAgentServerSettings {
+            path: PathBuf::from("~/custom/agent"),
+            args: vec!["serve".into()],
+            env: None,
+            default_mode: None,
+        };
+
+        let CustomAgentServerSettings {
+            command: AgentServerCommand { path, .. },
+            ..
+        } = settings.into();
+
+        assert!(
+            !path.to_string_lossy().starts_with("~"),
+            "Tilde should be expanded for custom agent path"
+        );
+    }
 }

crates/project/src/buffer_store.rs πŸ”—

@@ -180,7 +180,13 @@ impl RemoteBufferStore {
                         buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?)
                             as Arc<dyn language::File>);
                     }
-                    Buffer::from_proto(replica_id, capability, state, buffer_file)
+                    Buffer::from_proto(
+                        replica_id,
+                        capability,
+                        state,
+                        buffer_file,
+                        cx.background_executor(),
+                    )
                 });
 
                 match buffer_result {
@@ -619,32 +625,33 @@ impl LocalBufferStore {
         worktree: Entity<Worktree>,
         cx: &mut Context<BufferStore>,
     ) -> Task<Result<Entity<Buffer>>> {
-        let load_buffer = worktree.update(cx, |worktree, cx| {
-            let load_file = worktree.load_file(path.as_ref(), cx);
-            let reservation = cx.reserve_entity();
-            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
-            let path = path.clone();
-            cx.spawn(async move |_, cx| {
-                let loaded = load_file.await.with_context(|| {
-                    format!("Could not open path: {}", path.display(PathStyle::local()))
-                })?;
-                let text_buffer = cx
-                    .background_spawn(async move {
-                        text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text)
-                    })
-                    .await;
-                cx.insert_entity(reservation, |_| {
-                    Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
-                })
-            })
-        });
-
+        let load_file = worktree.update(cx, |worktree, cx| worktree.load_file(path.as_ref(), cx));
         cx.spawn(async move |this, cx| {
-            let buffer = match load_buffer.await {
-                Ok(buffer) => Ok(buffer),
+            let path = path.clone();
+            let buffer = match load_file.await.with_context(|| {
+                format!("Could not open path: {}", path.display(PathStyle::local()))
+            }) {
+                Ok(loaded) => {
+                    let reservation = cx.reserve_entity::<Buffer>()?;
+                    let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
+                    let executor = cx.background_executor().clone();
+                    let text_buffer = cx
+                        .background_spawn(async move {
+                            text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text, &executor)
+                        })
+                        .await;
+                    cx.insert_entity(reservation, |_| {
+                        Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
+                    })?
+                }
                 Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                     let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
-                    let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, "");
+                    let text_buffer = text::Buffer::new(
+                        ReplicaId::LOCAL,
+                        buffer_id,
+                        "",
+                        cx.background_executor(),
+                    );
                     Buffer::build(
                         text_buffer,
                         Some(Arc::new(File {
@@ -657,9 +664,9 @@ impl LocalBufferStore {
                         })),
                         Capability::ReadWrite,
                     )
-                }),
-                Err(e) => Err(e),
-            }?;
+                })?,
+                Err(e) => return Err(e),
+            };
             this.update(cx, |this, cx| {
                 this.add_buffer(buffer.clone(), cx)?;
                 let buffer_id = buffer.read(cx).remote_id();
@@ -840,6 +847,7 @@ impl BufferStore {
 
                 entry
                     .insert(
+                        // todo(lw): hot foreground spawn
                         cx.spawn(async move |this, cx| {
                             let load_result = load_buffer.await;
                             this.update(cx, |this, cx| {

crates/project/src/git_store.rs πŸ”—

@@ -709,6 +709,7 @@ impl GitStore {
                     repo.load_committed_text(buffer_id, repo_path, cx)
                 });
 
+                // todo(lw): hot foreground spawn
                 cx.spawn(async move |this, cx| {
                     Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                         .await
@@ -3716,20 +3717,15 @@ impl Repository {
         Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
     }
 
-    pub fn stage_entries(
+    fn save_buffers<'a>(
         &self,
-        entries: Vec<RepoPath>,
+        entries: impl IntoIterator<Item = &'a RepoPath>,
         cx: &mut Context<Self>,
-    ) -> Task<anyhow::Result<()>> {
-        if entries.is_empty() {
-            return Task::ready(Ok(()));
-        }
-        let id = self.id;
-
+    ) -> Vec<Task<anyhow::Result<()>>> {
         let mut save_futures = Vec::new();
         if let Some(buffer_store) = self.buffer_store(cx) {
             buffer_store.update(cx, |buffer_store, cx| {
-                for path in &entries {
+                for path in entries {
                     let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                         continue;
                     };
@@ -3745,37 +3741,64 @@ impl Repository {
                 }
             })
         }
+        save_futures
+    }
+
+    pub fn stage_entries(
+        &self,
+        entries: Vec<RepoPath>,
+        cx: &mut Context<Self>,
+    ) -> Task<anyhow::Result<()>> {
+        if entries.is_empty() {
+            return Task::ready(Ok(()));
+        }
+        let id = self.id;
+        let save_tasks = self.save_buffers(&entries, cx);
+        let paths = entries
+            .iter()
+            .map(|p| p.as_unix_str())
+            .collect::<Vec<_>>()
+            .join(" ");
+        let status = format!("git add {paths}");
+        let job_key = match entries.len() {
+            1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
+            _ => None,
+        };
 
         cx.spawn(async move |this, cx| {
-            for save_future in save_futures {
-                save_future.await?;
+            for save_task in save_tasks {
+                save_task.await?;
             }
 
             this.update(cx, |this, _| {
-                this.send_job(None, move |git_repo, _cx| async move {
-                    match git_repo {
-                        RepositoryState::Local {
-                            backend,
-                            environment,
-                            ..
-                        } => backend.stage_paths(entries, environment.clone()).await,
-                        RepositoryState::Remote { project_id, client } => {
-                            client
-                                .request(proto::Stage {
-                                    project_id: project_id.0,
-                                    repository_id: id.to_proto(),
-                                    paths: entries
-                                        .into_iter()
-                                        .map(|repo_path| repo_path.to_proto())
-                                        .collect(),
-                                })
-                                .await
-                                .context("sending stage request")?;
+                this.send_keyed_job(
+                    job_key,
+                    Some(status.into()),
+                    move |git_repo, _cx| async move {
+                        match git_repo {
+                            RepositoryState::Local {
+                                backend,
+                                environment,
+                                ..
+                            } => backend.stage_paths(entries, environment.clone()).await,
+                            RepositoryState::Remote { project_id, client } => {
+                                client
+                                    .request(proto::Stage {
+                                        project_id: project_id.0,
+                                        repository_id: id.to_proto(),
+                                        paths: entries
+                                            .into_iter()
+                                            .map(|repo_path| repo_path.to_proto())
+                                            .collect(),
+                                    })
+                                    .await
+                                    .context("sending stage request")?;
 
-                            Ok(())
+                                Ok(())
+                            }
                         }
-                    }
-                })
+                    },
+                )
             })?
             .await??;
 
@@ -3792,57 +3815,52 @@ impl Repository {
             return Task::ready(Ok(()));
         }
         let id = self.id;
-
-        let mut save_futures = Vec::new();
-        if let Some(buffer_store) = self.buffer_store(cx) {
-            buffer_store.update(cx, |buffer_store, cx| {
-                for path in &entries {
-                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
-                        continue;
-                    };
-                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
-                        && buffer
-                            .read(cx)
-                            .file()
-                            .is_some_and(|file| file.disk_state().exists())
-                        && buffer.read(cx).has_unsaved_edits()
-                    {
-                        save_futures.push(buffer_store.save_buffer(buffer, cx));
-                    }
-                }
-            })
-        }
+        let save_tasks = self.save_buffers(&entries, cx);
+        let paths = entries
+            .iter()
+            .map(|p| p.as_unix_str())
+            .collect::<Vec<_>>()
+            .join(" ");
+        let status = format!("git reset {paths}");
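+        // As with staging, only a single-path unstage is keyed by that path's index write.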
+        let job_key = match entries.len() {
+            1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
+            _ => None,
+        };
 
         cx.spawn(async move |this, cx| {
-            for save_future in save_futures {
-                save_future.await?;
+            for save_task in save_tasks {
+                save_task.await?;
             }
 
             this.update(cx, |this, _| {
-                this.send_job(None, move |git_repo, _cx| async move {
-                    match git_repo {
-                        RepositoryState::Local {
-                            backend,
-                            environment,
-                            ..
-                        } => backend.unstage_paths(entries, environment).await,
-                        RepositoryState::Remote { project_id, client } => {
-                            client
-                                .request(proto::Unstage {
-                                    project_id: project_id.0,
-                                    repository_id: id.to_proto(),
-                                    paths: entries
-                                        .into_iter()
-                                        .map(|repo_path| repo_path.to_proto())
-                                        .collect(),
-                                })
-                                .await
-                                .context("sending unstage request")?;
+                this.send_keyed_job(
+                    job_key,
+                    Some(status.into()),
+                    move |git_repo, _cx| async move {
+                        match git_repo {
+                            RepositoryState::Local {
+                                backend,
+                                environment,
+                                ..
+                            } => backend.unstage_paths(entries, environment).await,
+                            RepositoryState::Remote { project_id, client } => {
+                                client
+                                    .request(proto::Unstage {
+                                        project_id: project_id.0,
+                                        repository_id: id.to_proto(),
+                                        paths: entries
+                                            .into_iter()
+                                            .map(|repo_path| repo_path.to_proto())
+                                            .collect(),
+                                    })
+                                    .await
+                                    .context("sending unstage request")?;
 
-                            Ok(())
+                                Ok(())
+                            }
                         }
-                    }
-                })
+                    },
+                )
             })?
             .await??;
 

crates/project/src/git_store/conflict_set.rs πŸ”—

@@ -276,8 +276,8 @@ mod tests {
     use util::{path, rel_path::rel_path};
     use worktree::WorktreeSettings;
 
-    #[test]
-    fn test_parse_conflicts_in_buffer() {
+    #[gpui::test]
+    fn test_parse_conflicts_in_buffer(cx: &mut TestAppContext) {
         // Create a buffer with conflict markers
         let test_content = r#"
             This is some text before the conflict.
@@ -299,7 +299,12 @@ mod tests {
         .unindent();
 
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
+        let buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            buffer_id,
+            test_content,
+            cx.background_executor(),
+        );
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
@@ -355,8 +360,8 @@ mod tests {
         assert_eq!(conflicts_in_range.len(), 0);
     }
 
-    #[test]
-    fn test_nested_conflict_markers() {
+    #[gpui::test]
+    fn test_nested_conflict_markers(cx: &mut TestAppContext) {
         // Create a buffer with nested conflict markers
         let test_content = r#"
             This is some text before the conflict.
@@ -374,7 +379,12 @@ mod tests {
         .unindent();
 
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
+        let buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            buffer_id,
+            test_content,
+            cx.background_executor(),
+        );
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
@@ -396,8 +406,8 @@ mod tests {
         assert_eq!(their_text, "This is their version in a nested conflict\n");
     }
 
-    #[test]
-    fn test_conflict_markers_at_eof() {
+    #[gpui::test]
+    fn test_conflict_markers_at_eof(cx: &mut TestAppContext) {
         let test_content = r#"
             <<<<<<< ours
             =======
@@ -405,15 +415,20 @@ mod tests {
             >>>>>>> "#
             .unindent();
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
+        let buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            buffer_id,
+            test_content,
+            cx.background_executor(),
+        );
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);
         assert_eq!(conflict_snapshot.conflicts.len(), 1);
     }
 
-    #[test]
-    fn test_conflicts_in_range() {
+    #[gpui::test]
+    fn test_conflicts_in_range(cx: &mut TestAppContext) {
         // Create a buffer with conflict markers
         let test_content = r#"
             one
@@ -447,7 +462,12 @@ mod tests {
         .unindent();
 
         let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone());
+        let buffer = Buffer::new(
+            ReplicaId::LOCAL,
+            buffer_id,
+            test_content.clone(),
+            cx.background_executor(),
+        );
         let snapshot = buffer.snapshot();
 
         let conflict_snapshot = ConflictSet::parse(&snapshot);

crates/project/src/lsp_store.rs πŸ”—

@@ -853,23 +853,32 @@ impl LocalLspStore {
         language_server
             .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
                 let lsp_store = lsp_store.clone();
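+                // Monotonically increasing id attached to each refresh so stale refresh requests can be ignored downstream.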
+                let request_id = Arc::new(AtomicUsize::new(0));
                 move |(), cx| {
-                    let this = lsp_store.clone();
+                    let lsp_store = lsp_store.clone();
+                    let request_id = request_id.clone();
                     let mut cx = cx.clone();
                     async move {
-                        this.update(&mut cx, |lsp_store, cx| {
-                            cx.emit(LspStoreEvent::RefreshInlayHints(server_id));
-                            lsp_store
-                                .downstream_client
-                                .as_ref()
-                                .map(|(client, project_id)| {
-                                    client.send(proto::RefreshInlayHints {
-                                        project_id: *project_id,
-                                        server_id: server_id.to_proto(),
+                        lsp_store
+                            .update(&mut cx, |lsp_store, cx| {
+                                let request_id =
+                                    Some(request_id.fetch_add(1, atomic::Ordering::AcqRel));
+                                cx.emit(LspStoreEvent::RefreshInlayHints {
+                                    server_id,
+                                    request_id,
+                                });
+                                lsp_store
+                                    .downstream_client
+                                    .as_ref()
+                                    .map(|(client, project_id)| {
+                                        client.send(proto::RefreshInlayHints {
+                                            project_id: *project_id,
+                                            server_id: server_id.to_proto(),
+                                            request_id: request_id.map(|id| id as u64),
+                                        })
                                     })
-                                })
-                        })?
-                        .transpose()?;
+                            })?
+                            .transpose()?;
                         Ok(())
                     }
                 }
@@ -3659,7 +3668,10 @@ pub enum LspStoreEvent {
         new_language: Option<Arc<Language>>,
     },
     Notification(String),
-    RefreshInlayHints(LanguageServerId),
+    RefreshInlayHints {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     RefreshCodeLens,
     DiagnosticsUpdated {
         server_id: LanguageServerId,
@@ -6636,14 +6648,22 @@ impl LspStore {
         cx: &mut Context<Self>,
     ) -> HashMap<Range<BufferRow>, Task<Result<CacheInlayHints>>> {
         let buffer_snapshot = buffer.read(cx).snapshot();
-        let for_server = if let InvalidationStrategy::RefreshRequested(server_id) = invalidate {
+        let next_hint_id = self.next_hint_id.clone();
+        let lsp_data = self.latest_lsp_data(&buffer, cx);
+        let mut lsp_refresh_requested = false;
+        let for_server = if let InvalidationStrategy::RefreshRequested {
+            server_id,
+            request_id,
+        } = invalidate
+        {
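+            // Cached hints are only invalidated when this refresh carries a newer request id than the last one handled for this server.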
+            let invalidated = lsp_data
+                .inlay_hints
+                .invalidate_for_server_refresh(server_id, request_id);
+            lsp_refresh_requested = invalidated;
             Some(server_id)
         } else {
             None
         };
-        let invalidate_cache = invalidate.should_invalidate();
-        let next_hint_id = self.next_hint_id.clone();
-        let lsp_data = self.latest_lsp_data(&buffer, cx);
         let existing_inlay_hints = &mut lsp_data.inlay_hints;
         let known_chunks = known_chunks
             .filter(|(known_version, _)| !lsp_data.buffer_version.changed_since(known_version))
@@ -6651,8 +6671,8 @@ impl LspStore {
             .unwrap_or_default();
 
         let mut hint_fetch_tasks = Vec::new();
-        let mut cached_inlay_hints = HashMap::default();
-        let mut ranges_to_query = Vec::new();
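+        // Both are allocated lazily, so chunks with nothing cached and nothing to query cost nothing extra.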
+        let mut cached_inlay_hints = None;
+        let mut ranges_to_query = None;
         let applicable_chunks = existing_inlay_hints
             .applicable_chunks(ranges.as_slice())
             .filter(|chunk| !known_chunks.contains(&(chunk.start..chunk.end)))
@@ -6667,12 +6687,12 @@ impl LspStore {
             match (
                 existing_inlay_hints
                     .cached_hints(&row_chunk)
-                    .filter(|_| !invalidate_cache)
+                    .filter(|_| !lsp_refresh_requested)
                     .cloned(),
                 existing_inlay_hints
                     .fetched_hints(&row_chunk)
                     .as_ref()
-                    .filter(|_| !invalidate_cache)
+                    .filter(|_| !lsp_refresh_requested)
                     .cloned(),
             ) {
                 (None, None) => {
@@ -6681,19 +6701,18 @@ impl LspStore {
                     } else {
                         Point::new(row_chunk.end, 0)
                     };
-                    ranges_to_query.push((
+                    ranges_to_query.get_or_insert_with(Vec::new).push((
                         row_chunk,
                         buffer_snapshot.anchor_before(Point::new(row_chunk.start, 0))
                             ..buffer_snapshot.anchor_after(end),
                     ));
                 }
-                (None, Some(fetched_hints)) => {
-                    hint_fetch_tasks.push((row_chunk, fetched_hints.clone()))
-                }
+                (None, Some(fetched_hints)) => hint_fetch_tasks.push((row_chunk, fetched_hints)),
                 (Some(cached_hints), None) => {
                     for (server_id, cached_hints) in cached_hints {
                         if for_server.is_none_or(|for_server| for_server == server_id) {
                             cached_inlay_hints
+                                .get_or_insert_with(HashMap::default)
                                 .entry(row_chunk.start..row_chunk.end)
                                 .or_insert_with(HashMap::default)
                                 .entry(server_id)
@@ -6703,10 +6722,11 @@ impl LspStore {
                     }
                 }
                 (Some(cached_hints), Some(fetched_hints)) => {
-                    hint_fetch_tasks.push((row_chunk, fetched_hints.clone()));
+                    hint_fetch_tasks.push((row_chunk, fetched_hints));
                     for (server_id, cached_hints) in cached_hints {
                         if for_server.is_none_or(|for_server| for_server == server_id) {
                             cached_inlay_hints
+                                .get_or_insert_with(HashMap::default)
                                 .entry(row_chunk.start..row_chunk.end)
                                 .or_insert_with(HashMap::default)
                                 .entry(server_id)
@@ -6718,18 +6738,18 @@ impl LspStore {
             }
         }
 
-        let cached_chunk_data = cached_inlay_hints
-            .into_iter()
-            .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints))))
-            .collect();
-        if hint_fetch_tasks.is_empty() && ranges_to_query.is_empty() {
-            cached_chunk_data
+        if hint_fetch_tasks.is_empty()
+            && ranges_to_query
+                .as_ref()
+                .is_none_or(|ranges| ranges.is_empty())
+            && let Some(cached_inlay_hints) = cached_inlay_hints
+        {
+            cached_inlay_hints
+                .into_iter()
+                .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints))))
+                .collect()
         } else {
-            if invalidate_cache {
-                lsp_data.inlay_hints.clear();
-            }
-
-            for (chunk, range_to_query) in ranges_to_query {
+            for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() {
                 let next_hint_id = next_hint_id.clone();
                 let buffer = buffer.clone();
                 let new_inlay_hints = cx
@@ -6745,31 +6765,38 @@ impl LspStore {
                                     let update_cache = !lsp_data
                                         .buffer_version
                                         .changed_since(&buffer.read(cx).version());
-                                    new_hints_by_server
-                                        .into_iter()
-                                        .map(|(server_id, new_hints)| {
-                                            let new_hints = new_hints
-                                                .into_iter()
-                                                .map(|new_hint| {
-                                                    (
-                                                        InlayId::Hint(next_hint_id.fetch_add(
-                                                            1,
-                                                            atomic::Ordering::AcqRel,
-                                                        )),
-                                                        new_hint,
-                                                    )
-                                                })
-                                                .collect::<Vec<_>>();
-                                            if update_cache {
-                                                lsp_data.inlay_hints.insert_new_hints(
-                                                    chunk,
-                                                    server_id,
-                                                    new_hints.clone(),
-                                                );
-                                            }
-                                            (server_id, new_hints)
-                                        })
-                                        .collect()
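+                                    // Nothing returned for this chunk: invalidate its cached entry rather than caching an empty result.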
+                                    if new_hints_by_server.is_empty() {
+                                        if update_cache {
+                                            lsp_data.inlay_hints.invalidate_for_chunk(chunk);
+                                        }
+                                        HashMap::default()
+                                    } else {
+                                        new_hints_by_server
+                                            .into_iter()
+                                            .map(|(server_id, new_hints)| {
+                                                let new_hints = new_hints
+                                                    .into_iter()
+                                                    .map(|new_hint| {
+                                                        (
+                                                            InlayId::Hint(next_hint_id.fetch_add(
+                                                                1,
+                                                                atomic::Ordering::AcqRel,
+                                                            )),
+                                                            new_hint,
+                                                        )
+                                                    })
+                                                    .collect::<Vec<_>>();
+                                                if update_cache {
+                                                    lsp_data.inlay_hints.insert_new_hints(
+                                                        chunk,
+                                                        server_id,
+                                                        new_hints.clone(),
+                                                    );
+                                                }
+                                                (server_id, new_hints)
+                                            })
+                                            .collect()
+                                    }
                                 })
                             })
                             .map_err(Arc::new)
@@ -6781,22 +6808,25 @@ impl LspStore {
                 hint_fetch_tasks.push((chunk, new_inlay_hints));
             }
 
-            let mut combined_data = cached_chunk_data;
-            combined_data.extend(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| {
-                (
-                    chunk.start..chunk.end,
-                    cx.spawn(async move |_, _| {
-                        hints_fetch.await.map_err(|e| {
-                            if e.error_code() != ErrorCode::Internal {
-                                anyhow!(e.error_code())
-                            } else {
-                                anyhow!("{e:#}")
-                            }
-                        })
-                    }),
-                )
-            }));
-            combined_data
+            cached_inlay_hints
+                .unwrap_or_default()
+                .into_iter()
+                .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints))))
+                .chain(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| {
+                    (
+                        chunk.start..chunk.end,
+                        cx.spawn(async move |_, _| {
+                            hints_fetch.await.map_err(|e| {
+                                if e.error_code() != ErrorCode::Internal {
+                                    anyhow!(e.error_code())
+                                } else {
+                                    anyhow!("{e:#}")
+                                }
+                            })
+                        }),
+                    )
+                }))
+                .collect()
         }
     }
 
@@ -9604,7 +9634,10 @@ impl LspStore {
             if let Some(work) = status.pending_work.remove(&token)
                 && !work.is_disk_based_diagnostics_progress
             {
-                cx.emit(LspStoreEvent::RefreshInlayHints(language_server_id));
+                cx.emit(LspStoreEvent::RefreshInlayHints {
+                    server_id: language_server_id,
+                    request_id: None,
+                });
             }
             cx.notify();
         }
@@ -9743,9 +9776,10 @@ impl LspStore {
         mut cx: AsyncApp,
     ) -> Result<proto::Ack> {
         lsp_store.update(&mut cx, |_, cx| {
-            cx.emit(LspStoreEvent::RefreshInlayHints(
-                LanguageServerId::from_proto(envelope.payload.server_id),
-            ));
+            cx.emit(LspStoreEvent::RefreshInlayHints {
+                server_id: LanguageServerId::from_proto(envelope.payload.server_id),
+                request_id: envelope.payload.request_id.map(|id| id as usize),
+            });
         })?;
         Ok(proto::Ack {})
     }
@@ -10972,7 +11006,6 @@ impl LspStore {
             language_server.name(),
             Some(key.worktree_id),
         ));
-        cx.emit(LspStoreEvent::RefreshInlayHints(server_id));
 
         let server_capabilities = language_server.capabilities();
         if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() {

crates/project/src/lsp_store/inlay_hint_cache.rs πŸ”—

@@ -19,7 +19,10 @@ pub enum InvalidationStrategy {
     /// Demands that all needed inlay hints be re-queried and all cached entries invalidated, but does not require an instant update alongside the invalidation.
     ///
     /// Although nothing forbids a language server from sending this request on every edit, it is expected only when some internal server state changes that would otherwise be invisible to the editor.
-    RefreshRequested(LanguageServerId),
+    RefreshRequested {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited in at least one place.
     /// Neither the editor nor the LSP can tell which open files' hints are unaffected, so all of them have to be invalidated and re-queried quickly enough to feel responsive, while still debouncing to avoid re-fetching hints on every rapid keystroke sequence.
     BufferEdited,
@@ -36,7 +39,7 @@ impl InvalidationStrategy {
     pub fn should_invalidate(&self) -> bool {
         matches!(
             self,
-            InvalidationStrategy::RefreshRequested(_) | InvalidationStrategy::BufferEdited
+            InvalidationStrategy::RefreshRequested { .. } | InvalidationStrategy::BufferEdited
         )
     }
 }
@@ -47,6 +50,7 @@ pub struct BufferInlayHints {
     hints_by_chunks: Vec<Option<CacheInlayHints>>,
     fetches_by_chunks: Vec<Option<CacheInlayHintsTask>>,
     hints_by_id: HashMap<InlayId, HintForId>,
+    latest_invalidation_requests: HashMap<LanguageServerId, Option<usize>>,
     pub(super) hint_resolves: HashMap<InlayId, Shared<Task<()>>>,
 }
 
@@ -104,6 +108,7 @@ impl BufferInlayHints {
         Self {
             hints_by_chunks: vec![None; buffer_chunks.len()],
             fetches_by_chunks: vec![None; buffer_chunks.len()],
+            latest_invalidation_requests: HashMap::default(),
             hints_by_id: HashMap::default(),
             hint_resolves: HashMap::default(),
             snapshot,
@@ -176,6 +181,7 @@ impl BufferInlayHints {
         self.fetches_by_chunks = vec![None; self.buffer_chunks.len()];
         self.hints_by_id.clear();
         self.hint_resolves.clear();
+        self.latest_invalidation_requests.clear();
     }
 
     pub fn insert_new_hints(
@@ -222,4 +228,48 @@ impl BufferInlayHints {
     pub fn buffer_chunks_len(&self) -> usize {
         self.buffer_chunks.len()
     }
+
+    pub(crate) fn invalidate_for_server_refresh(
+        &mut self,
+        for_server: LanguageServerId,
+        request_id: Option<usize>,
+    ) -> bool {
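+        // Refreshes whose request id is not newer than the last one recorded for this server are ignored.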
+        match self.latest_invalidation_requests.entry(for_server) {
+            hash_map::Entry::Occupied(mut o) => {
+                if request_id > *o.get() {
+                    o.insert(request_id);
+                } else {
+                    return false;
+                }
+            }
+            hash_map::Entry::Vacant(v) => {
+                v.insert(request_id);
+            }
+        }
+
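+        // Remove this server's hints from every chunk; affected chunks also lose their per-hint bookkeeping and pending fetch.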
+        for (chunk_id, chunk_data) in self.hints_by_chunks.iter_mut().enumerate() {
+            if let Some(removed_hints) = chunk_data
+                .as_mut()
+                .and_then(|chunk_data| chunk_data.remove(&for_server))
+            {
+                for (id, _) in removed_hints {
+                    self.hints_by_id.remove(&id);
+                    self.hint_resolves.remove(&id);
+                }
+                self.fetches_by_chunks[chunk_id] = None;
+            }
+        }
+
+        true
+    }
+
+    pub(crate) fn invalidate_for_chunk(&mut self, chunk: BufferChunk) {
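+        // Drop the chunk's pending fetch and cached hints, removing their per-hint bookkeeping.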
+        self.fetches_by_chunks[chunk.id] = None;
+        if let Some(hints_by_server) = self.hints_by_chunks[chunk.id].take() {
+            for (hint_id, _) in hints_by_server.into_values().flatten() {
+                self.hints_by_id.remove(&hint_id);
+                self.hint_resolves.remove(&hint_id);
+            }
+        }
+    }
 }

crates/project/src/prettier_store.rs πŸ”—

@@ -13,7 +13,9 @@ use futures::{
     future::{self, Shared},
     stream::FuturesUnordered,
 };
-use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity};
+use gpui::{
+    AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, WeakEntity,
+};
 use language::{
     Buffer, LanguageRegistry, LocalFile,
     language_settings::{Formatter, LanguageSettings},
@@ -558,99 +560,137 @@ impl PrettierStore {
         let plugins_to_install = new_plugins.clone();
         let fs = Arc::clone(&self.fs);
         let new_installation_task = cx
-            .spawn(async move  |prettier_store, cx| {
-                cx.background_executor().timer(Duration::from_millis(30)).await;
+            .spawn(async move |prettier_store, cx| {
+                cx.background_executor()
+                    .timer(Duration::from_millis(30))
+                    .await;
                 let location_data = prettier_store.update(cx, |prettier_store, cx| {
-                    worktree.and_then(|worktree_id| {
-                        prettier_store.worktree_store
-                            .read(cx)
-                            .worktree_for_id(worktree_id, cx)
-                            .map(|worktree| worktree.read(cx).abs_path())
-                    }).map(|locate_from| {
-                        let installed_prettiers = prettier_store.prettier_instances.keys().cloned().collect();
-                        (locate_from, installed_prettiers)
-                    })
+                    worktree
+                        .and_then(|worktree_id| {
+                            prettier_store
+                                .worktree_store
+                                .read(cx)
+                                .worktree_for_id(worktree_id, cx)
+                                .map(|worktree| worktree.read(cx).abs_path())
+                        })
+                        .map(|locate_from| {
+                            let installed_prettiers =
+                                prettier_store.prettier_instances.keys().cloned().collect();
+                            (locate_from, installed_prettiers)
+                        })
                 })?;
                 let locate_prettier_installation = match location_data {
-                    Some((locate_from, installed_prettiers)) => Prettier::locate_prettier_installation(
-                        fs.as_ref(),
-                        &installed_prettiers,
-                        locate_from.as_ref(),
-                    )
-                    .await
-                    .context("locate prettier installation").map_err(Arc::new)?,
+                    Some((locate_from, installed_prettiers)) => {
+                        Prettier::locate_prettier_installation(
+                            fs.as_ref(),
+                            &installed_prettiers,
+                            locate_from.as_ref(),
+                        )
+                        .await
+                        .context("locate prettier installation")
+                        .map_err(Arc::new)?
+                    }
                     None => ControlFlow::Continue(None),
                 };
 
-                match locate_prettier_installation
-                {
+                match locate_prettier_installation {
                     ControlFlow::Break(()) => return Ok(()),
                     ControlFlow::Continue(prettier_path) => {
                         if prettier_path.is_some() {
                             new_plugins.clear();
                         }
-                        let mut needs_install = should_write_prettier_server_file(fs.as_ref()).await;
+                        let mut needs_install =
+                            should_write_prettier_server_file(fs.as_ref()).await;
                         if let Some(previous_installation_task) = previous_installation_task
-                            && let Err(e) = previous_installation_task.await {
-                                log::error!("Failed to install default prettier: {e:#}");
-                                prettier_store.update(cx, |prettier_store, _| {
-                                    if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier {
-                                        *attempts += 1;
-                                        new_plugins.extend(not_installed_plugins.iter().cloned());
-                                        installation_attempt = *attempts;
-                                        needs_install = true;
-                                    };
-                                })?;
-                            };
+                            && let Err(e) = previous_installation_task.await
+                        {
+                            log::error!("Failed to install default prettier: {e:#}");
+                            prettier_store.update(cx, |prettier_store, _| {
+                                if let PrettierInstallation::NotInstalled {
+                                    attempts,
+                                    not_installed_plugins,
+                                    ..
+                                } = &mut prettier_store.default_prettier.prettier
+                                {
+                                    *attempts += 1;
+                                    new_plugins.extend(not_installed_plugins.iter().cloned());
+                                    installation_attempt = *attempts;
+                                    needs_install = true;
+                                };
+                            })?;
+                        };
                         if installation_attempt > prettier::FAIL_THRESHOLD {
                             prettier_store.update(cx, |prettier_store, _| {
-                                if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut prettier_store.default_prettier.prettier {
+                                if let PrettierInstallation::NotInstalled {
+                                    installation_task,
+                                    ..
+                                } = &mut prettier_store.default_prettier.prettier
+                                {
                                     *installation_task = None;
                                 };
                             })?;
                             log::warn!(
-                                "Default prettier installation had failed {installation_attempt} times, not attempting again",
+                                "Default prettier installation had failed {installation_attempt} \
+                                times, not attempting again",
                             );
                             return Ok(());
                         }
                         prettier_store.update(cx, |prettier_store, _| {
                             new_plugins.retain(|plugin| {
-                                !prettier_store.default_prettier.installed_plugins.contains(plugin)
+                                !prettier_store
+                                    .default_prettier
+                                    .installed_plugins
+                                    .contains(plugin)
                             });
-                            if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier {
+                            if let PrettierInstallation::NotInstalled {
+                                not_installed_plugins,
+                                ..
+                            } = &mut prettier_store.default_prettier.prettier
+                            {
                                 not_installed_plugins.retain(|plugin| {
-                                    !prettier_store.default_prettier.installed_plugins.contains(plugin)
+                                    !prettier_store
+                                        .default_prettier
+                                        .installed_plugins
+                                        .contains(plugin)
                                 });
                                 not_installed_plugins.extend(new_plugins.iter().cloned());
                             }
                             needs_install |= !new_plugins.is_empty();
                         })?;
                         if needs_install {
-                            log::info!("Initializing default prettier with plugins {new_plugins:?}");
+                            log::info!(
+                                "Initializing default prettier with plugins {new_plugins:?}"
+                            );
                             let installed_plugins = new_plugins.clone();
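+                            // Cloned so the spawned task can pass the executor to save_prettier_server_file when building the Rope.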
+                            let executor = cx.background_executor().clone();
                             cx.background_spawn(async move {
                                 install_prettier_packages(fs.as_ref(), new_plugins, node).await?;
                                 // Save the server file last, so that the need to reinstall can be determined by the file's absence.
-                                save_prettier_server_file(fs.as_ref()).await?;
+                                save_prettier_server_file(fs.as_ref(), &executor).await?;
                                 anyhow::Ok(())
                             })
-                                .await
-                                .context("prettier & plugins install")
-                                .map_err(Arc::new)?;
-                            log::info!("Initialized default prettier with plugins: {installed_plugins:?}");
+                            .await
+                            .context("prettier & plugins install")
+                            .map_err(Arc::new)?;
+                            log::info!(
+                                "Initialized default prettier with plugins: {installed_plugins:?}"
+                            );
                             prettier_store.update(cx, |prettier_store, _| {
                                 prettier_store.default_prettier.prettier =
                                     PrettierInstallation::Installed(PrettierInstance {
                                         attempt: 0,
                                         prettier: None,
                                     });
-                                prettier_store.default_prettier
+                                prettier_store
+                                    .default_prettier
                                     .installed_plugins
                                     .extend(installed_plugins);
                             })?;
                         } else {
                             prettier_store.update(cx, |prettier_store, _| {
-                                if let PrettierInstallation::NotInstalled { .. } = &mut prettier_store.default_prettier.prettier {
+                                if let PrettierInstallation::NotInstalled { .. } =
+                                    &mut prettier_store.default_prettier.prettier
+                                {
                                     prettier_store.default_prettier.prettier =
                                         PrettierInstallation::Installed(PrettierInstance {
                                             attempt: 0,
@@ -936,11 +976,14 @@ async fn install_prettier_packages(
     anyhow::Ok(())
 }
 
-async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> {
+async fn save_prettier_server_file(
+    fs: &dyn Fs,
+    executor: &BackgroundExecutor,
+) -> anyhow::Result<()> {
     let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE);
     fs.save(
         &prettier_wrapper_path,
-        &text::Rope::from(prettier::PRETTIER_SERVER_JS),
+        &text::Rope::from_str(prettier::PRETTIER_SERVER_JS, executor),
         text::LineEnding::Unix,
     )
     .await

crates/project/src/project.rs πŸ”—

@@ -337,7 +337,10 @@ pub enum Event {
     HostReshared,
     Reshared,
     Rejoined,
-    RefreshInlayHints(LanguageServerId),
+    RefreshInlayHints {
+        server_id: LanguageServerId,
+        request_id: Option<usize>,
+    },
     RefreshCodeLens,
     RevealInProjectPanel(ProjectEntryId),
     SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
@@ -712,8 +715,10 @@ pub enum ResolveState {
 impl InlayHint {
     pub fn text(&self) -> Rope {
         match &self.label {
-            InlayHintLabel::String(s) => Rope::from(s),
-            InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &*part.value).collect(),
+            InlayHintLabel::String(s) => Rope::from_str_small(s),
+            InlayHintLabel::LabelParts(parts) => {
+                Rope::from_iter_small(parts.iter().map(|part| &*part.value))
+            }
         }
     }
 }
@@ -3074,9 +3079,13 @@ impl Project {
                     return;
                 };
             }
-            LspStoreEvent::RefreshInlayHints(server_id) => {
-                cx.emit(Event::RefreshInlayHints(*server_id))
-            }
+            LspStoreEvent::RefreshInlayHints {
+                server_id,
+                request_id,
+            } => cx.emit(Event::RefreshInlayHints {
+                server_id: *server_id,
+                request_id: *request_id,
+            }),
             LspStoreEvent::RefreshCodeLens => cx.emit(Event::RefreshCodeLens),
             LspStoreEvent::LanguageServerPrompt(prompt) => {
                 cx.emit(Event::LanguageServerPrompt(prompt.clone()))
@@ -5402,7 +5411,12 @@ impl Project {
             worktree
                 .update(cx, |worktree, cx| {
                     let line_ending = text::LineEnding::detect(&new_text);
-                    worktree.write_file(rel_path.clone(), new_text.into(), line_ending, cx)
+                    worktree.write_file(
+                        rel_path.clone(),
+                        Rope::from_str(&new_text, cx.background_executor()),
+                        line_ending,
+                        cx,
+                    )
                 })?
                 .await
                 .context("Failed to write settings file")?;

crates/project/src/project_tests.rs πŸ”—

@@ -1461,21 +1461,21 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
     .unwrap();
     fs.save(
         path!("/the-root/Cargo.lock").as_ref(),
-        &"".into(),
+        &Rope::default(),
         Default::default(),
     )
     .await
     .unwrap();
     fs.save(
         path!("/the-stdlib/LICENSE").as_ref(),
-        &"".into(),
+        &Rope::default(),
         Default::default(),
     )
     .await
     .unwrap();
     fs.save(
         path!("/the/stdlib/src/string.rs").as_ref(),
-        &"".into(),
+        &Rope::default(),
         Default::default(),
     )
     .await
@@ -1815,10 +1815,6 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
     fake_server
         .start_progress(format!("{}/0", progress_token))
         .await;
-    assert_eq!(
-        events.next().await.unwrap(),
-        Event::RefreshInlayHints(fake_server.server.server_id())
-    );
     assert_eq!(
         events.next().await.unwrap(),
         Event::DiskBasedDiagnosticsStarted {
@@ -1957,10 +1953,6 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
             Some(worktree_id)
         )
     );
-    assert_eq!(
-        events.next().await.unwrap(),
-        Event::RefreshInlayHints(fake_server.server.server_id())
-    );
     fake_server.start_progress(progress_token).await;
     assert_eq!(
         events.next().await.unwrap(),
@@ -4072,7 +4064,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
     // to be detected by the worktree, so that the buffer starts reloading.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &"the first contents".into(),
+        &Rope::from_str("the first contents", cx.background_executor()),
         Default::default(),
     )
     .await
@@ -4083,7 +4075,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
     // previous file change may still be in progress.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &"the second contents".into(),
+        &Rope::from_str("the second contents", cx.background_executor()),
         Default::default(),
     )
     .await
@@ -4127,7 +4119,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
     // to be detected by the worktree, so that the buffer starts reloading.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &"the first contents".into(),
+        &Rope::from_str("the first contents", cx.background_executor()),
         Default::default(),
     )
     .await
@@ -4805,7 +4797,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
         marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
     fs.save(
         path!("/dir/the-file").as_ref(),
-        &new_contents.as_str().into(),
+        &Rope::from_str(new_contents.as_str(), cx.background_executor()),
         LineEnding::Unix,
     )
     .await
@@ -4837,7 +4829,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
     // Change the file on disk again, adding blank lines to the beginning.
     fs.save(
         path!("/dir/the-file").as_ref(),
-        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
+        &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
         LineEnding::Unix,
     )
     .await
@@ -4889,7 +4881,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
     // state updates correctly.
     fs.save(
         path!("/dir/file1").as_ref(),
-        &"aaa\nb\nc\n".into(),
+        &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
         LineEnding::Windows,
     )
     .await
@@ -9171,7 +9163,9 @@ async fn test_odd_events_for_ignored_dirs(
         repository_updates.lock().drain(..).collect::<Vec<_>>(),
         vec![
             RepositoryEvent::MergeHeadsChanged,
-            RepositoryEvent::BranchChanged
+            RepositoryEvent::BranchChanged,
+            RepositoryEvent::StatusesChanged { full_scan: false },
+            RepositoryEvent::StatusesChanged { full_scan: false },
         ],
         "Initial worktree scan should produce a repo update event"
     );

crates/project/src/terminals.rs πŸ”—

@@ -8,7 +8,6 @@ use remote::RemoteClient;
 use settings::{Settings, SettingsLocation};
 use smol::channel::bounded;
 use std::{
-    borrow::Cow,
     path::{Path, PathBuf},
     sync::Arc,
 };
@@ -122,6 +121,7 @@ impl Project {
         let lang_registry = self.languages.clone();
         cx.spawn(async move |project, cx| {
             let shell_kind = ShellKind::new(&shell, is_windows);
+
             let activation_script = maybe!(async {
                 for toolchain in toolchains {
                     let Some(toolchain) = toolchain.await else {
@@ -139,142 +139,140 @@ impl Project {
             .await
             .unwrap_or_default();
 
-            project.update(cx, move |this, cx| {
-                let format_to_run = || {
-                    if let Some(command) = &spawn_task.command {
-                        let mut command: Option<Cow<str>> = shell_kind.try_quote(command);
-                        if let Some(command) = &mut command
-                            && command.starts_with('"')
-                            && let Some(prefix) = shell_kind.command_prefix()
-                        {
-                            *command = Cow::Owned(format!("{prefix}{command}"));
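+            // The terminal builder is awaited first; the resulting handle is registered on the project in a second update below.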
+            let builder = project
+                .update(cx, move |_, cx| {
+                    let format_to_run = || {
+                        if let Some(command) = &spawn_task.command {
+                            let command = shell_kind.prepend_command_prefix(command);
+                            let command = shell_kind.try_quote_prefix_aware(&command);
+                            let args = spawn_task
+                                .args
+                                .iter()
+                                .filter_map(|arg| shell_kind.try_quote(&arg));
+
+                            command.into_iter().chain(args).join(" ")
+                        } else {
+                            // todo: this breaks for remotes to windows
+                            format!("exec {shell} -l")
                         }
+                    };
 
-                        let args = spawn_task
-                            .args
-                            .iter()
-                            .filter_map(|arg| shell_kind.try_quote(&arg));
-
-                        command.into_iter().chain(args).join(" ")
-                    } else {
-                        // todo: this breaks for remotes to windows
-                        format!("exec {shell} -l")
-                    }
-                };
-
-                let (shell, env) = {
-                    env.extend(spawn_task.env);
-                    match remote_client {
-                        Some(remote_client) => match activation_script.clone() {
-                            activation_script if !activation_script.is_empty() => {
-                                let separator = shell_kind.sequential_commands_separator();
-                                let activation_script =
-                                    activation_script.join(&format!("{separator} "));
-                                let to_run = format_to_run();
-                                let shell = remote_client
-                                    .read(cx)
-                                    .shell()
-                                    .unwrap_or_else(get_default_system_shell);
-                                let arg = format!("{activation_script}{separator} {to_run}");
-                                let args = shell_kind.args_for_shell(false, arg);
-
-                                create_remote_shell(
-                                    Some((&shell, &args)),
+                    let (shell, env) = {
+                        env.extend(spawn_task.env);
+                        match remote_client {
+                            Some(remote_client) => match activation_script.clone() {
+                                activation_script if !activation_script.is_empty() => {
+                                    let separator = shell_kind.sequential_commands_separator();
+                                    let activation_script =
+                                        activation_script.join(&format!("{separator} "));
+                                    let to_run = format_to_run();
+
+                                    let arg = format!("{activation_script}{separator} {to_run}");
+                                    let args = shell_kind.args_for_shell(false, arg);
+                                    let shell = remote_client
+                                        .read(cx)
+                                        .shell()
+                                        .unwrap_or_else(get_default_system_shell);
+
+                                    create_remote_shell(
+                                        Some((&shell, &args)),
+                                        env,
+                                        path,
+                                        remote_client,
+                                        cx,
+                                    )?
+                                }
+                                _ => create_remote_shell(
+                                    spawn_task
+                                        .command
+                                        .as_ref()
+                                        .map(|command| (command, &spawn_task.args)),
                                     env,
                                     path,
                                     remote_client,
                                     cx,
-                                )?
-                            }
-                            _ => create_remote_shell(
-                                spawn_task
-                                    .command
-                                    .as_ref()
-                                    .map(|command| (command, &spawn_task.args)),
-                                env,
-                                path,
-                                remote_client,
-                                cx,
-                            )?,
-                        },
-                        None => match activation_script.clone() {
-                            activation_script if !activation_script.is_empty() => {
-                                let separator = shell_kind.sequential_commands_separator();
-                                let activation_script =
-                                    activation_script.join(&format!("{separator} "));
-                                let to_run = format_to_run();
-
-                                let mut arg = format!("{activation_script}{separator} {to_run}");
-                                if shell_kind == ShellKind::Cmd {
-                                    // We need to put the entire command in quotes since otherwise CMD tries to execute them
-                                    // as separate commands rather than chaining one after another.
-                                    arg = format!("\"{arg}\"");
-                                }
+                                )?,
+                            },
+                            None => match activation_script.clone() {
+                                activation_script if !activation_script.is_empty() => {
+                                    let separator = shell_kind.sequential_commands_separator();
+                                    let activation_script =
+                                        activation_script.join(&format!("{separator} "));
+                                    let to_run = format_to_run();
+
+                                    let mut arg =
+                                        format!("{activation_script}{separator} {to_run}");
+                                    if shell_kind == ShellKind::Cmd {
+                                        // We need to put the entire command in quotes since otherwise CMD tries to execute them
+                                        // as separate commands rather than chaining one after another.
+                                        arg = format!("\"{arg}\"");
+                                    }
 
-                                let args = shell_kind.args_for_shell(false, arg);
+                                    let args = shell_kind.args_for_shell(false, arg);
 
-                                (
-                                    Shell::WithArguments {
-                                        program: shell,
-                                        args,
-                                        title_override: None,
+                                    (
+                                        Shell::WithArguments {
+                                            program: shell,
+                                            args,
+                                            title_override: None,
+                                        },
+                                        env,
+                                    )
+                                }
+                                _ => (
+                                    if let Some(program) = spawn_task.command {
+                                        Shell::WithArguments {
+                                            program,
+                                            args: spawn_task.args,
+                                            title_override: None,
+                                        }
+                                    } else {
+                                        Shell::System
                                     },
                                     env,
-                                )
-                            }
-                            _ => (
-                                if let Some(program) = spawn_task.command {
-                                    Shell::WithArguments {
-                                        program,
-                                        args: spawn_task.args,
-                                        title_override: None,
-                                    }
-                                } else {
-                                    Shell::System
-                                },
-                                env,
-                            ),
-                        },
-                    }
-                };
-                TerminalBuilder::new(
-                    local_path.map(|path| path.to_path_buf()),
-                    task_state,
-                    shell,
-                    env,
-                    settings.cursor_shape,
-                    settings.alternate_scroll,
-                    settings.max_scroll_history_lines,
-                    is_via_remote,
-                    cx.entity_id().as_u64(),
-                    Some(completion_tx),
-                    cx,
-                    activation_script,
-                )
-                .map(|builder| {
-                    let terminal_handle = cx.new(|cx| builder.subscribe(cx));
-
-                    this.terminals
-                        .local_handles
-                        .push(terminal_handle.downgrade());
-
-                    let id = terminal_handle.entity_id();
-                    cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
-                        let handles = &mut project.terminals.local_handles;
-
-                        if let Some(index) = handles
-                            .iter()
-                            .position(|terminal| terminal.entity_id() == id)
-                        {
-                            handles.remove(index);
-                            cx.notify();
+                                ),
+                            },
                         }
-                    })
-                    .detach();
+                    };
+                    anyhow::Ok(TerminalBuilder::new(
+                        local_path.map(|path| path.to_path_buf()),
+                        task_state,
+                        shell,
+                        env,
+                        settings.cursor_shape,
+                        settings.alternate_scroll,
+                        settings.max_scroll_history_lines,
+                        is_via_remote,
+                        cx.entity_id().as_u64(),
+                        Some(completion_tx),
+                        cx,
+                        activation_script,
+                    ))
+                })??
+                .await?;
+            project.update(cx, move |this, cx| {
+                let terminal_handle = cx.new(|cx| builder.subscribe(cx));
 
-                    terminal_handle
+                this.terminals
+                    .local_handles
+                    .push(terminal_handle.downgrade());
+
+                let id = terminal_handle.entity_id();
+                cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+                    let handles = &mut project.terminals.local_handles;
+
+                    if let Some(index) = handles
+                        .iter()
+                        .position(|terminal| terminal.entity_id() == id)
+                    {
+                        handles.remove(index);
+                        cx.notify();
+                    }
                 })
-            })?
+                .detach();
+
+                terminal_handle
+            })
         })
     }
 
@@ -355,53 +353,55 @@ impl Project {
             })
             .await
             .unwrap_or_default();
-            project.update(cx, move |this, cx| {
-                let (shell, env) = {
-                    match remote_client {
-                        Some(remote_client) => {
-                            create_remote_shell(None, env, path, remote_client, cx)?
-                        }
-                        None => (settings.shell, env),
-                    }
-                };
-                TerminalBuilder::new(
-                    local_path.map(|path| path.to_path_buf()),
-                    None,
-                    shell,
-                    env,
-                    settings.cursor_shape,
-                    settings.alternate_scroll,
-                    settings.max_scroll_history_lines,
-                    is_via_remote,
-                    cx.entity_id().as_u64(),
-                    None,
-                    cx,
-                    activation_script,
-                )
-                .map(|builder| {
-                    let terminal_handle = cx.new(|cx| builder.subscribe(cx));
-
-                    this.terminals
-                        .local_handles
-                        .push(terminal_handle.downgrade());
-
-                    let id = terminal_handle.entity_id();
-                    cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
-                        let handles = &mut project.terminals.local_handles;
-
-                        if let Some(index) = handles
-                            .iter()
-                            .position(|terminal| terminal.entity_id() == id)
-                        {
-                            handles.remove(index);
-                            cx.notify();
+            let builder = project
+                .update(cx, move |_, cx| {
+                    let (shell, env) = {
+                        match remote_client {
+                            Some(remote_client) => {
+                                create_remote_shell(None, env, path, remote_client, cx)?
+                            }
+                            None => (settings.shell, env),
                         }
-                    })
-                    .detach();
+                    };
+                    anyhow::Ok(TerminalBuilder::new(
+                        local_path.map(|path| path.to_path_buf()),
+                        None,
+                        shell,
+                        env,
+                        settings.cursor_shape,
+                        settings.alternate_scroll,
+                        settings.max_scroll_history_lines,
+                        is_via_remote,
+                        cx.entity_id().as_u64(),
+                        None,
+                        cx,
+                        activation_script,
+                    ))
+                })??
+                .await?;
+            project.update(cx, move |this, cx| {
+                let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+
+                this.terminals
+                    .local_handles
+                    .push(terminal_handle.downgrade());
+
+                let id = terminal_handle.entity_id();
+                cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+                    let handles = &mut project.terminals.local_handles;
 
-                    terminal_handle
+                    if let Some(index) = handles
+                        .iter()
+                        .position(|terminal| terminal.entity_id() == id)
+                    {
+                        handles.remove(index);
+                        cx.notify();
+                    }
                 })
-            })?
+                .detach();
+
+                terminal_handle
+            })
         })
     }
 
@@ -422,13 +422,14 @@ impl Project {
             cwd
         };
 
-        let new_terminal = terminal
-            .read(cx)
-            .clone_builder(cx, local_path)
-            .map(|builder| {
-                let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+        let builder = terminal.read(cx).clone_builder(cx, local_path);
+        cx.spawn(async |project, cx| {
+            let terminal = builder.await?;
+            project.update(cx, |project, cx| {
+                let terminal_handle = cx.new(|cx| terminal.subscribe(cx));
 
-                self.terminals
+                project
+                    .terminals
                     .local_handles
                     .push(terminal_handle.downgrade());
 
@@ -447,8 +448,8 @@ impl Project {
                 .detach();
 
                 terminal_handle
-            });
-        Task::ready(new_terminal)
+            })
+        })
     }
 
     pub fn terminal_settings<'a>(

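A note on the `})??.await?` chain in the terminal-creation refactor above: the first `?` unwraps the entity update itself, the second unwraps the shell/env preparation, and the final `.await?` resolves the builder future before a second update registers the handle. Below is a minimal, self-contained sketch of that shape, using a hypothetical `update` helper and a stand-in builder rather than Zed's gpui API:

    use anyhow::{anyhow, Result};
    use futures::executor::block_on;

    fn update<T>(f: impl FnOnce() -> T) -> Result<T> {
        // Stand-in for an entity update, which can itself fail (e.g. if the entity is gone).
        Ok(f())
    }

    async fn demo(fail: bool) -> Result<&'static str> {
        let builder = update(|| {
            if fail {
                return Err(anyhow!("could not prepare shell"));
            }
            // Stand-in for `TerminalBuilder::new(..)`: preparation may fail, and on success
            // it yields a future that resolves to the terminal.
            anyhow::Ok(async { anyhow::Ok("terminal") })
        })?? // first `?` unwraps the update, the second the preparation
        .await?; // finally await the builder itself
        Ok(builder)
    }

    fn main() {
        assert_eq!(block_on(demo(false)).unwrap(), "terminal");
        assert!(block_on(demo(true)).is_err());
    }

Splitting the work this way keeps the fallible preparation inside the update closure while the potentially slow builder is awaited outside of it.
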
crates/project_panel/src/project_panel.rs πŸ”—

@@ -1038,9 +1038,8 @@ impl ProjectPanel {
                                 "Copy Relative Path",
                                 Box::new(zed_actions::workspace::CopyRelativePath),
                             )
-                            .separator()
                             .when(!should_hide_rename, |menu| {
-                                menu.action("Rename", Box::new(Rename))
+                                menu.separator().action("Rename", Box::new(Rename))
                             })
                             .when(!is_root && !is_remote, |menu| {
                                 menu.action("Trash", Box::new(Trash { skip_prompt: false }))

crates/proto/proto/lsp.proto πŸ”—

@@ -466,6 +466,7 @@ message ResolveInlayHintResponse {
 message RefreshInlayHints {
     uint64 project_id = 1;
     uint64 server_id = 2;
+    optional uint64 request_id = 3;
 }
 
 message CodeLens {

crates/recent_projects/src/remote_connections.rs πŸ”—

@@ -574,6 +574,7 @@ pub async fn open_remote_project(
     open_options: workspace::OpenOptions,
     cx: &mut AsyncApp,
 ) -> Result<()> {
+    let created_new_window = open_options.replace_window.is_none();
     let window = if let Some(window) = open_options.replace_window {
         window
     } else {
@@ -648,7 +649,45 @@ pub async fn open_remote_project(
         let Some(delegate) = delegate else { break };
 
         let remote_connection =
-            remote::connect(connection_options.clone(), delegate.clone(), cx).await?;
+            match remote::connect(connection_options.clone(), delegate.clone(), cx).await {
+                Ok(connection) => connection,
+                Err(e) => {
+                    window
+                        .update(cx, |workspace, _, cx| {
+                            if let Some(ui) = workspace.active_modal::<RemoteConnectionModal>(cx) {
+                                ui.update(cx, |modal, cx| modal.finished(cx))
+                            }
+                        })
+                        .ok();
+                    log::error!("Failed to open project: {e:?}");
+                    let response = window
+                        .update(cx, |_, window, cx| {
+                            window.prompt(
+                                PromptLevel::Critical,
+                                match connection_options {
+                                    RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH",
+                                    RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL",
+                                },
+                                Some(&e.to_string()),
+                                &["Retry", "Cancel"],
+                                cx,
+                            )
+                        })?
+                        .await;
+
+                    if response == Ok(0) {
+                        continue;
+                    }
+
+                    if created_new_window {
+                        window
+                            .update(cx, |_, window, _| window.remove_window())
+                            .ok();
+                    }
+                    break;
+                }
+            };
+
         let (paths, paths_with_positions) =
             determine_paths_with_positions(&remote_connection, paths.clone()).await;
 
@@ -686,7 +725,7 @@ pub async fn open_remote_project(
                                 RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL",
                             },
                             Some(&e.to_string()),
-                            &["Retry", "Ok"],
+                            &["Retry", "Cancel"],
                             cx,
                         )
                     })?
@@ -694,7 +733,14 @@ pub async fn open_remote_project(
                 if response == Ok(0) {
                     continue;
                 }
+
+                if created_new_window {
+                    window
+                        .update(cx, |_, window, _| window.remove_window())
+                        .ok();
+                }
             }
+
             Ok(items) => {
                 for (item, path) in items.into_iter().zip(paths_with_positions) {
                     let Some(item) = item else {

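The connection hunk above wraps `remote::connect` in a retry loop: on failure it dismisses the connecting modal, prompts with Retry/Cancel, and removes the window only when this call created it. A small, self-contained sketch of that control flow, with hypothetical closures standing in for the prompt and window APIs:

    struct Connection;

    /// Minimal sketch of the retry loop above (hypothetical signatures, not gpui's):
    /// prompt on failure, retry on button 0, otherwise close a window we created ourselves.
    fn connect_with_retry(
        mut connect: impl FnMut() -> Result<Connection, String>,
        mut prompt: impl FnMut(&str) -> usize, // returns the index of the chosen button
        created_new_window: bool,
        mut remove_window: impl FnMut(),
    ) -> Option<Connection> {
        loop {
            match connect() {
                Ok(connection) => return Some(connection),
                Err(error) => {
                    if prompt(&error) == 0 {
                        continue; // "Retry"
                    }
                    if created_new_window {
                        remove_window(); // only tear down windows this flow opened
                    }
                    return None; // "Cancel"
                }
            }
        }
    }

    fn main() {
        let mut attempts = 0;
        let result = connect_with_retry(
            || {
                attempts += 1;
                if attempts < 2 { Err("connection refused".into()) } else { Ok(Connection) }
            },
            |_error| 0, // always choose "Retry"
            true,
            || {},
        );
        assert!(result.is_some());
    }
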
crates/remote/src/transport/ssh.rs πŸ”—

@@ -39,6 +39,7 @@ pub(crate) struct SshRemoteConnection {
     ssh_platform: RemotePlatform,
     ssh_path_style: PathStyle,
     ssh_shell: String,
+    ssh_shell_kind: ShellKind,
     ssh_default_system_shell: String,
     _temp_dir: TempDir,
 }
@@ -241,6 +242,7 @@ impl RemoteConnection for SshRemoteConnection {
         let Self {
             ssh_path_style,
             socket,
+            ssh_shell_kind,
             ssh_shell,
             ..
         } = self;
@@ -254,6 +256,7 @@ impl RemoteConnection for SshRemoteConnection {
             env,
             *ssh_path_style,
             ssh_shell,
+            *ssh_shell_kind,
             socket.ssh_args(),
         )
     }
@@ -367,7 +370,7 @@ impl RemoteConnection for SshRemoteConnection {
 
         let ssh_proxy_process = match self
             .socket
-            .ssh_command("env", &proxy_args)
+            .ssh_command(self.ssh_shell_kind, "env", &proxy_args)
             // IMPORTANT: we kill this process when we drop the task that uses it.
             .kill_on_drop(true)
             .spawn()
@@ -490,6 +493,13 @@ impl SshRemoteConnection {
             _ => PathStyle::Posix,
         };
         let ssh_default_system_shell = String::from("/bin/sh");
+        let ssh_shell_kind = ShellKind::new(&ssh_shell, ssh_platform.os == "windows");
 
         let mut this = Self {
             socket,
@@ -499,6 +509,7 @@ impl SshRemoteConnection {
             ssh_path_style,
             ssh_platform,
             ssh_shell,
+            ssh_shell_kind,
             ssh_default_system_shell,
         };
 
@@ -563,7 +574,11 @@ impl SshRemoteConnection {
 
         if self
             .socket
-            .run_command(&dst_path.display(self.path_style()), &["version"])
+            .run_command(
+                self.ssh_shell_kind,
+                &dst_path.display(self.path_style()),
+                &["version"],
+            )
             .await
             .is_ok()
         {
@@ -632,7 +647,11 @@ impl SshRemoteConnection {
     ) -> Result<()> {
         if let Some(parent) = tmp_path_gz.parent() {
             self.socket
-                .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()])
+                .run_command(
+                    self.ssh_shell_kind,
+                    "mkdir",
+                    &["-p", parent.display(self.path_style()).as_ref()],
+                )
                 .await?;
         }
 
@@ -641,6 +660,7 @@ impl SshRemoteConnection {
         match self
             .socket
             .run_command(
+                self.ssh_shell_kind,
                 "curl",
                 &[
                     "-f",
@@ -660,13 +680,19 @@ impl SshRemoteConnection {
         {
             Ok(_) => {}
             Err(e) => {
-                if self.socket.run_command("which", &["curl"]).await.is_ok() {
+                if self
+                    .socket
+                    .run_command(self.ssh_shell_kind, "which", &["curl"])
+                    .await
+                    .is_ok()
+                {
                     return Err(e);
                 }
 
                 match self
                     .socket
                     .run_command(
+                        self.ssh_shell_kind,
                         "wget",
                         &[
                             "--header=Content-Type: application/json",
@@ -681,7 +707,12 @@ impl SshRemoteConnection {
                 {
                     Ok(_) => {}
                     Err(e) => {
-                        if self.socket.run_command("which", &["wget"]).await.is_ok() {
+                        if self
+                            .socket
+                            .run_command(self.ssh_shell_kind, "which", &["wget"])
+                            .await
+                            .is_ok()
+                        {
                             return Err(e);
                         } else {
                             anyhow::bail!("Neither curl nor wget is available");
@@ -703,7 +734,11 @@ impl SshRemoteConnection {
     ) -> Result<()> {
         if let Some(parent) = tmp_path_gz.parent() {
             self.socket
-                .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()])
+                .run_command(
+                    self.ssh_shell_kind,
+                    "mkdir",
+                    &["-p", parent.display(self.path_style()).as_ref()],
+                )
                 .await?;
         }
 
@@ -750,7 +785,7 @@ impl SshRemoteConnection {
             format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",)
         };
         let args = shell_kind.args_for_shell(false, script.to_string());
-        self.socket.run_command("sh", &args).await?;
+        self.socket.run_command(shell_kind, "sh", &args).await?;
         Ok(())
     }
 
@@ -894,11 +929,16 @@ impl SshSocket {
     // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing
     // into a machine. You must use `cd` to get back to $HOME.
     // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
-    fn ssh_command(&self, program: &str, args: &[impl AsRef<str>]) -> process::Command {
-        let shell_kind = ShellKind::Posix;
+    fn ssh_command(
+        &self,
+        shell_kind: ShellKind,
+        program: &str,
+        args: &[impl AsRef<str>],
+    ) -> process::Command {
         let mut command = util::command::new_smol_command("ssh");
+        let program = shell_kind.prepend_command_prefix(program);
         let mut to_run = shell_kind
-            .try_quote(program)
+            .try_quote_prefix_aware(&program)
             .expect("shell quoting")
             .into_owned();
         for arg in args {
@@ -920,8 +960,13 @@ impl SshSocket {
         command
     }
 
-    async fn run_command(&self, program: &str, args: &[impl AsRef<str>]) -> Result<String> {
-        let output = self.ssh_command(program, args).output().await?;
+    async fn run_command(
+        &self,
+        shell_kind: ShellKind,
+        program: &str,
+        args: &[impl AsRef<str>],
+    ) -> Result<String> {
+        let output = self.ssh_command(shell_kind, program, args).output().await?;
         anyhow::ensure!(
             output.status.success(),
             "failed to run command: {}",
@@ -994,12 +1039,7 @@ impl SshSocket {
     }
 
     async fn platform(&self, shell: ShellKind) -> Result<RemotePlatform> {
-        let program = if shell == ShellKind::Nushell {
-            "^uname"
-        } else {
-            "uname"
-        };
-        let uname = self.run_command(program, &["-sm"]).await?;
+        let uname = self.run_command(shell, "uname", &["-sm"]).await?;
         let Some((os, arch)) = uname.split_once(" ") else {
             anyhow::bail!("unknown uname: {uname:?}")
         };
@@ -1030,7 +1070,10 @@ impl SshSocket {
     }
 
     async fn shell(&self) -> String {
-        match self.run_command("sh", &["-c", "echo $SHELL"]).await {
+        match self
+            .run_command(ShellKind::Posix, "sh", &["-c", "echo $SHELL"])
+            .await
+        {
             Ok(shell) => shell.trim().to_owned(),
             Err(e) => {
                 log::error!("Failed to get shell: {e}");
@@ -1256,11 +1299,11 @@ fn build_command(
     ssh_env: HashMap<String, String>,
     ssh_path_style: PathStyle,
     ssh_shell: &str,
+    ssh_shell_kind: ShellKind,
     ssh_args: Vec<String>,
 ) -> Result<CommandTemplate> {
     use std::fmt::Write as _;
 
-    let shell_kind = ShellKind::new(ssh_shell, false);
     let mut exec = String::new();
     if let Some(working_dir) = working_dir {
         let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string();
@@ -1270,12 +1313,24 @@ fn build_command(
         const TILDE_PREFIX: &'static str = "~/";
         if working_dir.starts_with(TILDE_PREFIX) {
             let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
-            write!(exec, "cd \"$HOME/{working_dir}\" && ",)?;
+            write!(
+                exec,
+                "cd \"$HOME/{working_dir}\" {} ",
+                ssh_shell_kind.sequential_and_commands_separator()
+            )?;
         } else {
-            write!(exec, "cd \"{working_dir}\" && ",)?;
+            write!(
+                exec,
+                "cd \"{working_dir}\" {} ",
+                ssh_shell_kind.sequential_and_commands_separator()
+            )?;
         }
     } else {
-        write!(exec, "cd && ")?;
+        write!(
+            exec,
+            "cd {} ",
+            ssh_shell_kind.sequential_and_commands_separator()
+        )?;
     };
     write!(exec, "exec env ")?;
 
@@ -1284,7 +1339,7 @@ fn build_command(
             exec,
             "{}={} ",
             k,
-            shell_kind.try_quote(v).context("shell quoting")?
+            ssh_shell_kind.try_quote(v).context("shell quoting")?
         )?;
     }
 
@@ -1292,12 +1347,12 @@ fn build_command(
         write!(
             exec,
             "{}",
-            shell_kind
-                .try_quote(&input_program)
+            ssh_shell_kind
+                .try_quote_prefix_aware(&input_program)
                 .context("shell quoting")?
         )?;
         for arg in input_args {
-            let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
+            let arg = ssh_shell_kind.try_quote(&arg).context("shell quoting")?;
             write!(exec, " {}", &arg)?;
         }
     } else {
@@ -1341,6 +1396,7 @@ mod tests {
             env.clone(),
             PathStyle::Posix,
             "/bin/fish",
+            ShellKind::Fish,
             vec!["-p".to_string(), "2222".to_string()],
         )?;
 
@@ -1370,6 +1426,7 @@ mod tests {
             env.clone(),
             PathStyle::Posix,
             "/bin/fish",
+            ShellKind::Fish,
             vec!["-p".to_string(), "2222".to_string()],
         )?;
 

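For reference, `build_command` above assembles a single shell string of the form `cd <dir> <separator> exec env K=V… <program> <args…>`, with quoting and the command separator chosen per `ShellKind`. Below is a simplified, self-contained sketch of that assembly, using naive POSIX-only quoting in place of `ShellKind::try_quote`:

    use std::fmt::Write as _;

    // Naive single-quote escaping, sufficient for this sketch only.
    fn quote(s: &str) -> String {
        format!("'{}'", s.replace('\'', r"'\''"))
    }

    fn build_exec(
        working_dir: Option<&str>,
        env: &[(&str, &str)],
        program: &str,
        args: &[&str],
    ) -> String {
        let mut exec = String::new();
        match working_dir {
            Some(dir) => write!(exec, "cd \"{dir}\" && ").unwrap(),
            None => write!(exec, "cd && ").unwrap(), // fall back to $HOME
        }
        write!(exec, "exec env ").unwrap();
        for (k, v) in env {
            write!(exec, "{k}={} ", quote(v)).unwrap();
        }
        write!(exec, "{}", quote(program)).unwrap();
        for arg in args {
            write!(exec, " {}", quote(arg)).unwrap();
        }
        exec
    }

    fn main() {
        let exec = build_exec(Some("~/project"), &[("RUST_LOG", "info")], "zed-remote-server", &["run"]);
        assert_eq!(exec, r#"cd "~/project" && exec env RUST_LOG='info' 'zed-remote-server' 'run'"#);
    }
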
crates/remote/src/transport/wsl.rs πŸ”—

@@ -44,6 +44,7 @@ pub(crate) struct WslRemoteConnection {
     remote_binary_path: Option<Arc<RelPath>>,
     platform: RemotePlatform,
     shell: String,
+    shell_kind: ShellKind,
     default_system_shell: String,
     connection_options: WslConnectionOptions,
     can_exec: bool,
@@ -73,16 +74,17 @@ impl WslRemoteConnection {
             remote_binary_path: None,
             platform: RemotePlatform { os: "", arch: "" },
             shell: String::new(),
+            shell_kind: ShellKind::Posix,
             default_system_shell: String::from("/bin/sh"),
             can_exec: true,
         };
         delegate.set_status(Some("Detecting WSL environment"), cx);
         this.shell = this.detect_shell().await?;
-        let shell = ShellKind::new(&this.shell, false);
-        this.can_exec = this.detect_can_exec(shell).await?;
-        this.platform = this.detect_platform(shell).await?;
+        this.shell_kind = ShellKind::new(&this.shell, false);
+        this.can_exec = this.detect_can_exec().await?;
+        this.platform = this.detect_platform().await?;
         this.remote_binary_path = Some(
-            this.ensure_server_binary(&delegate, release_channel, version, commit, shell, cx)
+            this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
                 .await?,
         );
         log::debug!("Detected WSL environment: {this:#?}");
@@ -90,20 +92,16 @@ impl WslRemoteConnection {
         Ok(this)
     }
 
-    async fn detect_can_exec(&self, shell: ShellKind) -> Result<bool> {
+    async fn detect_can_exec(&self) -> Result<bool> {
         let options = &self.connection_options;
-        let program = if shell == ShellKind::Nushell {
-            "^uname"
-        } else {
-            "uname"
-        };
+        let program = self.shell_kind.prepend_command_prefix("uname");
         let args = &["-m"];
-        let output = wsl_command_impl(options, program, args, true)
+        let output = wsl_command_impl(options, &program, args, true)
             .output()
             .await?;
 
         if !output.status.success() {
-            let output = wsl_command_impl(options, program, args, false)
+            let output = wsl_command_impl(options, &program, args, false)
                 .output()
                 .await?;
 
@@ -120,14 +118,9 @@ impl WslRemoteConnection {
             Ok(true)
         }
     }
-    async fn detect_platform(&self, shell: ShellKind) -> Result<RemotePlatform> {
-        let arch_str = if shell == ShellKind::Nushell {
-            // https://github.com/nushell/nushell/issues/12570
-            self.run_wsl_command("sh", &["-c", "uname -m"])
-        } else {
-            self.run_wsl_command("uname", &["-m"])
-        }
-        .await?;
+    async fn detect_platform(&self) -> Result<RemotePlatform> {
+        let program = self.shell_kind.prepend_command_prefix("uname");
+        let arch_str = self.run_wsl_command(&program, &["-m"]).await?;
         let arch_str = arch_str.trim().to_string();
         let arch = match arch_str.as_str() {
             "x86_64" => "x86_64",
@@ -163,7 +156,6 @@ impl WslRemoteConnection {
         release_channel: ReleaseChannel,
         version: SemanticVersion,
         commit: Option<AppCommitSha>,
-        shell: ShellKind,
         cx: &mut AsyncApp,
     ) -> Result<Arc<RelPath>> {
         let version_str = match release_channel {
@@ -186,12 +178,9 @@ impl WslRemoteConnection {
 
         if let Some(parent) = dst_path.parent() {
             let parent = parent.display(PathStyle::Posix);
-            if shell == ShellKind::Nushell {
-                self.run_wsl_command("mkdir", &[&parent]).await
-            } else {
-                self.run_wsl_command("mkdir", &["-p", &parent]).await
-            }
-            .map_err(|e| anyhow!("Failed to create directory: {}", e))?;
+            self.run_wsl_command("mkdir", &["-p", &parent])
+                .await
+                .map_err(|e| anyhow!("Failed to create directory: {}", e))?;
         }
 
         #[cfg(debug_assertions)]
@@ -206,7 +195,7 @@ impl WslRemoteConnection {
                 ))
                 .unwrap(),
             );
-            self.upload_file(&remote_server_path, &tmp_path, delegate, &shell, cx)
+            self.upload_file(&remote_server_path, &tmp_path, delegate, cx)
                 .await?;
             self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
                 .await?;
@@ -239,8 +228,7 @@ impl WslRemoteConnection {
         );
         let tmp_path = RelPath::unix(&tmp_path).unwrap();
 
-        self.upload_file(&src_path, &tmp_path, delegate, &shell, cx)
-            .await?;
+        self.upload_file(&src_path, &tmp_path, delegate, cx).await?;
         self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
             .await?;
 
@@ -252,19 +240,15 @@ impl WslRemoteConnection {
         src_path: &Path,
         dst_path: &RelPath,
         delegate: &Arc<dyn RemoteClientDelegate>,
-        shell: &ShellKind,
         cx: &mut AsyncApp,
     ) -> Result<()> {
         delegate.set_status(Some("Uploading remote server to WSL"), cx);
 
         if let Some(parent) = dst_path.parent() {
             let parent = parent.display(PathStyle::Posix);
-            if *shell == ShellKind::Nushell {
-                self.run_wsl_command("mkdir", &[&parent]).await
-            } else {
-                self.run_wsl_command("mkdir", &["-p", &parent]).await
-            }
-            .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?;
+            self.run_wsl_command("mkdir", &["-p", &parent])
+                .await
+                .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?;
         }
 
         let t0 = Instant::now();
@@ -441,7 +425,7 @@ impl RemoteConnection for WslRemoteConnection {
             bail!("WSL shares the network interface with the host system");
         }
 
-        let shell_kind = ShellKind::new(&self.shell, false);
+        let shell_kind = self.shell_kind;
         let working_dir = working_dir
             .map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string())
             .unwrap_or("~".to_string());
@@ -461,7 +445,9 @@ impl RemoteConnection for WslRemoteConnection {
             write!(
                 exec,
                 "{}",
-                shell_kind.try_quote(&program).context("shell quoting")?
+                shell_kind
+                    .try_quote_prefix_aware(&program)
+                    .context("shell quoting")?
             )?;
             for arg in args {
                 let arg = shell_kind.try_quote(&arg).context("shell quoting")?;

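The `prepend_command_prefix` calls above exist because nushell shadows some common external programs (e.g. `uname`) with built-ins, and the `^` caret forces the external binary. A sketch of the idea with a hypothetical `ShellKind` enum mirroring the intent of the real one (whose definition is not shown in this diff):

    use std::borrow::Cow;

    #[derive(Clone, Copy)]
    enum ShellKind {
        Posix,
        Nushell,
    }

    impl ShellKind {
        // Prefix external programs with `^` under nushell so its built-ins don't shadow them.
        fn prepend_command_prefix<'a>(self, program: &'a str) -> Cow<'a, str> {
            match self {
                ShellKind::Nushell => format!("^{program}").into(),
                _ => program.into(),
            }
        }
    }

    fn main() {
        assert_eq!(ShellKind::Nushell.prepend_command_prefix("uname"), "^uname");
        assert_eq!(ShellKind::Posix.prepend_command_prefix("uname"), "uname");
    }
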
crates/remote_server/src/remote_editing_tests.rs πŸ”—

@@ -13,7 +13,7 @@ use fs::{FakeFs, Fs};
 use gpui::{AppContext as _, Entity, SemanticVersion, SharedString, TestAppContext};
 use http_client::{BlockedHttpClient, FakeHttpClient};
 use language::{
-    Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding,
+    Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, Rope,
     language_settings::{AllLanguageSettings, language_settings},
 };
 use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind, LanguageServerName};
@@ -120,7 +120,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
     // sees the new file.
     fs.save(
         path!("/code/project1/src/main.rs").as_ref(),
-        &"fn main() {}".into(),
+        &Rope::from_str_small("fn main() {}"),
         Default::default(),
     )
     .await
@@ -766,7 +766,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
 
     fs.save(
         &PathBuf::from(path!("/code/project1/src/lib.rs")),
-        &("bangles".to_string().into()),
+        &Rope::from_str_small("bangles"),
         LineEnding::Unix,
     )
     .await
@@ -781,7 +781,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
 
     fs.save(
         &PathBuf::from(path!("/code/project1/src/lib.rs")),
-        &("bloop".to_string().into()),
+        &Rope::from_str_small("bloop"),
         LineEnding::Unix,
     )
     .await

crates/rich_text/src/rich_text.rs πŸ”—

@@ -1,9 +1,10 @@
 use futures::FutureExt;
 use gpui::{
-    AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText,
-    IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window,
+    AnyElement, AnyView, App, BackgroundExecutor, ElementId, FontStyle, FontWeight, HighlightStyle,
+    InteractiveText, IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle,
+    Window,
 };
-use language::{HighlightId, Language, LanguageRegistry};
+use language::{HighlightId, Language, LanguageRegistry, Rope};
 use std::{ops::Range, sync::Arc};
 use theme::ActiveTheme;
 use ui::LinkPreview;
@@ -56,6 +57,7 @@ impl RichText {
         block: String,
         mentions: &[Mention],
         language_registry: &Arc<LanguageRegistry>,
+        executor: &BackgroundExecutor,
     ) -> Self {
         let mut text = String::new();
         let mut highlights = Vec::new();
@@ -70,6 +72,7 @@ impl RichText {
             &mut highlights,
             &mut link_ranges,
             &mut link_urls,
+            executor,
         );
         text.truncate(text.trim_end().len());
 
@@ -184,6 +187,7 @@ pub fn render_markdown_mut(
     highlights: &mut Vec<(Range<usize>, Highlight)>,
     link_ranges: &mut Vec<Range<usize>>,
     link_urls: &mut Vec<String>,
+    executor: &BackgroundExecutor,
 ) {
     use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd};
 
@@ -202,7 +206,7 @@ pub fn render_markdown_mut(
         match event {
             Event::Text(t) => {
                 if let Some(language) = &current_language {
-                    render_code(text, highlights, t.as_ref(), language);
+                    render_code(text, highlights, t.as_ref(), language, executor);
                 } else {
                     while let Some(mention) = mentions.first() {
                         if !source_range.contains_inclusive(&mention.range) {
@@ -373,11 +377,14 @@ pub fn render_code(
     highlights: &mut Vec<(Range<usize>, Highlight)>,
     content: &str,
     language: &Arc<Language>,
+    executor: &BackgroundExecutor,
 ) {
     let prev_len = text.len();
     text.push_str(content);
     let mut offset = 0;
-    for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
+    for (range, highlight_id) in
+        language.highlight_text(&Rope::from_str(content, executor), 0..content.len())
+    {
         if range.start > offset {
             highlights.push((prev_len + offset..prev_len + range.start, Highlight::Code));
         }

crates/rope/Cargo.toml πŸ”—

@@ -14,10 +14,10 @@ path = "src/rope.rs"
 [dependencies]
 arrayvec = "0.7.1"
 log.workspace = true
-rayon.workspace = true
 sum_tree.workspace = true
 unicode-segmentation.workspace = true
 util.workspace = true
+gpui.workspace = true
 
 [dev-dependencies]
 ctor.workspace = true

crates/rope/benches/rope_benchmark.rs πŸ”—

@@ -3,6 +3,7 @@ use std::ops::Range;
 use criterion::{
     BatchSize, BenchmarkId, Criterion, Throughput, black_box, criterion_group, criterion_main,
 };
+use gpui::{AsyncApp, TestAppContext};
 use rand::prelude::*;
 use rand::rngs::StdRng;
 use rope::{Point, Rope};
@@ -26,10 +27,10 @@ fn generate_random_text(rng: &mut StdRng, len: usize) -> String {
     str
 }
 
-fn generate_random_rope(rng: &mut StdRng, text_len: usize) -> Rope {
+fn generate_random_rope(rng: &mut StdRng, text_len: usize, cx: &AsyncApp) -> Rope {
     let text = generate_random_text(rng, text_len);
     let mut rope = Rope::new();
-    rope.push(&text);
+    rope.push(&text, cx.background_executor());
     rope
 }
 
@@ -82,11 +83,13 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
             let text = generate_random_text(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
 
             b.iter(|| {
                 let mut rope = Rope::new();
                 for _ in 0..10 {
-                    rope.push(&text);
+                    rope.push(&text, cx.background_executor());
                 }
             });
         });
@@ -99,8 +102,10 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
             let mut random_ropes = Vec::new();
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
             for _ in 0..5 {
-                let rope = generate_random_rope(&mut rng, *size);
+                let rope = generate_random_rope(&mut rng, *size, &cx);
                 random_ropes.push(rope);
             }
 
@@ -119,7 +124,9 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let rope = generate_random_rope(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
+            let rope = generate_random_rope(&mut rng, *size, &cx);
 
             b.iter_batched(
                 || generate_random_rope_ranges(&mut rng, &rope),
@@ -139,7 +146,9 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let rope = generate_random_rope(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
+            let rope = generate_random_rope(&mut rng, *size, &cx);
 
             b.iter_batched(
                 || generate_random_rope_ranges(&mut rng, &rope),
@@ -160,7 +169,9 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let rope = generate_random_rope(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
+            let rope = generate_random_rope(&mut rng, *size, &cx);
 
             b.iter(|| {
                 let chars = rope.chars().count();
@@ -175,7 +186,9 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let rope = generate_random_rope(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
+            let rope = generate_random_rope(&mut rng, *size, &cx);
 
             b.iter_batched(
                 || generate_random_rope_points(&mut rng, &rope),
@@ -196,7 +209,9 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let rope = generate_random_rope(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
+            let rope = generate_random_rope(&mut rng, *size, &cx);
 
             b.iter_batched(
                 || generate_random_rope_points(&mut rng, &rope),
@@ -216,7 +231,9 @@ fn rope_benchmarks(c: &mut Criterion) {
         group.throughput(Throughput::Bytes(*size as u64));
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
             let mut rng = StdRng::seed_from_u64(SEED);
-            let rope = generate_random_rope(&mut rng, *size);
+            let cx = TestAppContext::single();
+            let cx = cx.to_async();
+            let rope = generate_random_rope(&mut rng, *size, &cx);
 
             b.iter_batched(
                 || {

crates/rope/src/rope.rs πŸ”—

@@ -5,7 +5,7 @@ mod point_utf16;
 mod unclipped;
 
 use arrayvec::ArrayVec;
-use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
+use gpui::BackgroundExecutor;
 use std::{
     cmp, fmt, io, mem,
     ops::{self, AddAssign, Range},
@@ -31,6 +31,41 @@ impl Rope {
         Self::default()
     }
 
+    /// Create a new rope from a string without trying to parallelize the construction for large strings.
+    pub fn from_str_small(text: &str) -> Self {
+        let mut rope = Self::new();
+        rope.push_small(text);
+        rope
+    }
+
+    /// Create a new rope from a string.
+    pub fn from_str(text: &str, executor: &BackgroundExecutor) -> Self {
+        let mut rope = Self::new();
+        rope.push(text, executor);
+        rope
+    }
+
+    /// Create a new rope from an iterator of text chunks without trying to parallelize the construction for large inputs.
+    pub fn from_iter_small<'a, T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
+        let mut rope = Rope::new();
+        for chunk in iter {
+            rope.push_small(chunk);
+        }
+        rope
+    }
+
+    /// Create a new rope from an iterator of text chunks.
+    pub fn from_iter<'a, T: IntoIterator<Item = &'a str>>(
+        iter: T,
+        executor: &BackgroundExecutor,
+    ) -> Self {
+        let mut rope = Rope::new();
+        for chunk in iter {
+            rope.push(chunk, executor);
+        }
+        rope
+    }
+
     /// Checks that `index`-th byte is the first byte in a UTF-8 code point
     /// sequence or the end of the string.
     ///
@@ -145,12 +180,12 @@ impl Rope {
         self.check_invariants();
     }
 
-    pub fn replace(&mut self, range: Range<usize>, text: &str) {
+    pub fn replace(&mut self, range: Range<usize>, text: &str, executor: &BackgroundExecutor) {
         let mut new_rope = Rope::new();
         let mut cursor = self.cursor(0);
         new_rope.append(cursor.slice(range.start));
         cursor.seek_forward(range.end);
-        new_rope.push(text);
+        new_rope.push(text, executor);
         new_rope.append(cursor.suffix());
         *self = new_rope;
     }
@@ -168,28 +203,12 @@ impl Rope {
         self.slice(start..end)
     }
 
-    pub fn push(&mut self, mut text: &str) {
-        self.chunks.update_last(
-            |last_chunk| {
-                let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE {
-                    text.len()
-                } else {
-                    let mut split_ix = cmp::min(
-                        chunk::MIN_BASE.saturating_sub(last_chunk.text.len()),
-                        text.len(),
-                    );
-                    while !text.is_char_boundary(split_ix) {
-                        split_ix += 1;
-                    }
-                    split_ix
-                };
+    pub fn push(&mut self, mut text: &str, executor: &BackgroundExecutor) {
+        self.fill_last_chunk(&mut text);
 
-                let (suffix, remainder) = text.split_at(split_ix);
-                last_chunk.push_str(suffix);
-                text = remainder;
-            },
-            (),
-        );
+        if text.is_empty() {
+            return;
+        }
 
         #[cfg(all(test, not(rust_analyzer)))]
         const NUM_CHUNKS: usize = 16;
@@ -200,7 +219,8 @@ impl Rope {
         // but given the chunk boundary can land within a character
         // we need to accommodate for the worst case where every chunk gets cut short by up to 4 bytes
         if text.len() > NUM_CHUNKS * chunk::MAX_BASE - NUM_CHUNKS * 4 {
-            return self.push_large(text);
+            let future = self.push_large(text, executor.clone());
+            return executor.block(future);
         }
         // 16 is enough as otherwise we will hit the branch above
         let mut new_chunks = ArrayVec::<_, NUM_CHUNKS>::new();
@@ -220,8 +240,57 @@ impl Rope {
         self.check_invariants();
     }
 
+    /// Pushes a string into the rope. Unlike [`push`], this method does not parallelize the construction on large strings.
+    pub fn push_small(&mut self, mut text: &str) {
+        self.fill_last_chunk(&mut text);
+        if text.is_empty() {
+            return;
+        }
+
+        // Unlike `push`, the number of chunks is unbounded here, so collect them into a growable Vec.
+        let mut new_chunks = Vec::new();
+
+        while !text.is_empty() {
+            let mut split_ix = cmp::min(chunk::MAX_BASE, text.len());
+            while !text.is_char_boundary(split_ix) {
+                split_ix -= 1;
+            }
+            let (chunk, remainder) = text.split_at(split_ix);
+            new_chunks.push(chunk);
+            text = remainder;
+        }
+        self.chunks
+            .extend(new_chunks.into_iter().map(Chunk::new), ());
+
+        self.check_invariants();
+    }
+
+    fn fill_last_chunk(&mut self, text: &mut &str) {
+        self.chunks.update_last(
+            |last_chunk| {
+                let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE {
+                    text.len()
+                } else {
+                    let mut split_ix = cmp::min(
+                        chunk::MIN_BASE.saturating_sub(last_chunk.text.len()),
+                        text.len(),
+                    );
+                    while !text.is_char_boundary(split_ix) {
+                        split_ix += 1;
+                    }
+                    split_ix
+                };
+
+                let (suffix, remainder) = text.split_at(split_ix);
+                last_chunk.push_str(suffix);
+                *text = remainder;
+            },
+            (),
+        );
+    }
+
     /// A copy of `push` specialized for working with large quantities of text.
-    fn push_large(&mut self, mut text: &str) {
+    async fn push_large(&mut self, mut text: &str, executor: BackgroundExecutor) {
         // To avoid frequent reallocs when loading large swaths of file contents,
         // we estimate worst-case `new_chunks` capacity;
         // Chunk is a fixed-capacity buffer. If a character falls on
@@ -254,8 +323,14 @@ impl Rope {
         const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE);
 
         if new_chunks.len() >= PARALLEL_THRESHOLD {
+            // SAFETY: transmuting to 'static is sound here because we block on the future
+            // that consumes these references, and the computation never stores the
+            // 'static references anywhere that outlives this call.
+            let new_chunks =
+                unsafe { std::mem::transmute::<Vec<&str>, Vec<&'static str>>(new_chunks) };
             self.chunks
-                .par_extend(new_chunks.into_par_iter().map(Chunk::new), ());
+                .async_extend(new_chunks.into_iter().map(Chunk::new), executor)
+                .await;
         } else {
             self.chunks
                 .extend(new_chunks.into_iter().map(Chunk::new), ());
@@ -292,8 +367,13 @@ impl Rope {
         }
     }
 
-    pub fn push_front(&mut self, text: &str) {
-        let suffix = mem::replace(self, Rope::from(text));
+    pub fn push_front(&mut self, text: &str, cx: &BackgroundExecutor) {
+        let suffix = mem::replace(self, Rope::from_str(text, cx));
+        self.append(suffix);
+    }
+
+    pub fn push_front_small(&mut self, text: &str) {
+        let suffix = mem::replace(self, Rope::from_str_small(text));
         self.append(suffix);
     }
 
@@ -577,37 +657,19 @@ impl Rope {
     }
 }
 
-impl<'a> From<&'a str> for Rope {
-    fn from(text: &'a str) -> Self {
-        let mut rope = Self::new();
-        rope.push(text);
-        rope
-    }
-}
 
-impl<'a> FromIterator<&'a str> for Rope {
-    fn from_iter<T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
-        let mut rope = Rope::new();
-        for chunk in iter {
-            rope.push(chunk);
-        }
-        rope
-    }
-}
-
-impl From<String> for Rope {
-    #[inline(always)]
-    fn from(text: String) -> Self {
-        Rope::from(text.as_str())
-    }
-}
-
-impl From<&String> for Rope {
-    #[inline(always)]
-    fn from(text: &String) -> Self {
-        Rope::from(text.as_str())
-    }
-}
 
 impl fmt::Display for Rope {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -1639,6 +1701,7 @@ where
 mod tests {
     use super::*;
     use Bias::{Left, Right};
+    use gpui::TestAppContext;
     use rand::prelude::*;
     use std::{cmp::Ordering, env, io::Read};
     use util::RandomCharIter;
@@ -1648,17 +1711,17 @@ mod tests {
         zlog::init_test();
     }
 
-    #[test]
-    fn test_all_4_byte_chars() {
+    #[gpui::test]
+    async fn test_all_4_byte_chars(cx: &mut TestAppContext) {
         let mut rope = Rope::new();
         let text = "πŸ€".repeat(256);
-        rope.push(&text);
+        rope.push(&text, cx.background_executor());
         assert_eq!(rope.text(), text);
     }
 
-    #[test]
-    fn test_clip() {
-        let rope = Rope::from("🧘");
+    #[gpui::test]
+    fn test_clip(cx: &mut TestAppContext) {
+        let rope = Rope::from_str("🧘", cx.background_executor());
 
         assert_eq!(rope.clip_offset(1, Bias::Left), 0);
         assert_eq!(rope.clip_offset(1, Bias::Right), 4);
@@ -1704,9 +1767,9 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_prev_next_line() {
-        let rope = Rope::from("abc\ndef\nghi\njkl");
+    #[gpui::test]
+    fn test_prev_next_line(cx: &mut TestAppContext) {
+        let rope = Rope::from_str("abc\ndef\nghi\njkl", cx.background_executor());
 
         let mut chunks = rope.chunks();
         assert_eq!(chunks.peek().unwrap().chars().next().unwrap(), 'a');
@@ -1748,16 +1811,16 @@ mod tests {
         assert_eq!(chunks.peek(), None);
     }
 
-    #[test]
-    fn test_lines() {
-        let rope = Rope::from("abc\ndefg\nhi");
+    #[gpui::test]
+    fn test_lines(cx: &mut TestAppContext) {
+        let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor());
         let mut lines = rope.chunks().lines();
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), Some("defg"));
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from("abc\ndefg\nhi\n");
+        let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor());
         let mut lines = rope.chunks().lines();
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), Some("defg"));
@@ -1765,14 +1828,14 @@ mod tests {
         assert_eq!(lines.next(), Some(""));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from("abc\ndefg\nhi");
+        let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor());
         let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), Some("defg"));
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from("abc\ndefg\nhi\n");
+        let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor());
         let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
         assert_eq!(lines.next(), Some(""));
         assert_eq!(lines.next(), Some("hi"));
@@ -1780,14 +1843,14 @@ mod tests {
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from("abc\nlonger line test\nhi");
+        let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor());
         let mut lines = rope.chunks().lines();
         assert_eq!(lines.next(), Some("abc"));
         assert_eq!(lines.next(), Some("longer line test"));
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), None);
 
-        let rope = Rope::from("abc\nlonger line test\nhi");
+        let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor());
         let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
         assert_eq!(lines.next(), Some("hi"));
         assert_eq!(lines.next(), Some("longer line test"));
@@ -1796,7 +1859,7 @@ mod tests {
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_random_rope(mut rng: StdRng) {
+    async fn test_random_rope(cx: &mut TestAppContext, mut rng: StdRng) {
         let operations = env::var("OPERATIONS")
             .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
             .unwrap_or(10);
@@ -1812,7 +1875,7 @@ mod tests {
             let mut new_actual = Rope::new();
             let mut cursor = actual.cursor(0);
             new_actual.append(cursor.slice(start_ix));
-            new_actual.push(&new_text);
+            new_actual.push(&new_text, cx.background_executor());
             cursor.seek_forward(end_ix);
             new_actual.append(cursor.suffix());
             actual = new_actual;
@@ -2112,10 +2175,10 @@ mod tests {
         }
     }
 
-    #[test]
-    fn test_chunks_equals_str() {
+    #[gpui::test]
+    fn test_chunks_equals_str(cx: &mut TestAppContext) {
         let text = "This is a multi-chunk\n& multi-line test string!";
-        let rope = Rope::from(text);
+        let rope = Rope::from_str(text, cx.background_executor());
         for start in 0..text.len() {
             for end in start..text.len() {
                 let range = start..end;
@@ -2158,34 +2221,37 @@ mod tests {
             }
         }
 
-        let rope = Rope::from("");
+        let rope = Rope::from_str("", cx.background_executor());
         assert!(rope.chunks_in_range(0..0).equals_str(""));
         assert!(rope.reversed_chunks_in_range(0..0).equals_str(""));
         assert!(!rope.chunks_in_range(0..0).equals_str("foo"));
         assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo"));
     }
 
-    #[test]
-    fn test_is_char_boundary() {
+    #[gpui::test]
+    fn test_is_char_boundary(cx: &mut TestAppContext) {
         let fixture = "地";
-        let rope = Rope::from("地");
+        let rope = Rope::from_str("地", cx.background_executor());
         for b in 0..=fixture.len() {
             assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b));
         }
         let fixture = "";
-        let rope = Rope::from("");
+        let rope = Rope::from_str("", cx.background_executor());
         for b in 0..=fixture.len() {
             assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b));
         }
         let fixture = "πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©";
-        let rope = Rope::from("πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©");
+        let rope = Rope::from_str(
+            "πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©",
+            cx.background_executor(),
+        );
         for b in 0..=fixture.len() {
             assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b));
         }
     }
 
-    #[test]
-    fn test_floor_char_boundary() {
+    #[gpui::test]
+    fn test_floor_char_boundary(cx: &mut TestAppContext) {
         // polyfill of str::floor_char_boundary
         fn floor_char_boundary(str: &str, index: usize) -> usize {
             if index >= str.len() {
@@ -2201,7 +2267,7 @@ mod tests {
         }
 
         let fixture = "地";
-        let rope = Rope::from("地");
+        let rope = Rope::from_str("地", cx.background_executor());
         for b in 0..=fixture.len() {
             assert_eq!(
                 rope.floor_char_boundary(b),
@@ -2210,7 +2276,7 @@ mod tests {
         }
 
         let fixture = "";
-        let rope = Rope::from("");
+        let rope = Rope::from_str("", cx.background_executor());
         for b in 0..=fixture.len() {
             assert_eq!(
                 rope.floor_char_boundary(b),
@@ -2219,7 +2285,10 @@ mod tests {
         }
 
         let fixture = "πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©";
-        let rope = Rope::from("πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©");
+        let rope = Rope::from_str(
+            "πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©",
+            cx.background_executor(),
+        );
         for b in 0..=fixture.len() {
             assert_eq!(
                 rope.floor_char_boundary(b),
@@ -2228,8 +2297,8 @@ mod tests {
         }
     }
 
-    #[test]
-    fn test_ceil_char_boundary() {
+    #[gpui::test]
+    fn test_ceil_char_boundary(cx: &mut TestAppContext) {
         // polyfill of str::ceil_char_boundary
         fn ceil_char_boundary(str: &str, index: usize) -> usize {
             if index > str.len() {
@@ -2244,19 +2313,22 @@ mod tests {
         }
 
         let fixture = "地";
-        let rope = Rope::from("地");
+        let rope = Rope::from_str("地", cx.background_executor());
         for b in 0..=fixture.len() {
             assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b));
         }
 
         let fixture = "";
-        let rope = Rope::from("");
+        let rope = Rope::from_str("", cx.background_executor());
         for b in 0..=fixture.len() {
             assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b));
         }
 
         let fixture = "πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©";
-        let rope = Rope::from("πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©");
+        let rope = Rope::from_str(
+            "πŸ”΄πŸŸ πŸŸ‘πŸŸ’πŸ”΅πŸŸ£βš«οΈβšͺ️🟀\nπŸ³οΈβ€βš§οΈπŸπŸ³οΈβ€πŸŒˆπŸ΄β€β˜ οΈβ›³οΈπŸ“¬πŸ“­πŸ΄πŸ³οΈπŸš©",
+            cx.background_executor(),
+        );
         for b in 0..=fixture.len() {
             assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b));
         }

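The core of `push_small` above is a chunking loop that splits incoming text into fixed-capacity chunks while never cutting through a multi-byte character. Here is a self-contained sketch of that loop, with an arbitrary stand-in for `chunk::MAX_BASE` chosen so the boundary back-off actually triggers:

    // Deliberately not a multiple of 4 so 4-byte characters exercise the back-off below.
    const MAX_BASE: usize = 63;

    fn split_into_chunks(mut text: &str) -> Vec<&str> {
        let mut chunks = Vec::new();
        while !text.is_empty() {
            let mut split_ix = text.len().min(MAX_BASE);
            // Back up until the split lands on a char boundary, so no code point is cut.
            while !text.is_char_boundary(split_ix) {
                split_ix -= 1;
            }
            let (chunk, remainder) = text.split_at(split_ix);
            chunks.push(chunk);
            text = remainder;
        }
        chunks
    }

    fn main() {
        let text = "πŸ€".repeat(100); // 4-byte characters
        let chunks = split_into_chunks(&text);
        assert!(chunks.iter().all(|chunk| chunk.len() <= MAX_BASE));
        assert_eq!(chunks.concat(), text);
    }

`push` reuses the same chunking but, as the diff shows, hands chunk construction to the background executor once the text is large enough, which is why it now takes a `BackgroundExecutor`.
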
crates/rules_library/src/rules_library.rs πŸ”—

@@ -554,7 +554,7 @@ impl RulesLibrary {
 
         let prompt_id = PromptId::new();
         let save = self.store.update(cx, |store, cx| {
-            store.save(prompt_id, None, false, "".into(), cx)
+            store.save(prompt_id, None, false, Default::default(), cx)
         });
         self.picker
             .update(cx, |picker, cx| picker.refresh(window, cx));
@@ -888,7 +888,13 @@ impl RulesLibrary {
             let new_id = PromptId::new();
             let body = rule.body_editor.read(cx).text(cx);
             let save = self.store.update(cx, |store, cx| {
-                store.save(new_id, Some(title.into()), false, body.into(), cx)
+                store.save(
+                    new_id,
+                    Some(title.into()),
+                    false,
+                    Rope::from_str(&body, cx.background_executor()),
+                    cx,
+                )
             });
             self.picker
                 .update(cx, |picker, cx| picker.refresh(window, cx));

crates/search/src/buffer_search.rs πŸ”—

@@ -10,8 +10,9 @@ use any_vec::AnyVec;
 use anyhow::Context as _;
 use collections::HashMap;
 use editor::{
-    DisplayPoint, Editor, EditorSettings,
+    DisplayPoint, Editor, EditorSettings, VimFlavor,
     actions::{Backtab, Tab},
+    vim_flavor,
 };
 use futures::channel::oneshot;
 use gpui::{
@@ -825,7 +826,8 @@ impl BufferSearchBar {
                 .searchable_items_with_matches
                 .get(&active_searchable_item.downgrade())
         {
-            active_searchable_item.activate_match(match_ix, matches, window, cx)
+            let collapse = editor::vim_flavor(cx) == Some(VimFlavor::Vim);
+            active_searchable_item.activate_match(match_ix, matches, collapse, window, cx)
         }
     }
 
@@ -970,7 +972,8 @@ impl BufferSearchBar {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.select_match(Direction::Next, 1, window, cx);
+        let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
+        self.select_match(Direction::Next, 1, collapse, window, cx);
     }
 
     fn select_prev_match(
@@ -979,7 +982,8 @@ impl BufferSearchBar {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.select_match(Direction::Prev, 1, window, cx);
+        let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
+        self.select_match(Direction::Prev, 1, collapse, window, cx);
     }
 
     pub fn select_all_matches(
@@ -1004,6 +1008,7 @@ impl BufferSearchBar {
         &mut self,
         direction: Direction,
         count: usize,
+        collapse: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -1026,7 +1031,7 @@ impl BufferSearchBar {
                 .match_index_for_direction(matches, index, direction, count, window, cx);
 
             searchable_item.update_matches(matches, window, cx);
-            searchable_item.activate_match(new_match_index, matches, window, cx);
+            searchable_item.activate_match(new_match_index, matches, collapse, window, cx);
         }
     }
 
@@ -1040,7 +1045,8 @@ impl BufferSearchBar {
                 return;
             }
             searchable_item.update_matches(matches, window, cx);
-            searchable_item.activate_match(0, matches, window, cx);
+            let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
+            searchable_item.activate_match(0, matches, collapse, window, cx);
         }
     }
 
@@ -1055,7 +1061,8 @@ impl BufferSearchBar {
             }
             let new_match_index = matches.len() - 1;
             searchable_item.update_matches(matches, window, cx);
-            searchable_item.activate_match(new_match_index, matches, window, cx);
+            let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
+            searchable_item.activate_match(new_match_index, matches, collapse, window, cx);
         }
     }
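
Every call site in this hunk derives the new `collapse` flag the same way: from whether the configured vim flavor is Vim. A minimal stand-alone sketch of that shape (the enum and functions here are stand-ins for illustration, not the editor crate's real types):

    #[derive(PartialEq)]
    enum VimFlavor {
        Vim,
        Other, // stand-in for any non-Vim flavor
    }

    // Stand-in for editor::vim_flavor(cx); None would mean vim mode is off.
    fn vim_flavor(setting: Option<VimFlavor>) -> Option<VimFlavor> {
        setting
    }

    fn activate_match(match_ix: usize, collapse: bool) {
        println!("activate match {match_ix}, collapse selection: {collapse}");
    }

    fn main() {
        let collapse = vim_flavor(Some(VimFlavor::Vim)) == Some(VimFlavor::Vim);
        activate_match(0, collapse);

        let no_collapse = vim_flavor(Some(VimFlavor::Other)) == Some(VimFlavor::Vim);
        activate_match(1, no_collapse);
    }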
 

crates/search/src/project_search.rs πŸ”—

@@ -9,10 +9,10 @@ use anyhow::Context as _;
 use collections::HashMap;
 use editor::{
     Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey,
-    SelectionEffects,
+    SelectionEffects, VimFlavor,
     actions::{Backtab, SelectAll, Tab},
     items::active_match_index,
-    multibuffer_context_lines,
+    multibuffer_context_lines, vim_flavor,
 };
 use futures::{StreamExt, stream::FuturesOrdered};
 use gpui::{
@@ -322,18 +322,25 @@ impl ProjectSearch {
 
             let mut limit_reached = false;
             while let Some(results) = matches.next().await {
-                let mut buffers_with_ranges = Vec::with_capacity(results.len());
-                for result in results {
-                    match result {
-                        project::search::SearchResult::Buffer { buffer, ranges } => {
-                            buffers_with_ranges.push((buffer, ranges));
-                        }
-                        project::search::SearchResult::LimitReached => {
-                            limit_reached = true;
+                let (buffers_with_ranges, has_reached_limit) = cx
+                    .background_executor()
+                    .spawn(async move {
+                        let mut limit_reached = false;
+                        let mut buffers_with_ranges = Vec::with_capacity(results.len());
+                        for result in results {
+                            match result {
+                                project::search::SearchResult::Buffer { buffer, ranges } => {
+                                    buffers_with_ranges.push((buffer, ranges));
+                                }
+                                project::search::SearchResult::LimitReached => {
+                                    limit_reached = true;
+                                }
+                            }
                         }
-                    }
-                }
-
+                        (buffers_with_ranges, limit_reached)
+                    })
+                    .await;
+                limit_reached |= has_reached_limit;
                 let mut new_ranges = project_search
                     .update(cx, |project_search, cx| {
                         project_search.excerpts.update(cx, |excerpts, cx| {
@@ -352,7 +359,6 @@ impl ProjectSearch {
                         })
                     })
                     .ok()?;
-
                 while let Some(new_ranges) = new_ranges.next().await {
                     project_search
                         .update(cx, |project_search, cx| {
@@ -1338,7 +1344,8 @@ impl ProjectSearchView {
 
             let range_to_select = match_ranges[new_index].clone();
             self.results_editor.update(cx, |editor, cx| {
-                let range_to_select = editor.range_for_match(&range_to_select);
+                let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
+                let range_to_select = editor.range_for_match(&range_to_select, collapse);
                 editor.unfold_ranges(std::slice::from_ref(&range_to_select), false, true, cx);
                 editor.change_selections(Default::default(), window, cx, |s| {
                     s.select_ranges([range_to_select])
@@ -1409,9 +1416,10 @@ impl ProjectSearchView {
             let is_new_search = self.search_id != prev_search_id;
             self.results_editor.update(cx, |editor, cx| {
                 if is_new_search {
+                    let collapse = vim_flavor(cx) == Some(VimFlavor::Vim);
                     let range_to_select = match_ranges
                         .first()
-                        .map(|range| editor.range_for_match(range));
+                        .map(|range| editor.range_for_match(range, collapse));
                     editor.change_selections(Default::default(), window, cx, |s| {
                         s.select_ranges(range_to_select)
                     });
@@ -2338,7 +2346,15 @@ pub fn perform_project_search(
 
 #[cfg(test)]
 pub mod tests {
-    use std::{ops::Deref as _, sync::Arc, time::Duration};
+    use std::{
+        ops::Deref as _,
+        path::PathBuf,
+        sync::{
+            Arc,
+            atomic::{self, AtomicUsize},
+        },
+        time::Duration,
+    };
 
     use super::*;
     use editor::{DisplayPoint, display_map::DisplayRow};
@@ -4239,6 +4255,8 @@ pub mod tests {
         )
         .await;
 
+        let requests_count = Arc::new(AtomicUsize::new(0));
+        let closure_requests_count = requests_count.clone();
         let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
         let language_registry = project.read_with(cx, |project, _| project.languages().clone());
         let language = rust_lang();
@@ -4250,21 +4268,26 @@ pub mod tests {
                     inlay_hint_provider: Some(lsp::OneOf::Left(true)),
                     ..lsp::ServerCapabilities::default()
                 },
-                initializer: Some(Box::new(|fake_server| {
-                    fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>(
-                        move |_, _| async move {
-                            Ok(Some(vec![lsp::InlayHint {
-                                position: lsp::Position::new(0, 17),
-                                label: lsp::InlayHintLabel::String(": i32".to_owned()),
-                                kind: Some(lsp::InlayHintKind::TYPE),
-                                text_edits: None,
-                                tooltip: None,
-                                padding_left: None,
-                                padding_right: None,
-                                data: None,
-                            }]))
-                        },
-                    );
+                initializer: Some(Box::new(move |fake_server| {
+                    let requests_count = closure_requests_count.clone();
+                    fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>({
+                        move |_, _| {
+                            let requests_count = requests_count.clone();
+                            async move {
+                                requests_count.fetch_add(1, atomic::Ordering::Release);
+                                Ok(Some(vec![lsp::InlayHint {
+                                    position: lsp::Position::new(0, 17),
+                                    label: lsp::InlayHintLabel::String(": i32".to_owned()),
+                                    kind: Some(lsp::InlayHintKind::TYPE),
+                                    text_edits: None,
+                                    tooltip: None,
+                                    padding_left: None,
+                                    padding_right: None,
+                                    data: None,
+                                }]))
+                            }
+                        }
+                    });
                 })),
                 ..FakeLspAdapter::default()
             },
@@ -4278,7 +4301,7 @@ pub mod tests {
         });
 
         perform_search(search_view, "let ", cx);
-        let _fake_server = fake_servers.next().await.unwrap();
+        let fake_server = fake_servers.next().await.unwrap();
         cx.executor().advance_clock(Duration::from_secs(1));
         cx.executor().run_until_parked();
         search_view
@@ -4291,11 +4314,127 @@ pub mod tests {
                 );
             })
             .unwrap();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            1,
+            "New hints should have been queried",
+        );
 
         // Can do the 2nd search without any panics
         perform_search(search_view, "let ", cx);
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        search_view
+            .update(cx, |search_view, _, cx| {
+                assert_eq!(
+                    search_view
+                        .results_editor
+                        .update(cx, |editor, cx| editor.display_text(cx)),
+                    "\n\nfn main() { let a: i32 = 2; }\n"
+                );
+            })
+            .unwrap();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            2,
+            "We did drop the previous buffer when cleared the old project search results, hence another query was made",
+        );
+
+        let singleton_editor = window
+            .update(cx, |workspace, window, cx| {
+                workspace.open_abs_path(
+                    PathBuf::from(path!("/dir/main.rs")),
+                    workspace::OpenOptions::default(),
+                    window,
+                    cx,
+                )
+            })
+            .unwrap()
+            .await
+            .unwrap()
+            .downcast::<Editor>()
+            .unwrap();
         cx.executor().advance_clock(Duration::from_millis(100));
         cx.executor().run_until_parked();
+        singleton_editor.update(cx, |editor, cx| {
+            assert_eq!(
+                editor.display_text(cx),
+                "fn main() { let a: i32 = 2; }\n",
+                "Newly opened editor should have the correct text with hints",
+            );
+        });
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            2,
+            "Opening the same buffer again should reuse the cached hints",
+        );
+
+        window
+            .update(cx, |_, window, cx| {
+                singleton_editor.update(cx, |editor, cx| {
+                    editor.handle_input("test", window, cx);
+                });
+            })
+            .unwrap();
+
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        singleton_editor.update(cx, |editor, cx| {
+            assert_eq!(
+                editor.display_text(cx),
+                "testfn main() { l: i32et a = 2; }\n",
+                "Newly opened editor should have the correct text with hints",
+            );
+        });
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            3,
+            "We have edited the buffer and should send a new request",
+        );
+
+        window
+            .update(cx, |_, window, cx| {
+                singleton_editor.update(cx, |editor, cx| {
+                    editor.undo(&editor::actions::Undo, window, cx);
+                });
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            4,
+            "We have edited the buffer again and should send a new request again",
+        );
+        singleton_editor.update(cx, |editor, cx| {
+            assert_eq!(
+                editor.display_text(cx),
+                "fn main() { let a: i32 = 2; }\n",
+                "Newly opened editor should have the correct text with hints",
+            );
+        });
+        project.update(cx, |_, cx| {
+            cx.emit(project::Event::RefreshInlayHints {
+                server_id: fake_server.server.server_id(),
+                request_id: Some(1),
+            });
+        });
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            5,
+            "After a simulated server refresh request, we should have sent another request",
+        );
+
+        perform_search(search_view, "let ", cx);
+        cx.executor().advance_clock(Duration::from_secs(1));
+        cx.executor().run_until_parked();
+        assert_eq!(
+            requests_count.load(atomic::Ordering::Acquire),
+            5,
+            "New project search should reuse the cached hints",
+        );
         search_view
             .update(cx, |search_view, _, cx| {
                 assert_eq!(

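The new inlay-hint test above drives its assertions off a shared request counter cloned into the fake server's handler. The counting pattern in isolation (plain std, runnable): bump with Release inside the closure, read back with Acquire in the asserts.

    use std::sync::{
        Arc,
        atomic::{AtomicUsize, Ordering},
    };

    fn main() {
        let requests_count = Arc::new(AtomicUsize::new(0));

        // The closure owns its own clone, leaving the original handle free for assertions.
        let handler = {
            let requests_count = requests_count.clone();
            move || {
                requests_count.fetch_add(1, Ordering::Release);
            }
        };

        handler();
        handler();

        assert_eq!(requests_count.load(Ordering::Acquire), 2);
    }
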
crates/settings_ui/src/page_data.rs πŸ”—

@@ -5724,7 +5724,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
                             title: "Display Mode",
                             description: "When to show edit predictions previews in buffer. The eager mode displays them inline, while the subtle mode displays them only when holding a modifier key.",
                             field: Box::new(SettingField {
-                                json_path: Some("edit_prediction_mode"),
+                                json_path: Some("edit_prediction.display_mode"),
                                 pick: |settings_content| {
                                     settings_content.project.all_languages.edit_predictions.as_ref()?.mode.as_ref()
                                 },
@@ -5739,7 +5739,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
                             title: "In Text Threads",
                             description: "Whether edit predictions are enabled when editing text threads in the agent panel.",
                             field: Box::new(SettingField {
-                                json_path: Some("edit_prediction_in_text_threads"),
+                                json_path: Some("edit_prediction.in_text_threads"),
                                 pick: |settings_content| {
                                     settings_content.project.all_languages.edit_predictions.as_ref()?.enabled_in_text_threads.as_ref()
                                 },
@@ -5752,10 +5752,10 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
                         }),
                         SettingsPageItem::SettingItem(SettingItem {
                             title: "Copilot Provider",
-                            description: "Set up GitHub Copilot as your edit prediction provider. You can toggle between it and Zed's default provider.",
+                            description: "Use GitHub Copilot as your edit prediction provider.",
                             field: Box::new(
                                 SettingField {
-                                    json_path: Some("languages.$(language).wrap_guides"),
+                                    json_path: Some("edit_prediction.copilot_provider"),
                                     pick: |settings_content| {
                                         settings_content.project.all_languages.edit_predictions.as_ref()?.copilot.as_ref()
                                     },
@@ -5770,10 +5770,10 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
                         }),
                         SettingsPageItem::SettingItem(SettingItem {
                             title: "Codestral Provider",
-                            description: "Set up Mistral's Codestral as your edit prediction provider. You can toggle between it and Zed's default provider.",
+                            description: "Use Mistral's Codestral as your edit prediction provider.",
                             field: Box::new(
                                 SettingField {
-                                    json_path: Some("languages.$(language).wrap_guides"),
+                                    json_path: Some("edit_prediction.codestral_provider"),
                                     pick: |settings_content| {
                                         settings_content.project.all_languages.edit_predictions.as_ref()?.codestral.as_ref()
                                     },

crates/settings_ui/src/settings_ui.rs πŸ”—

@@ -1989,6 +1989,21 @@ impl SettingsWindow {
 
         let this = cx.entity();
 
+        let selected_file_ix = self
+            .files
+            .iter()
+            .enumerate()
+            .skip(OVERFLOW_LIMIT)
+            .find_map(|(ix, (file, _))| {
+                if file == &self.current_file {
+                    Some(ix)
+                } else {
+                    None
+                }
+            })
+            .unwrap_or(OVERFLOW_LIMIT);
+        let edit_in_json_id = SharedString::new(format!("edit-in-json-{}", selected_file_ix));
+
         h_flex()
             .w_full()
             .gap_1()
@@ -2005,20 +2020,6 @@ impl SettingsWindow {
                         ),
                     )
                     .when(self.files.len() > OVERFLOW_LIMIT, |div| {
-                        let selected_file_ix = self
-                            .files
-                            .iter()
-                            .enumerate()
-                            .skip(OVERFLOW_LIMIT)
-                            .find_map(|(ix, (file, _))| {
-                                if file == &self.current_file {
-                                    Some(ix)
-                                } else {
-                                    None
-                                }
-                            })
-                            .unwrap_or(OVERFLOW_LIMIT);
-
                         let (file, focus_handle) = &self.files[selected_file_ix];
 
                         div.child(file_button(selected_file_ix, file, focus_handle, cx))
@@ -2081,11 +2082,11 @@ impl SettingsWindow {
                     }),
             )
             .child(
-                Button::new("edit-in-json", "Edit in settings.json")
+                Button::new(edit_in_json_id, "Edit in settings.json")
                     .tab_index(0_isize)
                     .style(ButtonStyle::OutlinedGhost)
-                    .on_click(cx.listener(|this, _, _, cx| {
-                        this.open_current_settings_file(cx);
+                    .on_click(cx.listener(|this, _, window, cx| {
+                        this.open_current_settings_file(window, cx);
                     })),
             )
     }
@@ -2801,8 +2802,8 @@ impl SettingsWindow {
                             Button::new("fix-in-json", "Fix in settings.json")
                                 .tab_index(0_isize)
                                 .style(ButtonStyle::Tinted(ui::TintColor::Warning))
-                                .on_click(cx.listener(|this, _, _, cx| {
-                                    this.open_current_settings_file(cx);
+                                .on_click(cx.listener(|this, _, window, cx| {
+                                    this.open_current_settings_file(window, cx);
                                 })),
                         ),
                     )
@@ -2941,7 +2942,7 @@ impl SettingsWindow {
     /// This function will create a new settings file if one doesn't exist,
     /// provided the current file is project settings with a valid worktree id.
     /// We do this because the settings UI allows initializing project settings.
-    fn open_current_settings_file(&mut self, cx: &mut Context<Self>) {
+    fn open_current_settings_file(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         match &self.current_file {
             SettingsUiFile::User => {
                 let Some(original_window) = self.original_window else {
@@ -2983,6 +2984,8 @@ impl SettingsWindow {
                             .detach();
                     })
                     .ok();
+
+                window.remove_window();
             }
             SettingsUiFile::Project((worktree_id, path)) => {
                 let settings_path = path.join(paths::local_settings_file_relative_path());
@@ -3066,8 +3069,11 @@ impl SettingsWindow {
                         .detach();
                     })
                     .ok();
+
+                window.remove_window();
             }
             SettingsUiFile::Server(_) => {
+                // Server files are not editable
                 return;
             }
         };
@@ -3186,8 +3192,8 @@ impl Render for SettingsWindow {
                         .id("settings-window")
                         .key_context("SettingsWindow")
                         .track_focus(&self.focus_handle)
-                        .on_action(cx.listener(|this, _: &OpenCurrentFile, _, cx| {
-                            this.open_current_settings_file(cx);
+                        .on_action(cx.listener(|this, _: &OpenCurrentFile, window, cx| {
+                            this.open_current_settings_file(window, cx);
                         }))
                         .on_action(|_: &Minimize, window, _cx| {
                             window.minimize_window();

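The hoisted `selected_file_ix` lookup above now feeds both the overflow file button and a per-file "edit-in-json" element id. The same lookup with plain std types (OVERFLOW_LIMIT's value and the string file names are assumptions for illustration):

    const OVERFLOW_LIMIT: usize = 2; // assumed value, for illustration only

    fn main() {
        let files = ["user", "project-a", "project-b", "server"];
        let current_file = "project-b";

        // Look for the current file at or past the overflow cutoff; fall back to the cutoff.
        let selected_file_ix = files
            .iter()
            .enumerate()
            .skip(OVERFLOW_LIMIT)
            .find_map(|(ix, file)| (*file == current_file).then_some(ix))
            .unwrap_or(OVERFLOW_LIMIT);

        let edit_in_json_id = format!("edit-in-json-{}", selected_file_ix);
        assert_eq!(edit_in_json_id, "edit-in-json-2");
    }
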
crates/streaming_diff/Cargo.toml πŸ”—

@@ -14,6 +14,7 @@ path = "src/streaming_diff.rs"
 [dependencies]
 ordered-float.workspace = true
 rope.workspace = true
+gpui.workspace = true
 
 [dev-dependencies]
 rand.workspace = true

crates/streaming_diff/src/streaming_diff.rs πŸ”—

@@ -503,11 +503,12 @@ fn is_line_end(point: Point, text: &Rope) -> bool {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use gpui::BackgroundExecutor;
     use rand::prelude::*;
     use std::env;
 
-    #[test]
-    fn test_delete_first_of_two_lines() {
+    #[gpui::test]
+    fn test_delete_first_of_two_lines(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Delete { bytes: 5 },
@@ -523,18 +524,18 @@ mod tests {
             apply_line_operations(old_text, &new_text, &expected_line_ops)
         );
 
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(line_ops, expected_line_ops);
     }
 
-    #[test]
-    fn test_delete_second_of_two_lines() {
+    #[gpui::test]
+    fn test_delete_second_of_two_lines(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
             CharOperation::Delete { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -550,8 +551,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_add_new_line() {
+    #[gpui::test]
+    fn test_add_new_line(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 9 },
@@ -559,7 +560,7 @@ mod tests {
                 text: "\ncccc".into(),
             },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -574,15 +575,15 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_delete_line_in_middle() {
+    #[gpui::test]
+    fn test_delete_line_in_middle(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb\ncccc";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
             CharOperation::Delete { bytes: 5 },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -598,8 +599,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_replace_line() {
+    #[gpui::test]
+    fn test_replace_line(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb\ncccc";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
@@ -609,7 +610,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 5 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -626,8 +627,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_multiple_edits_on_different_lines() {
+    #[gpui::test]
+    fn test_multiple_edits_on_different_lines(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb\ncccc\ndddd";
         let char_ops = vec![
             CharOperation::Insert { text: "A".into() },
@@ -638,7 +639,7 @@ mod tests {
                 text: "\nEEEE".into(),
             },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -656,15 +657,15 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_edit_at_end_of_line() {
+    #[gpui::test]
+    fn test_edit_at_end_of_line(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb\ncccc";
         let char_ops = vec![
             CharOperation::Keep { bytes: 4 },
             CharOperation::Insert { text: "A".into() },
             CharOperation::Keep { bytes: 10 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -680,8 +681,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_insert_newline_character() {
+    #[gpui::test]
+    fn test_insert_newline_character(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaabbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 4 },
@@ -689,7 +690,7 @@ mod tests {
             CharOperation::Keep { bytes: 4 },
         ];
         let new_text = apply_char_operations(old_text, &char_ops);
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -703,14 +704,14 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_insert_newline_at_beginning() {
+    #[gpui::test]
+    fn test_insert_newline_at_beginning(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Insert { text: "\n".into() },
             CharOperation::Keep { bytes: 9 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -725,15 +726,15 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_delete_newline() {
+    #[gpui::test]
+    fn test_delete_newline(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 4 },
             CharOperation::Delete { bytes: 1 },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -749,8 +750,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_insert_multiple_newlines() {
+    #[gpui::test]
+    fn test_insert_multiple_newlines(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
@@ -759,7 +760,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -775,15 +776,15 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_delete_multiple_newlines() {
+    #[gpui::test]
+    fn test_delete_multiple_newlines(cx: &mut gpui::TestAppContext) {
         let old_text = "aaaa\n\n\nbbbb";
         let char_ops = vec![
             CharOperation::Keep { bytes: 5 },
             CharOperation::Delete { bytes: 2 },
             CharOperation::Keep { bytes: 4 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -799,8 +800,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_complex_scenario() {
+    #[gpui::test]
+    fn test_complex_scenario(cx: &mut gpui::TestAppContext) {
         let old_text = "line1\nline2\nline3\nline4";
         let char_ops = vec![
             CharOperation::Keep { bytes: 6 },
@@ -814,7 +815,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 6 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -834,8 +835,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_cleaning_up_common_suffix() {
+    #[gpui::test]
+    fn test_cleaning_up_common_suffix(cx: &mut gpui::TestAppContext) {
         let old_text = concat!(
             "        for y in 0..size.y() {\n",
             "            let a = 10;\n",
@@ -883,7 +884,7 @@ mod tests {
             },
             CharOperation::Keep { bytes: 1 },
         ];
-        let line_ops = char_ops_to_line_ops(old_text, &char_ops);
+        let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor());
         assert_eq!(
             line_ops,
             vec![
@@ -901,8 +902,8 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_random_diffs() {
+    #[gpui::test]
+    fn test_random_diffs(cx: &mut gpui::TestAppContext) {
         random_test(|mut rng| {
             let old_text_len = env::var("OLD_TEXT_LEN")
                 .map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable"))
@@ -922,15 +923,19 @@ mod tests {
             assert_eq!(patched, new);
 
             // Test char_ops_to_line_ops
-            let line_ops = char_ops_to_line_ops(&old, &char_operations);
+            let line_ops = char_ops_to_line_ops(&old, &char_operations, cx.background_executor());
             println!("line operations: {:?}", line_ops);
             let patched = apply_line_operations(&old, &new, &line_ops);
             assert_eq!(patched, new);
         });
     }
 
-    fn char_ops_to_line_ops(old_text: &str, char_ops: &[CharOperation]) -> Vec<LineOperation> {
-        let old_rope = Rope::from(old_text);
+    fn char_ops_to_line_ops(
+        old_text: &str,
+        char_ops: &[CharOperation],
+        executor: &BackgroundExecutor,
+    ) -> Vec<LineOperation> {
+        let old_rope = Rope::from_str(old_text, executor);
         let mut diff = LineDiff::default();
         for op in char_ops {
             diff.push_char_operation(op, &old_rope);

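For readers skimming the fixtures in these tests: Keep and Delete count bytes of the old text, Insert carries new text. A plain-std sketch of those semantics (assuming the operations cover the whole old text; this is not the crate's apply_char_operations implementation):

    enum CharOperation {
        Keep { bytes: usize },
        Delete { bytes: usize },
        Insert { text: String },
    }

    fn apply_char_operations(old: &str, ops: &[CharOperation]) -> String {
        let mut new = String::new();
        let mut offset = 0;
        for op in ops {
            match op {
                CharOperation::Keep { bytes } => {
                    new.push_str(&old[offset..offset + bytes]);
                    offset += bytes;
                }
                CharOperation::Delete { bytes } => offset += bytes,
                CharOperation::Insert { text } => new.push_str(text),
            }
        }
        new
    }

    fn main() {
        // Mirrors test_delete_second_of_two_lines: keep "aaaa\n", drop "bbbb".
        let ops = [
            CharOperation::Keep { bytes: 5 },
            CharOperation::Delete { bytes: 4 },
        ];
        assert_eq!(apply_char_operations("aaaa\nbbbb", &ops), "aaaa\n");
    }
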
crates/sum_tree/Cargo.toml πŸ”—

@@ -15,10 +15,12 @@ doctest = false
 
 [dependencies]
 arrayvec = "0.7.1"
-rayon.workspace = true
 log.workspace = true
+futures.workspace = true
+futures-lite.workspace = true
 
 [dev-dependencies]
 ctor.workspace = true
 rand.workspace = true
 zlog.workspace = true
+pollster = "0.4.0"
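
This dependency swap (rayon out; futures, futures-lite, and test-only pollster in) is what lets the tree construction below run as plain futures: futures-lite's yield_now supplies the cooperative yield points and pollster drives them to completion in tests. A minimal demo of just those two pieces:

    fn main() {
        // pollster::block_on polls the future on the current thread;
        // yield_now returns Pending once (after waking itself) before resolving.
        let value = pollster::block_on(async {
            futures_lite::future::yield_now().await;
            21 * 2
        });
        assert_eq!(value, 42);
    }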

crates/sum_tree/src/sum_tree.rs πŸ”—

@@ -3,17 +3,30 @@ mod tree_map;
 
 use arrayvec::ArrayVec;
 pub use cursor::{Cursor, FilterCursor, Iter};
-use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator as _};
+use futures::{StreamExt, stream};
+use futures_lite::future::yield_now;
 use std::marker::PhantomData;
 use std::mem;
 use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
 pub use tree_map::{MapSeekTarget, TreeMap, TreeSet};
 
-#[cfg(test)]
+#[cfg(all(test, not(rust_analyzer)))]
 pub const TREE_BASE: usize = 2;
-#[cfg(not(test))]
+#[cfg(not(all(test, not(rust_analyzer))))]
 pub const TREE_BASE: usize = 6;
 
+pub trait BackgroundSpawn {
+    type Task<R>: Future<Output = R> + Send + Sync
+    where
+        R: Send + Sync;
+    fn background_spawn<R>(
+        &self,
+        future: impl Future<Output = R> + Send + Sync + 'static,
+    ) -> Self::Task<R>
+    where
+        R: Send + Sync + 'static;
+}
+
 /// An item that can be stored in a [`SumTree`]
 ///
 /// Must be summarized by a type that implements [`Summary`]
@@ -298,62 +311,85 @@ impl<T: Item> SumTree<T> {
         }
     }
 
-    pub fn from_par_iter<I, Iter>(iter: I, cx: <T::Summary as Summary>::Context<'_>) -> Self
+    pub async fn from_iter_async<I, S>(iter: I, spawn: S) -> Self
     where
-        I: IntoParallelIterator<Iter = Iter>,
-        Iter: IndexedParallelIterator<Item = T>,
-        T: Send + Sync,
-        T::Summary: Send + Sync,
-        for<'a> <T::Summary as Summary>::Context<'a>: Sync,
+        T: 'static + Send + Sync,
+        for<'a> T::Summary: Summary<Context<'a> = ()> + Send + Sync,
+        S: BackgroundSpawn,
+        I: IntoIterator<Item = T, IntoIter: ExactSizeIterator>,
     {
-        let mut nodes = iter
-            .into_par_iter()
-            .chunks(2 * TREE_BASE)
-            .map(|items| {
-                let items: ArrayVec<T, { 2 * TREE_BASE }> = items.into_iter().collect();
-                let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> =
-                    items.iter().map(|item| item.summary(cx)).collect();
-                let mut summary = item_summaries[0].clone();
-                for item_summary in &item_summaries[1..] {
-                    <T::Summary as Summary>::add_summary(&mut summary, item_summary, cx);
+        let iter = iter.into_iter();
+        let num_leaves = iter.len().div_ceil(2 * TREE_BASE);
+
+        if num_leaves == 0 {
+            return Self::new(());
+        }
+
+        let mut nodes = stream::iter(iter)
+            .chunks(num_leaves.div_ceil(4))
+            .map(|chunk| async move {
+                let mut chunk = chunk.into_iter();
+                let mut leaves = vec![];
+                loop {
+                    let items: ArrayVec<T, { 2 * TREE_BASE }> =
+                        chunk.by_ref().take(2 * TREE_BASE).collect();
+                    if items.is_empty() {
+                        break;
+                    }
+                    let item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> =
+                        items.iter().map(|item| item.summary(())).collect();
+                    let mut summary = item_summaries[0].clone();
+                    for item_summary in &item_summaries[1..] {
+                        <T::Summary as Summary>::add_summary(&mut summary, item_summary, ());
+                    }
+                    leaves.push(SumTree(Arc::new(Node::Leaf {
+                        summary,
+                        items,
+                        item_summaries,
+                    })));
+                    yield_now().await;
                 }
-                SumTree(Arc::new(Node::Leaf {
-                    summary,
-                    items,
-                    item_summaries,
-                }))
+                leaves
             })
-            .collect::<Vec<_>>();
+            .map(|future| spawn.background_spawn(future))
+            .buffered(4)
+            .flat_map(|it| stream::iter(it.into_iter()))
+            .collect::<Vec<_>>()
+            .await;
 
         let mut height = 0;
         while nodes.len() > 1 {
             height += 1;
-            nodes = nodes
-                .into_par_iter()
+            let current_nodes = mem::take(&mut nodes);
+            nodes = stream::iter(current_nodes)
                 .chunks(2 * TREE_BASE)
-                .map(|child_nodes| {
-                    let child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }> =
-                        child_nodes.into_iter().collect();
-                    let child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> = child_trees
-                        .iter()
-                        .map(|child_tree| child_tree.summary().clone())
-                        .collect();
-                    let mut summary = child_summaries[0].clone();
-                    for child_summary in &child_summaries[1..] {
-                        <T::Summary as Summary>::add_summary(&mut summary, child_summary, cx);
-                    }
-                    SumTree(Arc::new(Node::Internal {
-                        height,
-                        summary,
-                        child_summaries,
-                        child_trees,
-                    }))
+                .map(|chunk| {
+                    spawn.background_spawn(async move {
+                        let child_trees: ArrayVec<SumTree<T>, { 2 * TREE_BASE }> =
+                            chunk.into_iter().collect();
+                        let child_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }> = child_trees
+                            .iter()
+                            .map(|child_tree| child_tree.summary().clone())
+                            .collect();
+                        let mut summary = child_summaries[0].clone();
+                        for child_summary in &child_summaries[1..] {
+                            <T::Summary as Summary>::add_summary(&mut summary, child_summary, ());
+                        }
+                        SumTree(Arc::new(Node::Internal {
+                            height,
+                            summary,
+                            child_summaries,
+                            child_trees,
+                        }))
+                    })
                 })
-                .collect::<Vec<_>>();
+                .buffered(4)
+                .collect::<Vec<_>>()
+                .await;
         }
 
         if nodes.is_empty() {
-            Self::new(cx)
+            Self::new(())
         } else {
             debug_assert_eq!(nodes.len(), 1);
             nodes.pop().unwrap()
@@ -597,15 +633,15 @@ impl<T: Item> SumTree<T> {
         self.append(Self::from_iter(iter, cx), cx);
     }
 
-    pub fn par_extend<I, Iter>(&mut self, iter: I, cx: <T::Summary as Summary>::Context<'_>)
+    pub async fn async_extend<S, I>(&mut self, iter: I, spawn: S)
     where
-        I: IntoParallelIterator<Iter = Iter>,
-        Iter: IndexedParallelIterator<Item = T>,
-        T: Send + Sync,
-        T::Summary: Send + Sync,
-        for<'a> <T::Summary as Summary>::Context<'a>: Sync,
+        S: BackgroundSpawn,
+        I: IntoIterator<Item = T, IntoIter: ExactSizeIterator>,
+        T: 'static + Send + Sync,
+        for<'b> T::Summary: Summary<Context<'b> = ()> + Send + Sync,
     {
-        self.append(Self::from_par_iter(iter, cx), cx);
+        let other = Self::from_iter_async(iter, spawn);
+        self.append(other.await, ());
     }
 
     pub fn push(&mut self, item: T, cx: <T::Summary as Summary>::Context<'_>) {
@@ -1070,6 +1106,23 @@ mod tests {
 
     #[test]
     fn test_random() {
+        struct NoSpawn;
+        impl BackgroundSpawn for NoSpawn {
+            type Task<R>
+                = std::pin::Pin<Box<dyn Future<Output = R> + Sync + Send>>
+            where
+                R: Send + Sync;
+            fn background_spawn<R>(
+                &self,
+                future: impl Future<Output = R> + Send + Sync + 'static,
+            ) -> Self::Task<R>
+            where
+                R: Send + Sync + 'static,
+            {
+                Box::pin(future)
+            }
+        }
+
         let mut starting_seed = 0;
         if let Ok(value) = std::env::var("SEED") {
             starting_seed = value.parse().expect("invalid SEED variable");
@@ -1087,7 +1140,7 @@ mod tests {
 
             let rng = &mut rng;
             let mut tree = SumTree::<u8>::default();
-            let count = rng.random_range(0..10);
+            let count = rng.random_range(0..128);
             if rng.random() {
                 tree.extend(rng.sample_iter(StandardUniform).take(count), ());
             } else {
@@ -1095,13 +1148,13 @@ mod tests {
                     .sample_iter(StandardUniform)
                     .take(count)
                     .collect::<Vec<_>>();
-                tree.par_extend(items, ());
+                pollster::block_on(tree.async_extend(items, NoSpawn));
             }
 
             for _ in 0..num_operations {
                 let splice_end = rng.random_range(0..tree.extent::<Count>(()).0 + 1);
                 let splice_start = rng.random_range(0..splice_end + 1);
-                let count = rng.random_range(0..10);
+                let count = rng.random_range(0..128);
                 let tree_end = tree.extent::<Count>(());
                 let new_items = rng
                     .sample_iter(StandardUniform)
@@ -1117,7 +1170,7 @@ mod tests {
                     if rng.random() {
                         new_tree.extend(new_items, ());
                     } else {
-                        new_tree.par_extend(new_items, ());
+                        pollster::block_on(new_tree.async_extend(new_items, NoSpawn));
                     }
                     cursor.seek(&Count(splice_end), Bias::Right);
                     new_tree.append(cursor.slice(&tree_end, Bias::Right), ());

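A hedged sketch of satisfying the new BackgroundSpawn trait from the caller's side: the simplest implementation just boxes the future and lets the awaiter poll it inline, mirroring the NoSpawn helper in the test above (a real caller would hand the future to an actual background executor). It assumes an Item impl for u8 like the one the crate's own tests define, and drives the async constructor with pollster, as the tests do.

    use std::{future::Future, pin::Pin};

    use sum_tree::{BackgroundSpawn, SumTree};

    struct InlineSpawn;

    impl BackgroundSpawn for InlineSpawn {
        type Task<R>
            = Pin<Box<dyn Future<Output = R> + Send + Sync>>
        where
            R: Send + Sync;

        fn background_spawn<R>(
            &self,
            future: impl Future<Output = R> + Send + Sync + 'static,
        ) -> Self::Task<R>
        where
            R: Send + Sync + 'static,
        {
            // No real executor: the "task" is just the boxed future, polled inline.
            Box::pin(future)
        }
    }

    fn main() {
        let items: Vec<u8> = (0..=255u8).collect();
        let tree = pollster::block_on(SumTree::from_iter_async(items, InlineSpawn));
        assert_eq!(tree.iter().count(), 256);
    }
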
crates/terminal/src/pty_info.rs πŸ”—

@@ -15,6 +15,12 @@ pub struct ProcessIdGetter {
     fallback_pid: u32,
 }
 
+impl ProcessIdGetter {
+    pub fn fallback_pid(&self) -> Pid {
+        Pid::from_u32(self.fallback_pid)
+    }
+}
+
 #[cfg(unix)]
 impl ProcessIdGetter {
     fn new(pty: &Pty) -> ProcessIdGetter {
@@ -31,10 +37,6 @@ impl ProcessIdGetter {
         }
         Some(Pid::from_u32(pid as u32))
     }
-
-    pub fn fallback_pid(&self) -> u32 {
-        self.fallback_pid
-    }
 }
 
 #[cfg(windows)]
@@ -66,10 +68,6 @@ impl ProcessIdGetter {
         }
         Some(Pid::from_u32(pid))
     }
-
-    pub fn fallback_pid(&self) -> u32 {
-        self.fallback_pid
-    }
 }
 
 #[derive(Clone, Debug)]
@@ -122,10 +120,19 @@ impl PtyProcessInfo {
         }
     }
 
+    fn get_child(&self) -> Option<&Process> {
+        let pid = self.pid_getter.fallback_pid();
+        self.system.process(pid)
+    }
+
     pub(crate) fn kill_current_process(&mut self) -> bool {
         self.refresh().is_some_and(|process| process.kill())
     }
 
+    pub(crate) fn kill_child_process(&mut self) -> bool {
+        self.get_child().is_some_and(|process| process.kill())
+    }
+
     fn load(&mut self) -> Option<ProcessInfo> {
         let process = self.refresh()?;
         let cwd = process.cwd().map_or(PathBuf::new(), |p| p.to_owned());

crates/terminal/src/terminal.rs πŸ”—

@@ -402,6 +402,7 @@ impl TerminalBuilder {
                 window_id,
             },
             child_exited: None,
+            event_loop_task: Task::ready(Ok(())),
         };
 
         Ok(TerminalBuilder {
@@ -423,236 +424,236 @@ impl TerminalBuilder {
         completion_tx: Option<Sender<Option<ExitStatus>>>,
         cx: &App,
         activation_script: Vec<String>,
-    ) -> Result<TerminalBuilder> {
-        // If the parent environment doesn't have a locale set
-        // (As is the case when launched from a .app on MacOS),
-        // and the Project doesn't have a locale set, then
-        // set a fallback for our child environment to use.
-        if std::env::var("LANG").is_err() {
-            env.entry("LANG".to_string())
-                .or_insert_with(|| "en_US.UTF-8".to_string());
-        }
-
-        env.insert("ZED_TERM".to_string(), "true".to_string());
-        env.insert("TERM_PROGRAM".to_string(), "zed".to_string());
-        env.insert("TERM".to_string(), "xterm-256color".to_string());
-        env.insert("COLORTERM".to_string(), "truecolor".to_string());
-        env.insert(
-            "TERM_PROGRAM_VERSION".to_string(),
-            release_channel::AppVersion::global(cx).to_string(),
-        );
-
-        #[derive(Default)]
-        struct ShellParams {
-            program: String,
-            args: Option<Vec<String>>,
-            title_override: Option<String>,
-        }
-
-        impl ShellParams {
-            fn new(
+    ) -> Task<Result<TerminalBuilder>> {
+        let version = release_channel::AppVersion::global(cx);
+        cx.background_spawn(async move {
+            // If the parent environment doesn't have a locale set
+            // (As is the case when launched from a .app on MacOS),
+            // and the Project doesn't have a locale set, then
+            // set a fallback for our child environment to use.
+            if std::env::var("LANG").is_err() {
+                env.entry("LANG".to_string())
+                    .or_insert_with(|| "en_US.UTF-8".to_string());
+            }
+
+            env.insert("ZED_TERM".to_string(), "true".to_string());
+            env.insert("TERM_PROGRAM".to_string(), "zed".to_string());
+            env.insert("TERM".to_string(), "xterm-256color".to_string());
+            env.insert("COLORTERM".to_string(), "truecolor".to_string());
+            env.insert("TERM_PROGRAM_VERSION".to_string(), version.to_string());
+
+            #[derive(Default)]
+            struct ShellParams {
                 program: String,
                 args: Option<Vec<String>>,
                 title_override: Option<String>,
-            ) -> Self {
-                log::debug!("Using {program} as shell");
-                Self {
-                    program,
-                    args,
-                    title_override,
-                }
             }
-        }
 
-        let shell_params = match shell.clone() {
-            Shell::System => {
-                if cfg!(windows) {
-                    Some(ShellParams::new(
-                        util::shell::get_windows_system_shell(),
-                        None,
-                        None,
-                    ))
-                } else {
-                    None
+            impl ShellParams {
+                fn new(
+                    program: String,
+                    args: Option<Vec<String>>,
+                    title_override: Option<String>,
+                ) -> Self {
+                    log::debug!("Using {program} as shell");
+                    Self {
+                        program,
+                        args,
+                        title_override,
+                    }
                 }
             }
-            Shell::Program(program) => Some(ShellParams::new(program, None, None)),
-            Shell::WithArguments {
-                program,
-                args,
-                title_override,
-            } => Some(ShellParams::new(program, Some(args), title_override)),
-        };
-        let terminal_title_override = shell_params.as_ref().and_then(|e| e.title_override.clone());
 
-        #[cfg(windows)]
-        let shell_program = shell_params.as_ref().map(|params| {
-            use util::ResultExt;
+            let shell_params = match shell.clone() {
+                Shell::System => {
+                    if cfg!(windows) {
+                        Some(ShellParams::new(
+                            util::shell::get_windows_system_shell(),
+                            None,
+                            None,
+                        ))
+                    } else {
+                        None
+                    }
+                }
+                Shell::Program(program) => Some(ShellParams::new(program, None, None)),
+                Shell::WithArguments {
+                    program,
+                    args,
+                    title_override,
+                } => Some(ShellParams::new(program, Some(args), title_override)),
+            };
+            let terminal_title_override =
+                shell_params.as_ref().and_then(|e| e.title_override.clone());
 
-            Self::resolve_path(&params.program)
-                .log_err()
-                .unwrap_or(params.program.clone())
-        });
+            #[cfg(windows)]
+            let shell_program = shell_params.as_ref().map(|params| {
+                use util::ResultExt;
 
-        // Note: when remoting, this shell_kind will scrutinize `ssh` or
-        // `wsl.exe` as a shell and fall back to posix or powershell based on
-        // the compilation target. This is fine right now due to the restricted
-        // way we use the return value, but would become incorrect if we
-        // supported remoting into windows.
-        let shell_kind = shell.shell_kind(cfg!(windows));
-
-        let pty_options = {
-            let alac_shell = shell_params.as_ref().map(|params| {
-                alacritty_terminal::tty::Shell::new(
-                    params.program.clone(),
-                    params.args.clone().unwrap_or_default(),
-                )
+                Self::resolve_path(&params.program)
+                    .log_err()
+                    .unwrap_or(params.program.clone())
             });
 
-            alacritty_terminal::tty::Options {
-                shell: alac_shell,
-                working_directory: working_directory.clone(),
-                drain_on_exit: true,
-                env: env.clone().into_iter().collect(),
-                #[cfg(windows)]
-                escape_args: shell_kind.tty_escape_args(),
-            }
-        };
+            // Note: when remoting, this shell_kind will scrutinize `ssh` or
+            // `wsl.exe` as a shell and fall back to posix or powershell based on
+            // the compilation target. This is fine right now due to the restricted
+            // way we use the return value, but would become incorrect if we
+            // supported remoting into windows.
+            let shell_kind = shell.shell_kind(cfg!(windows));
+
+            let pty_options = {
+                let alac_shell = shell_params.as_ref().map(|params| {
+                    alacritty_terminal::tty::Shell::new(
+                        params.program.clone(),
+                        params.args.clone().unwrap_or_default(),
+                    )
+                });
 
-        let default_cursor_style = AlacCursorStyle::from(cursor_shape);
-        let scrolling_history = if task.is_some() {
-            // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling.
-            // After the task finishes, we do not allow appending to that terminal, so small tasks output should not
-            // cause excessive memory usage over time.
-            MAX_SCROLL_HISTORY_LINES
-        } else {
-            max_scroll_history_lines
-                .unwrap_or(DEFAULT_SCROLL_HISTORY_LINES)
-                .min(MAX_SCROLL_HISTORY_LINES)
-        };
-        let config = Config {
-            scrolling_history,
-            default_cursor_style,
-            ..Config::default()
-        };
+                alacritty_terminal::tty::Options {
+                    shell: alac_shell,
+                    working_directory: working_directory.clone(),
+                    drain_on_exit: true,
+                    env: env.clone().into_iter().collect(),
+                    #[cfg(windows)]
+                    escape_args: shell_kind.tty_escape_args(),
+                }
+            };
 
-        //Spawn a task so the Alacritty EventLoop can communicate with us
-        //TODO: Remove with a bounded sender which can be dispatched on &self
-        let (events_tx, events_rx) = unbounded();
-        //Set up the terminal...
-        let mut term = Term::new(
-            config.clone(),
-            &TerminalBounds::default(),
-            ZedListener(events_tx.clone()),
-        );
+            let default_cursor_style = AlacCursorStyle::from(cursor_shape);
+            let scrolling_history = if task.is_some() {
+                // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling.
+                // After the task finishes, we do not allow appending to that terminal, so small tasks output should not
+                // cause excessive memory usage over time.
+                MAX_SCROLL_HISTORY_LINES
+            } else {
+                max_scroll_history_lines
+                    .unwrap_or(DEFAULT_SCROLL_HISTORY_LINES)
+                    .min(MAX_SCROLL_HISTORY_LINES)
+            };
+            let config = Config {
+                scrolling_history,
+                default_cursor_style,
+                ..Config::default()
+            };
 
-        //Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
-        if let AlternateScroll::Off = alternate_scroll {
-            term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll));
-        }
+            //Setup the pty...
+            let pty = match tty::new(&pty_options, TerminalBounds::default().into(), window_id) {
+                Ok(pty) => pty,
+                Err(error) => {
+                    bail!(TerminalError {
+                        directory: working_directory,
+                        program: shell_params.as_ref().map(|params| params.program.clone()),
+                        args: shell_params.as_ref().and_then(|params| params.args.clone()),
+                        title_override: terminal_title_override,
+                        source: error,
+                    });
+                }
+            };
 
-        let term = Arc::new(FairMutex::new(term));
+            //Spawn a task so the Alacritty EventLoop can communicate with us
+            //TODO: Replace this with a bounded sender which can be dispatched on &self
+            let (events_tx, events_rx) = unbounded();
+            //Set up the terminal...
+            let mut term = Term::new(
+                config.clone(),
+                &TerminalBounds::default(),
+                ZedListener(events_tx.clone()),
+            );
 
-        //Setup the pty...
-        let pty = match tty::new(&pty_options, TerminalBounds::default().into(), window_id) {
-            Ok(pty) => pty,
-            Err(error) => {
-                bail!(TerminalError {
-                    directory: working_directory,
-                    program: shell_params.as_ref().map(|params| params.program.clone()),
-                    args: shell_params.as_ref().and_then(|params| params.args.clone()),
-                    title_override: terminal_title_override,
-                    source: error,
-                });
+            //Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
+            if let AlternateScroll::Off = alternate_scroll {
+                term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll));
             }
-        };
 
-        let pty_info = PtyProcessInfo::new(&pty);
+            let term = Arc::new(FairMutex::new(term));
 
-        //And connect them together
-        let event_loop = EventLoop::new(
-            term.clone(),
-            ZedListener(events_tx),
-            pty,
-            pty_options.drain_on_exit,
-            false,
-        )
-        .context("failed to create event loop")?;
+            let pty_info = PtyProcessInfo::new(&pty);
 
-        //Kick things off
-        let pty_tx = event_loop.channel();
-        let _io_thread = event_loop.spawn(); // DANGER
+            //And connect them together
+            let event_loop = EventLoop::new(
+                term.clone(),
+                ZedListener(events_tx),
+                pty,
+                pty_options.drain_on_exit,
+                false,
+            )
+            .context("failed to create event loop")?;
 
-        let no_task = task.is_none();
+            let pty_tx = event_loop.channel();
+            let _io_thread = event_loop.spawn(); // DANGER
 
-        let terminal = Terminal {
-            task,
-            terminal_type: TerminalType::Pty {
-                pty_tx: Notifier(pty_tx),
-                info: pty_info,
-            },
-            completion_tx,
-            term,
-            term_config: config,
-            title_override: terminal_title_override,
-            events: VecDeque::with_capacity(10), //Should never get this high.
-            last_content: Default::default(),
-            last_mouse: None,
-            matches: Vec::new(),
-            selection_head: None,
-            breadcrumb_text: String::new(),
-            scroll_px: px(0.),
-            next_link_id: 0,
-            selection_phase: SelectionPhase::Ended,
-            hyperlink_regex_searches: RegexSearches::new(),
-            vi_mode_enabled: false,
-            is_ssh_terminal,
-            last_mouse_move_time: Instant::now(),
-            last_hyperlink_search_position: None,
-            #[cfg(windows)]
-            shell_program,
-            activation_script: activation_script.clone(),
-            template: CopyTemplate {
-                shell,
-                env,
-                cursor_shape,
-                alternate_scroll,
-                max_scroll_history_lines,
-                window_id,
-            },
-            child_exited: None,
-        };
+            let no_task = task.is_none();
+            let terminal = Terminal {
+                task,
+                terminal_type: TerminalType::Pty {
+                    pty_tx: Notifier(pty_tx),
+                    info: pty_info,
+                },
+                completion_tx,
+                term,
+                term_config: config,
+                title_override: terminal_title_override,
+                events: VecDeque::with_capacity(10), //Should never get this high.
+                last_content: Default::default(),
+                last_mouse: None,
+                matches: Vec::new(),
+                selection_head: None,
+                breadcrumb_text: String::new(),
+                scroll_px: px(0.),
+                next_link_id: 0,
+                selection_phase: SelectionPhase::Ended,
+                hyperlink_regex_searches: RegexSearches::new(),
+                vi_mode_enabled: false,
+                is_ssh_terminal,
+                last_mouse_move_time: Instant::now(),
+                last_hyperlink_search_position: None,
+                #[cfg(windows)]
+                shell_program,
+                activation_script: activation_script.clone(),
+                template: CopyTemplate {
+                    shell,
+                    env,
+                    cursor_shape,
+                    alternate_scroll,
+                    max_scroll_history_lines,
+                    window_id,
+                },
+                child_exited: None,
+                event_loop_task: Task::ready(Ok(())),
+            };
 
-        if !activation_script.is_empty() && no_task {
-            for activation_script in activation_script {
-                terminal.write_to_pty(activation_script.into_bytes());
+            if !activation_script.is_empty() && no_task {
+                for activation_script in activation_script {
+                    terminal.write_to_pty(activation_script.into_bytes());
+                    // Simulate enter key press
+                    // NOTE(PowerShell): using `\r\n` will put PowerShell into continuation mode (the infamous `>>` prompt)
+                    // and generally mess up the rendering.
+                    terminal.write_to_pty(b"\x0d");
+                }
+                // In order to clear the screen at this point, we have two options:
+                // 1. We can send a shell-specific command such as "clear" or "cls"
+                // 2. We can "echo" a marker message that we will then catch when handling a Wakeup event
+                //    and clear the screen using the `terminal.clear()` method
+                // We cannot call `terminal.clear()` directly at this point because alacritty is event-driven:
+                // even though we have already sent the activation script to the pty, it executes asynchronously.
+                // Therefore, we need some way to wait for the activation script to finish executing before we
+                // can proceed with clearing the screen.
+                terminal.write_to_pty(shell_kind.clear_screen_command().as_bytes());
                 // Simulate enter key press
-                // NOTE(PowerShell): using `\r\n` will put PowerShell in a continuation mode (infamous >> character)
-                // and generally mess up the rendering.
                 terminal.write_to_pty(b"\x0d");
             }
-            // In order to clear the screen at this point, we have two options:
-            // 1. We can send a shell-specific command such as "clear" or "cls"
-            // 2. We can "echo" a marker message that we will then catch when handling a Wakeup event
-            //    and clear the screen using `terminal.clear()` method
-            // We cannot issue a `terminal.clear()` command at this point as alacritty is evented
-            // and while we have sent the activation script to the pty, it will be executed asynchronously.
-            // Therefore, we somehow need to wait for the activation script to finish executing before we
-            // can proceed with clearing the screen.
-            terminal.write_to_pty(shell_kind.clear_screen_command().as_bytes());
-            // Simulate enter key press
-            terminal.write_to_pty(b"\x0d");
-        }
 
-        Ok(TerminalBuilder {
-            terminal,
-            events_rx,
+            Ok(TerminalBuilder {
+                terminal,
+                events_rx,
+            })
         })
     }
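
The comment above settles on option 1: echo a shell-appropriate clear command followed by a carriage return once the activation script has run. As an illustration only, a per-shell lookup along the lines of `ShellKind::clear_screen_command` might look like the sketch below (hypothetical mapping; the real implementation lives in `util::shell` and is not part of this diff).

    use util::shell::ShellKind; // path assumed

    // Hypothetical sketch of a shell-specific clear command; the actual
    // ShellKind::clear_screen_command may map shells differently.
    fn clear_screen_command(kind: &ShellKind) -> &'static str {
        match kind {
            // cmd.exe and PowerShell conventionally clear with `cls`.
            ShellKind::Cmd | ShellKind::PowerShell => "cls",
            // POSIX-ish shells (and most others) understand `clear`.
            _ => "clear",
        }
    }
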
 
     pub fn subscribe(mut self, cx: &Context<Terminal>) -> Terminal {
         //Event loop
-        cx.spawn(async move |terminal, cx| {
+        self.terminal.event_loop_task = cx.spawn(async move |terminal, cx| {
             while let Some(event) = self.events_rx.next().await {
                 terminal.update(cx, |terminal, cx| {
                     //Process the first event immediately for lowered latency
@@ -709,11 +710,8 @@ impl TerminalBuilder {
                     smol::future::yield_now().await;
                 }
             }
-
             anyhow::Ok(())
-        })
-        .detach();
-
+        });
         self.terminal
     }
 
@@ -836,6 +834,7 @@ pub struct Terminal {
     template: CopyTemplate,
     activation_script: Vec<String>,
     child_exited: Option<ExitStatus>,
+    event_loop_task: Task<Result<(), anyhow::Error>>,
 }
 
 struct CopyTemplate {
@@ -1266,15 +1265,11 @@ impl Terminal {
     }
 
     pub fn total_lines(&self) -> usize {
-        let term = self.term.clone();
-        let terminal = term.lock_unfair();
-        terminal.total_lines()
+        self.term.lock_unfair().total_lines()
     }
 
     pub fn viewport_lines(&self) -> usize {
-        let term = self.term.clone();
-        let terminal = term.lock_unfair();
-        terminal.screen_lines()
+        self.term.lock_unfair().screen_lines()
     }
 
     //To test:
@@ -2151,7 +2146,7 @@ impl Terminal {
         self.vi_mode_enabled
     }
 
-    pub fn clone_builder(&self, cx: &App, cwd: Option<PathBuf>) -> Result<TerminalBuilder> {
+    pub fn clone_builder(&self, cx: &App, cwd: Option<PathBuf>) -> Task<Result<TerminalBuilder>> {
         let working_directory = self.working_directory().or_else(|| cwd);
         TerminalBuilder::new(
             working_directory,
@@ -2241,7 +2236,8 @@ unsafe fn append_text_to_term(term: &mut Term<ZedListener>, text_lines: &[&str])
 
 impl Drop for Terminal {
     fn drop(&mut self) {
-        if let TerminalType::Pty { pty_tx, .. } = &self.terminal_type {
+        if let TerminalType::Pty { pty_tx, info } = &mut self.terminal_type {
+            info.kill_child_process();
             pty_tx.0.send(Msg::Shutdown).ok();
         }
     }
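
Since `TerminalBuilder::new` now returns a task (and `clone_builder` a `Task<Result<TerminalBuilder>>`), call sites await the builder off the foreground thread and only then create the entity that owns the event loop; the updated tests below follow this shape. A condensed sketch of the calling pattern, assuming an async gpui test context with a `completion_tx` channel already set up:

    // Condensed from the updated tests: build asynchronously, await the
    // builder, then create the entity that owns the event loop.
    let builder = cx
        .update(|cx| {
            TerminalBuilder::new(
                None,
                None,
                task::Shell::System,
                HashMap::default(),
                CursorShape::default(),
                AlternateScroll::On,
                None,
                false,
                0,
                Some(completion_tx),
                cx,
                Vec::new(),
            )
        })
        .await
        .unwrap();
    // `subscribe` stores the event-forwarding future in `event_loop_task`, so
    // dropping the Terminal also stops the loop and, per the Drop impl above,
    // kills the child process.
    let terminal = cx.new(|cx| builder.subscribe(cx));
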
@@ -2387,28 +2383,30 @@ mod tests {
         let (completion_tx, completion_rx) = smol::channel::unbounded();
         let (program, args) = ShellBuilder::new(&Shell::System, false)
             .build(Some("echo".to_owned()), &["hello".to_owned()]);
-        let terminal = cx.new(|cx| {
-            TerminalBuilder::new(
-                None,
-                None,
-                task::Shell::WithArguments {
-                    program,
-                    args,
-                    title_override: None,
-                },
-                HashMap::default(),
-                CursorShape::default(),
-                AlternateScroll::On,
-                None,
-                false,
-                0,
-                Some(completion_tx),
-                cx,
-                vec![],
-            )
-            .unwrap()
-            .subscribe(cx)
-        });
+        let builder = cx
+            .update(|cx| {
+                TerminalBuilder::new(
+                    None,
+                    None,
+                    task::Shell::WithArguments {
+                        program,
+                        args,
+                        title_override: None,
+                    },
+                    HashMap::default(),
+                    CursorShape::default(),
+                    AlternateScroll::On,
+                    None,
+                    false,
+                    0,
+                    Some(completion_tx),
+                    cx,
+                    vec![],
+                )
+            })
+            .await
+            .unwrap();
+        let terminal = cx.new(|cx| builder.subscribe(cx));
         assert_eq!(
             completion_rx.recv().await.unwrap(),
             Some(ExitStatus::default())
@@ -2437,25 +2435,27 @@ mod tests {
         cx.executor().allow_parking();
 
         let (completion_tx, completion_rx) = smol::channel::unbounded();
+        let builder = cx
+            .update(|cx| {
+                TerminalBuilder::new(
+                    None,
+                    None,
+                    task::Shell::System,
+                    HashMap::default(),
+                    CursorShape::default(),
+                    AlternateScroll::On,
+                    None,
+                    false,
+                    0,
+                    Some(completion_tx),
+                    cx,
+                    Vec::new(),
+                )
+            })
+            .await
+            .unwrap();
         // Build an empty command, which will result in a tty shell spawned.
-        let terminal = cx.new(|cx| {
-            TerminalBuilder::new(
-                None,
-                None,
-                task::Shell::System,
-                HashMap::default(),
-                CursorShape::default(),
-                AlternateScroll::On,
-                None,
-                false,
-                0,
-                Some(completion_tx),
-                cx,
-                Vec::new(),
-            )
-            .unwrap()
-            .subscribe(cx)
-        });
+        let terminal = cx.new(|cx| builder.subscribe(cx));
 
         let (event_tx, event_rx) = smol::channel::unbounded::<Event>();
         cx.update(|cx| {
@@ -2506,28 +2506,30 @@ mod tests {
         let (completion_tx, completion_rx) = smol::channel::unbounded();
         let (program, args) = ShellBuilder::new(&Shell::System, false)
             .build(Some("asdasdasdasd".to_owned()), &["@@@@@".to_owned()]);
-        let terminal = cx.new(|cx| {
-            TerminalBuilder::new(
-                None,
-                None,
-                task::Shell::WithArguments {
-                    program,
-                    args,
-                    title_override: None,
-                },
-                HashMap::default(),
-                CursorShape::default(),
-                AlternateScroll::On,
-                None,
-                false,
-                0,
-                Some(completion_tx),
-                cx,
-                Vec::new(),
-            )
-            .unwrap()
-            .subscribe(cx)
-        });
+        let builder = cx
+            .update(|cx| {
+                TerminalBuilder::new(
+                    None,
+                    None,
+                    task::Shell::WithArguments {
+                        program,
+                        args,
+                        title_override: None,
+                    },
+                    HashMap::default(),
+                    CursorShape::default(),
+                    AlternateScroll::On,
+                    None,
+                    false,
+                    0,
+                    Some(completion_tx),
+                    cx,
+                    Vec::new(),
+                )
+            })
+            .await
+            .unwrap();
+        let terminal = cx.new(|cx| builder.subscribe(cx));
 
         let (event_tx, event_rx) = smol::channel::unbounded::<Event>();
         cx.update(|cx| {

crates/terminal_view/src/persistence.rs πŸ”—

@@ -214,14 +214,6 @@ async fn deserialize_pane_group(
         }
         SerializedPaneGroup::Pane(serialized_pane) => {
             let active = serialized_pane.active;
-            let new_items = deserialize_terminal_views(
-                workspace_id,
-                project.clone(),
-                workspace.clone(),
-                serialized_pane.children.as_slice(),
-                cx,
-            )
-            .await;
 
             let pane = panel
                 .update_in(cx, |terminal_panel, window, cx| {
@@ -236,56 +228,71 @@ async fn deserialize_pane_group(
                 .log_err()?;
             let active_item = serialized_pane.active_item;
             let pinned_count = serialized_pane.pinned_count;
-            let terminal = pane
-                .update_in(cx, |pane, window, cx| {
-                    populate_pane_items(pane, new_items, active_item, window, cx);
-                    pane.set_pinned_count(pinned_count);
+            let new_items = deserialize_terminal_views(
+                workspace_id,
+                project.clone(),
+                workspace.clone(),
+                serialized_pane.children.as_slice(),
+                cx,
+            );
+            cx.spawn({
+                let pane = pane.downgrade();
+                async move |cx| {
+                    let new_items = new_items.await;
+
+                    let items = pane.update_in(cx, |pane, window, cx| {
+                        populate_pane_items(pane, new_items, active_item, window, cx);
+                        pane.set_pinned_count(pinned_count);
+                        pane.items_len()
+                    });
                     // Avoid blank panes in splits
-                    if pane.items_len() == 0 {
+                    if items.is_ok_and(|items| items == 0) {
                         let working_directory = workspace
                             .update(cx, |workspace, cx| default_working_directory(workspace, cx))
                             .ok()
                             .flatten();
-                        let terminal = project.update(cx, |project, cx| {
-                            project.create_terminal_shell(working_directory, cx)
-                        });
-                        Some(Some(terminal))
-                    } else {
-                        Some(None)
+                        let Some(terminal) = project
+                            .update(cx, |project, cx| {
+                                project.create_terminal_shell(working_directory, cx)
+                            })
+                            .log_err()
+                        else {
+                            return;
+                        };
+
+                        let terminal = terminal.await.log_err();
+                        pane.update_in(cx, |pane, window, cx| {
+                            if let Some(terminal) = terminal {
+                                let terminal_view = Box::new(cx.new(|cx| {
+                                    TerminalView::new(
+                                        terminal,
+                                        workspace.clone(),
+                                        Some(workspace_id),
+                                        project.downgrade(),
+                                        window,
+                                        cx,
+                                    )
+                                }));
+                                pane.add_item(terminal_view, true, false, None, window, cx);
+                            }
+                        })
+                        .ok();
                     }
-                })
-                .ok()
-                .flatten()?;
-            if let Some(terminal) = terminal {
-                let terminal = terminal.await.ok()?;
-                pane.update_in(cx, |pane, window, cx| {
-                    let terminal_view = Box::new(cx.new(|cx| {
-                        TerminalView::new(
-                            terminal,
-                            workspace.clone(),
-                            Some(workspace_id),
-                            project.downgrade(),
-                            window,
-                            cx,
-                        )
-                    }));
-                    pane.add_item(terminal_view, true, false, None, window, cx);
-                })
-                .ok()?;
-            }
+                }
+            })
+            .await;
             Some((Member::Pane(pane.clone()), active.then_some(pane)))
         }
     }
 }
 
-async fn deserialize_terminal_views(
+fn deserialize_terminal_views(
     workspace_id: WorkspaceId,
     project: Entity<Project>,
     workspace: WeakEntity<Workspace>,
     item_ids: &[u64],
     cx: &mut AsyncWindowContext,
-) -> Vec<Entity<TerminalView>> {
-    let mut items = Vec::with_capacity(item_ids.len());
+) -> impl Future<Output = Vec<Entity<TerminalView>>> + use<> {
     let mut deserialized_items = item_ids
         .iter()
         .map(|item_id| {
@@ -302,12 +309,15 @@ async fn deserialize_terminal_views(
             .unwrap_or_else(|e| Task::ready(Err(e.context("no window present"))))
         })
         .collect::<FuturesUnordered<_>>();
-    while let Some(item) = deserialized_items.next().await {
-        if let Some(item) = item.log_err() {
-            items.push(item);
+    async move {
+        let mut items = Vec::with_capacity(deserialized_items.len());
+        while let Some(item) = deserialized_items.next().await {
+            if let Some(item) = item.log_err() {
+                items.push(item);
+            }
         }
+        items
     }
-    items
 }
 
 #[derive(Debug, Serialize, Deserialize)]
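
With this change `deserialize_terminal_views` is no longer `async`: it spawns the per-item lookup tasks eagerly into a `FuturesUnordered` and returns an `impl Future + use<>` that only collects the results, so the returned future borrows nothing from the window context. A generic sketch of that eager-start, late-collect shape (illustrative names, not the real types):

    use std::future::Future;

    use futures::{StreamExt, stream::FuturesUnordered};

    // Illustrative only: kick off all work up front and return a future that
    // merely drains the results. `use<>` states that the hidden type captures
    // no lifetimes from the borrowed argument.
    fn load_all(item_ids: &[u64]) -> impl Future<Output = Vec<String>> + use<> {
        let mut pending = item_ids
            .iter()
            .map(|&id| async move { format!("item {id}") }) // stand-in for the real lookup task
            .collect::<FuturesUnordered<_>>();
        async move {
            let mut items = Vec::with_capacity(pending.len());
            while let Some(item) = pending.next().await {
                items.push(item);
            }
            items
        }
    }
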

crates/terminal_view/src/terminal_view.rs πŸ”—

@@ -1141,7 +1141,8 @@ impl Item for TerminalView {
         let pid = terminal.pid_getter()?.fallback_pid();
 
         Some(TabTooltipContent::Custom(Box::new(move |_window, cx| {
-            cx.new(|_| TerminalTooltip::new(title.clone(), pid)).into()
+            cx.new(|_| TerminalTooltip::new(title.clone(), pid.as_u32()))
+                .into()
         })))
     }
 
@@ -1223,26 +1224,26 @@ impl Item for TerminalView {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Task<Option<Entity<Self>>> {
-        let Some(terminal_task) = self
-            .project
-            .update(cx, |project, cx| {
-                let cwd = project
-                    .active_project_directory(cx)
-                    .map(|it| it.to_path_buf());
-                project.clone_terminal(self.terminal(), cx, cwd)
-            })
-            .ok()
-        else {
+        let Ok(terminal) = self.project.update(cx, |project, cx| {
+            let cwd = project
+                .active_project_directory(cx)
+                .map(|it| it.to_path_buf());
+            project.clone_terminal(self.terminal(), cx, cwd)
+        }) else {
             return Task::ready(None);
         };
-
-        let workspace = self.workspace.clone();
-        let project = self.project.clone();
-        cx.spawn_in(window, async move |_, cx| {
-            let terminal = terminal_task.await.log_err()?;
-            cx.update(|window, cx| {
+        cx.spawn_in(window, async move |this, cx| {
+            let terminal = terminal.await.log_err()?;
+            this.update_in(cx, |this, window, cx| {
                 cx.new(|cx| {
-                    TerminalView::new(terminal, workspace, workspace_id, project, window, cx)
+                    TerminalView::new(
+                        terminal,
+                        this.workspace.clone(),
+                        workspace_id,
+                        this.project.clone(),
+                        window,
+                        cx,
+                    )
                 })
             })
             .ok()
@@ -1447,6 +1448,7 @@ impl SearchableItem for TerminalView {
         &mut self,
         index: usize,
         _: &[Self::Match],
+        _collapse: bool,
         _window: &mut Window,
         cx: &mut Context<Self>,
     ) {

crates/text/Cargo.toml πŸ”—

@@ -28,6 +28,7 @@ rope.workspace = true
 smallvec.workspace = true
 sum_tree.workspace = true
 util.workspace = true
+gpui.workspace = true
 
 [dev-dependencies]
 collections = { workspace = true, features = ["test-support"] }

crates/text/src/tests.rs πŸ”—

@@ -14,24 +14,29 @@ fn init_logger() {
     zlog::init_test();
 }
 
-#[test]
-fn test_edit() {
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc");
+#[gpui::test]
+fn test_edit(cx: &mut gpui::TestAppContext) {
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "abc",
+        cx.background_executor(),
+    );
     assert_eq!(buffer.text(), "abc");
-    buffer.edit([(3..3, "def")]);
+    buffer.edit([(3..3, "def")], cx.background_executor());
     assert_eq!(buffer.text(), "abcdef");
-    buffer.edit([(0..0, "ghi")]);
+    buffer.edit([(0..0, "ghi")], cx.background_executor());
     assert_eq!(buffer.text(), "ghiabcdef");
-    buffer.edit([(5..5, "jkl")]);
+    buffer.edit([(5..5, "jkl")], cx.background_executor());
     assert_eq!(buffer.text(), "ghiabjklcdef");
-    buffer.edit([(6..7, "")]);
+    buffer.edit([(6..7, "")], cx.background_executor());
     assert_eq!(buffer.text(), "ghiabjlcdef");
-    buffer.edit([(4..9, "mno")]);
+    buffer.edit([(4..9, "mno")], cx.background_executor());
     assert_eq!(buffer.text(), "ghiamnoef");
 }
 
 #[gpui::test(iterations = 100)]
-fn test_random_edits(mut rng: StdRng) {
+fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
     let operations = env::var("OPERATIONS")
         .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
         .unwrap_or(10);
@@ -44,6 +49,7 @@ fn test_random_edits(mut rng: StdRng) {
         ReplicaId::LOCAL,
         BufferId::new(1).unwrap(),
         reference_string.clone(),
+        cx.background_executor(),
     );
     LineEnding::normalize(&mut reference_string);
 
@@ -56,7 +62,7 @@ fn test_random_edits(mut rng: StdRng) {
     );
 
     for _i in 0..operations {
-        let (edits, _) = buffer.randomly_edit(&mut rng, 5);
+        let (edits, _) = buffer.randomly_edit(&mut rng, 5, cx.background_executor());
         for (old_range, new_text) in edits.iter().rev() {
             reference_string.replace_range(old_range.clone(), new_text);
         }
@@ -106,7 +112,11 @@ fn test_random_edits(mut rng: StdRng) {
         let mut text = old_buffer.visible_text.clone();
         for edit in edits {
             let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
-            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+            text.replace(
+                edit.new.start..edit.new.start + edit.old.len(),
+                &new_text,
+                cx.background_executor(),
+            );
         }
         assert_eq!(text.to_string(), buffer.text());
 
@@ -161,14 +171,18 @@ fn test_random_edits(mut rng: StdRng) {
         let mut text = old_buffer.visible_text.clone();
         for edit in subscription_edits.into_inner() {
             let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
-            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+            text.replace(
+                edit.new.start..edit.new.start + edit.old.len(),
+                &new_text,
+                cx.background_executor(),
+            );
         }
         assert_eq!(text.to_string(), buffer.text());
     }
 }
 
-#[test]
-fn test_line_endings() {
+#[gpui::test]
+fn test_line_endings(cx: &mut gpui::TestAppContext) {
     assert_eq!(LineEnding::detect(&"πŸβœ…\n".repeat(1000)), LineEnding::Unix);
     assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix);
     assert_eq!(
@@ -184,25 +198,34 @@ fn test_line_endings() {
         ReplicaId::LOCAL,
         BufferId::new(1).unwrap(),
         "one\r\ntwo\rthree",
+        cx.background_executor(),
     );
     assert_eq!(buffer.text(), "one\ntwo\nthree");
     assert_eq!(buffer.line_ending(), LineEnding::Windows);
     buffer.check_invariants();
 
-    buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]);
-    buffer.edit([(0..0, "zero\r\n")]);
+    buffer.edit(
+        [(buffer.len()..buffer.len(), "\r\nfour")],
+        cx.background_executor(),
+    );
+    buffer.edit([(0..0, "zero\r\n")], cx.background_executor());
     assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
     assert_eq!(buffer.line_ending(), LineEnding::Windows);
     buffer.check_invariants();
 }
 
-#[test]
-fn test_line_len() {
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
-    buffer.edit([(0..0, "abcd\nefg\nhij")]);
-    buffer.edit([(12..12, "kl\nmno")]);
-    buffer.edit([(18..18, "\npqrs\n")]);
-    buffer.edit([(18..21, "\nPQ")]);
+#[gpui::test]
+fn test_line_len(cx: &mut gpui::TestAppContext) {
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "",
+        cx.background_executor(),
+    );
+    buffer.edit([(0..0, "abcd\nefg\nhij")], cx.background_executor());
+    buffer.edit([(12..12, "kl\nmno")], cx.background_executor());
+    buffer.edit([(18..18, "\npqrs\n")], cx.background_executor());
+    buffer.edit([(18..21, "\nPQ")], cx.background_executor());
 
     assert_eq!(buffer.line_len(0), 4);
     assert_eq!(buffer.line_len(1), 3);
@@ -212,10 +235,15 @@ fn test_line_len() {
     assert_eq!(buffer.line_len(5), 0);
 }
 
-#[test]
-fn test_common_prefix_at_position() {
+#[gpui::test]
+fn test_common_prefix_at_position(cx: &mut gpui::TestAppContext) {
     let text = "a = str; b = δα";
-    let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text);
+    let buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        text,
+        cx.background_executor(),
+    );
 
     let offset1 = offset_after(text, "str");
     let offset2 = offset_after(text, "δα");
@@ -261,12 +289,13 @@ fn test_common_prefix_at_position() {
     }
 }
 
-#[test]
-fn test_text_summary_for_range() {
+#[gpui::test]
+fn test_text_summary_for_range(cx: &mut gpui::TestAppContext) {
     let buffer = Buffer::new(
         ReplicaId::LOCAL,
         BufferId::new(1).unwrap(),
         "ab\nefg\nhklm\nnopqrs\ntuvwxyz",
+        cx.background_executor(),
     );
     assert_eq!(
         buffer.text_summary_for_range::<TextSummary, _>(0..2),
@@ -354,13 +383,18 @@ fn test_text_summary_for_range() {
     );
 }
 
-#[test]
-fn test_chars_at() {
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
-    buffer.edit([(0..0, "abcd\nefgh\nij")]);
-    buffer.edit([(12..12, "kl\nmno")]);
-    buffer.edit([(18..18, "\npqrs")]);
-    buffer.edit([(18..21, "\nPQ")]);
+#[gpui::test]
+fn test_chars_at(cx: &mut gpui::TestAppContext) {
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "",
+        cx.background_executor(),
+    );
+    buffer.edit([(0..0, "abcd\nefgh\nij")], cx.background_executor());
+    buffer.edit([(12..12, "kl\nmno")], cx.background_executor());
+    buffer.edit([(18..18, "\npqrs")], cx.background_executor());
+    buffer.edit([(18..21, "\nPQ")], cx.background_executor());
 
     let chars = buffer.chars_at(Point::new(0, 0));
     assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
@@ -378,43 +412,53 @@ fn test_chars_at() {
     assert_eq!(chars.collect::<String>(), "PQrs");
 
     // Regression test:
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
-    buffer.edit([(0..0, "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n")]);
-    buffer.edit([(60..60, "\n")]);
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "",
+        cx.background_executor(),
+    );
+    buffer.edit([(0..0, "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n")], cx.background_executor());
+    buffer.edit([(60..60, "\n")], cx.background_executor());
 
     let chars = buffer.chars_at(Point::new(6, 0));
     assert_eq!(chars.collect::<String>(), "    \"xray_wasm\",\n]\n");
 }
 
-#[test]
-fn test_anchors() {
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
-    buffer.edit([(0..0, "abc")]);
+#[gpui::test]
+fn test_anchors(cx: &mut gpui::TestAppContext) {
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "",
+        cx.background_executor(),
+    );
+    buffer.edit([(0..0, "abc")], cx.background_executor());
     let left_anchor = buffer.anchor_before(2);
     let right_anchor = buffer.anchor_after(2);
 
-    buffer.edit([(1..1, "def\n")]);
+    buffer.edit([(1..1, "def\n")], cx.background_executor());
     assert_eq!(buffer.text(), "adef\nbc");
     assert_eq!(left_anchor.to_offset(&buffer), 6);
     assert_eq!(right_anchor.to_offset(&buffer), 6);
     assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
     assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
 
-    buffer.edit([(2..3, "")]);
+    buffer.edit([(2..3, "")], cx.background_executor());
     assert_eq!(buffer.text(), "adf\nbc");
     assert_eq!(left_anchor.to_offset(&buffer), 5);
     assert_eq!(right_anchor.to_offset(&buffer), 5);
     assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
     assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
 
-    buffer.edit([(5..5, "ghi\n")]);
+    buffer.edit([(5..5, "ghi\n")], cx.background_executor());
     assert_eq!(buffer.text(), "adf\nbghi\nc");
     assert_eq!(left_anchor.to_offset(&buffer), 5);
     assert_eq!(right_anchor.to_offset(&buffer), 9);
     assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
     assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
 
-    buffer.edit([(7..9, "")]);
+    buffer.edit([(7..9, "")], cx.background_executor());
     assert_eq!(buffer.text(), "adf\nbghc");
     assert_eq!(left_anchor.to_offset(&buffer), 5);
     assert_eq!(right_anchor.to_offset(&buffer), 7);
@@ -504,13 +548,18 @@ fn test_anchors() {
     );
 }
 
-#[test]
-fn test_anchors_at_start_and_end() {
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "");
+#[gpui::test]
+fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) {
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "",
+        cx.background_executor(),
+    );
     let before_start_anchor = buffer.anchor_before(0);
     let after_end_anchor = buffer.anchor_after(0);
 
-    buffer.edit([(0..0, "abc")]);
+    buffer.edit([(0..0, "abc")], cx.background_executor());
     assert_eq!(buffer.text(), "abc");
     assert_eq!(before_start_anchor.to_offset(&buffer), 0);
     assert_eq!(after_end_anchor.to_offset(&buffer), 3);
@@ -518,8 +567,8 @@ fn test_anchors_at_start_and_end() {
     let after_start_anchor = buffer.anchor_after(0);
     let before_end_anchor = buffer.anchor_before(3);
 
-    buffer.edit([(3..3, "def")]);
-    buffer.edit([(0..0, "ghi")]);
+    buffer.edit([(3..3, "def")], cx.background_executor());
+    buffer.edit([(0..0, "ghi")], cx.background_executor());
     assert_eq!(buffer.text(), "ghiabcdef");
     assert_eq!(before_start_anchor.to_offset(&buffer), 0);
     assert_eq!(after_start_anchor.to_offset(&buffer), 3);
@@ -527,15 +576,20 @@ fn test_anchors_at_start_and_end() {
     assert_eq!(after_end_anchor.to_offset(&buffer), 9);
 }
 
-#[test]
-fn test_undo_redo() {
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234");
+#[gpui::test]
+fn test_undo_redo(cx: &mut gpui::TestAppContext) {
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "1234",
+        cx.background_executor(),
+    );
     // Set group interval to zero so as to not group edits in the undo stack.
     buffer.set_group_interval(Duration::from_secs(0));
 
-    buffer.edit([(1..1, "abx")]);
-    buffer.edit([(3..4, "yzef")]);
-    buffer.edit([(3..5, "cd")]);
+    buffer.edit([(1..1, "abx")], cx.background_executor());
+    buffer.edit([(3..4, "yzef")], cx.background_executor());
+    buffer.edit([(3..5, "cd")], cx.background_executor());
     assert_eq!(buffer.text(), "1abcdef234");
 
     let entries = buffer.history.undo_stack.clone();
@@ -563,26 +617,31 @@ fn test_undo_redo() {
     assert_eq!(buffer.text(), "1234");
 }
 
-#[test]
-fn test_history() {
+#[gpui::test]
+fn test_history(cx: &mut gpui::TestAppContext) {
     let mut now = Instant::now();
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456");
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "123456",
+        cx.background_executor(),
+    );
     buffer.set_group_interval(Duration::from_millis(300));
 
     let transaction_1 = buffer.start_transaction_at(now).unwrap();
-    buffer.edit([(2..4, "cd")]);
+    buffer.edit([(2..4, "cd")], cx.background_executor());
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56");
 
     buffer.start_transaction_at(now);
-    buffer.edit([(4..5, "e")]);
+    buffer.edit([(4..5, "e")], cx.background_executor());
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "12cde6");
 
     now += buffer.transaction_group_interval() + Duration::from_millis(1);
     buffer.start_transaction_at(now);
-    buffer.edit([(0..1, "a")]);
-    buffer.edit([(1..1, "b")]);
+    buffer.edit([(0..1, "a")], cx.background_executor());
+    buffer.edit([(1..1, "b")], cx.background_executor());
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "ab2cde6");
 
@@ -609,7 +668,7 @@ fn test_history() {
 
     // Redo stack gets cleared after performing an edit.
     buffer.start_transaction_at(now);
-    buffer.edit([(0..0, "X")]);
+    buffer.edit([(0..0, "X")], cx.background_executor());
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "X12cde6");
     buffer.redo();
@@ -630,26 +689,31 @@ fn test_history() {
     assert_eq!(buffer.text(), "X12cde6");
 }
 
-#[test]
-fn test_finalize_last_transaction() {
+#[gpui::test]
+fn test_finalize_last_transaction(cx: &mut gpui::TestAppContext) {
     let now = Instant::now();
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456");
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "123456",
+        cx.background_executor(),
+    );
     buffer.history.group_interval = Duration::from_millis(1);
 
     buffer.start_transaction_at(now);
-    buffer.edit([(2..4, "cd")]);
+    buffer.edit([(2..4, "cd")], cx.background_executor());
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56");
 
     buffer.finalize_last_transaction();
     buffer.start_transaction_at(now);
-    buffer.edit([(4..5, "e")]);
+    buffer.edit([(4..5, "e")], cx.background_executor());
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "12cde6");
 
     buffer.start_transaction_at(now);
-    buffer.edit([(0..1, "a")]);
-    buffer.edit([(1..1, "b")]);
+    buffer.edit([(0..1, "a")], cx.background_executor());
+    buffer.edit([(1..1, "b")], cx.background_executor());
     buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "ab2cde6");
 
@@ -666,14 +730,19 @@ fn test_finalize_last_transaction() {
     assert_eq!(buffer.text(), "ab2cde6");
 }
 
-#[test]
-fn test_edited_ranges_for_transaction() {
+#[gpui::test]
+fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) {
     let now = Instant::now();
-    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234567");
+    let mut buffer = Buffer::new(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        "1234567",
+        cx.background_executor(),
+    );
 
     buffer.start_transaction_at(now);
-    buffer.edit([(2..4, "cd")]);
-    buffer.edit([(6..6, "efg")]);
+    buffer.edit([(2..4, "cd")], cx.background_executor());
+    buffer.edit([(6..6, "efg")], cx.background_executor());
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56efg7");
 
@@ -685,7 +754,7 @@ fn test_edited_ranges_for_transaction() {
         [2..4, 6..9]
     );
 
-    buffer.edit([(5..5, "hijk")]);
+    buffer.edit([(5..5, "hijk")], cx.background_executor());
     assert_eq!(buffer.text(), "12cd5hijk6efg7");
     assert_eq!(
         buffer
@@ -694,7 +763,7 @@ fn test_edited_ranges_for_transaction() {
         [2..4, 10..13]
     );
 
-    buffer.edit([(4..4, "l")]);
+    buffer.edit([(4..4, "l")], cx.background_executor());
     assert_eq!(buffer.text(), "12cdl5hijk6efg7");
     assert_eq!(
         buffer
@@ -704,27 +773,42 @@ fn test_edited_ranges_for_transaction() {
     );
 }
 
-#[test]
-fn test_concurrent_edits() {
+#[gpui::test]
+fn test_concurrent_edits(cx: &mut gpui::TestAppContext) {
     let text = "abcdef";
 
-    let mut buffer1 = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), text);
-    let mut buffer2 = Buffer::new(ReplicaId::new(2), BufferId::new(1).unwrap(), text);
-    let mut buffer3 = Buffer::new(ReplicaId::new(3), BufferId::new(1).unwrap(), text);
+    let mut buffer1 = Buffer::new(
+        ReplicaId::new(1),
+        BufferId::new(1).unwrap(),
+        text,
+        cx.background_executor(),
+    );
+    let mut buffer2 = Buffer::new(
+        ReplicaId::new(2),
+        BufferId::new(1).unwrap(),
+        text,
+        cx.background_executor(),
+    );
+    let mut buffer3 = Buffer::new(
+        ReplicaId::new(3),
+        BufferId::new(1).unwrap(),
+        text,
+        cx.background_executor(),
+    );
 
-    let buf1_op = buffer1.edit([(1..2, "12")]);
+    let buf1_op = buffer1.edit([(1..2, "12")], cx.background_executor());
     assert_eq!(buffer1.text(), "a12cdef");
-    let buf2_op = buffer2.edit([(3..4, "34")]);
+    let buf2_op = buffer2.edit([(3..4, "34")], cx.background_executor());
     assert_eq!(buffer2.text(), "abc34ef");
-    let buf3_op = buffer3.edit([(5..6, "56")]);
+    let buf3_op = buffer3.edit([(5..6, "56")], cx.background_executor());
     assert_eq!(buffer3.text(), "abcde56");
 
-    buffer1.apply_op(buf2_op.clone());
-    buffer1.apply_op(buf3_op.clone());
-    buffer2.apply_op(buf1_op.clone());
-    buffer2.apply_op(buf3_op);
-    buffer3.apply_op(buf1_op);
-    buffer3.apply_op(buf2_op);
+    buffer1.apply_op(buf2_op.clone(), Some(cx.background_executor()));
+    buffer1.apply_op(buf3_op.clone(), Some(cx.background_executor()));
+    buffer2.apply_op(buf1_op.clone(), Some(cx.background_executor()));
+    buffer2.apply_op(buf3_op, Some(cx.background_executor()));
+    buffer3.apply_op(buf1_op, Some(cx.background_executor()));
+    buffer3.apply_op(buf2_op, Some(cx.background_executor()));
 
     assert_eq!(buffer1.text(), "a12c34e56");
     assert_eq!(buffer2.text(), "a12c34e56");
@@ -732,7 +816,7 @@ fn test_concurrent_edits() {
 }
 
 #[gpui::test(iterations = 100)]
-fn test_random_concurrent_edits(mut rng: StdRng) {
+fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) {
     let peers = env::var("PEERS")
         .map(|i| i.parse().expect("invalid `PEERS` variable"))
         .unwrap_or(5);
@@ -753,6 +837,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
             ReplicaId::new(i as u16),
             BufferId::new(1).unwrap(),
             base_text.clone(),
+            cx.background_executor(),
         );
         buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200));
         buffers.push(buffer);
@@ -769,7 +854,9 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
         let buffer = &mut buffers[replica_index];
         match rng.random_range(0..=100) {
             0..=50 if mutation_count != 0 => {
-                let op = buffer.randomly_edit(&mut rng, 5).1;
+                let op = buffer
+                    .randomly_edit(&mut rng, 5, cx.background_executor())
+                    .1;
                 network.broadcast(buffer.replica_id, vec![op]);
                 log::info!("buffer {:?} text: {:?}", buffer.replica_id, buffer.text());
                 mutation_count -= 1;
@@ -787,7 +874,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
                         replica_id,
                         ops.len()
                     );
-                    buffer.apply_ops(ops);
+                    buffer.apply_ops(ops, Some(cx.background_executor()));
                 }
             }
             _ => {}

crates/text/src/text.rs πŸ”—

@@ -15,6 +15,7 @@ use anyhow::{Context as _, Result};
 use clock::Lamport;
 pub use clock::ReplicaId;
 use collections::{HashMap, HashSet};
+use gpui::BackgroundExecutor;
 use locator::Locator;
 use operation_queue::OperationQueue;
 pub use patch::Patch;
@@ -709,11 +710,41 @@ impl FromIterator<char> for LineIndent {
 }
 
 impl Buffer {
-    pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into<String>) -> Buffer {
+    /// Create a new buffer from a string.
+    pub fn new(
+        replica_id: ReplicaId,
+        remote_id: BufferId,
+        base_text: impl Into<String>,
+        executor: &BackgroundExecutor,
+    ) -> Buffer {
+        let mut base_text = base_text.into();
+        let line_ending = LineEnding::detect(&base_text);
+        LineEnding::normalize(&mut base_text);
+        Self::new_normalized(
+            replica_id,
+            remote_id,
+            line_ending,
+            Rope::from_str(&base_text, executor),
+        )
+    }
+
+    /// Create a new buffer from a string.
+    ///
+    /// Unlike [`Buffer::new`], this does not construct the backing rope in parallel, even when the text is large.
+    pub fn new_slow(
+        replica_id: ReplicaId,
+        remote_id: BufferId,
+        base_text: impl Into<String>,
+    ) -> Buffer {
         let mut base_text = base_text.into();
         let line_ending = LineEnding::detect(&base_text);
         LineEnding::normalize(&mut base_text);
-        Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text))
+        Self::new_normalized(
+            replica_id,
+            remote_id,
+            line_ending,
+            Rope::from_str_small(&base_text),
+        )
     }
 
     pub fn new_normalized(
@@ -808,7 +839,7 @@ impl Buffer {
         self.history.group_interval
     }
 
-    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
+    pub fn edit<R, I, S, T>(&mut self, edits: R, cx: &BackgroundExecutor) -> Operation
     where
         R: IntoIterator<IntoIter = I>,
         I: ExactSizeIterator<Item = (Range<S>, T)>,
@@ -821,7 +852,7 @@ impl Buffer {
 
         self.start_transaction();
         let timestamp = self.lamport_clock.tick();
-        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
+        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp, cx));
 
         self.history.push(operation.clone());
         self.history.push_undo(operation.timestamp());
@@ -834,6 +865,7 @@ impl Buffer {
         &mut self,
         edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
         timestamp: clock::Lamport,
+        executor: &BackgroundExecutor,
     ) -> EditOperation {
         let mut edits_patch = Patch::default();
         let mut edit_op = EditOperation {
@@ -922,7 +954,7 @@ impl Buffer {
                 });
                 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
                 new_insertions.push(InsertionFragment::insert_new(&fragment));
-                new_ropes.push_str(new_text.as_ref());
+                new_ropes.push_str(new_text.as_ref(), executor);
                 new_fragments.push(fragment, &None);
                 insertion_offset += new_text.len();
             }
@@ -1001,22 +1033,26 @@ impl Buffer {
         self.snapshot.line_ending = line_ending;
     }
 
-    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) {
+    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
+        &mut self,
+        ops: I,
+        executor: Option<&BackgroundExecutor>,
+    ) {
         let mut deferred_ops = Vec::new();
         for op in ops {
             self.history.push(op.clone());
             if self.can_apply_op(&op) {
-                self.apply_op(op);
+                self.apply_op(op, executor);
             } else {
                 self.deferred_replicas.insert(op.replica_id());
                 deferred_ops.push(op);
             }
         }
         self.deferred_ops.insert(deferred_ops);
-        self.flush_deferred_ops();
+        self.flush_deferred_ops(executor);
     }
 
-    fn apply_op(&mut self, op: Operation) {
+    fn apply_op(&mut self, op: Operation, executor: Option<&BackgroundExecutor>) {
         match op {
             Operation::Edit(edit) => {
                 if !self.version.observed(edit.timestamp) {
@@ -1025,6 +1061,7 @@ impl Buffer {
                         &edit.ranges,
                         &edit.new_text,
                         edit.timestamp,
+                        executor,
                     );
                     self.snapshot.version.observe(edit.timestamp);
                     self.lamport_clock.observe(edit.timestamp);
@@ -1055,6 +1092,7 @@ impl Buffer {
         ranges: &[Range<FullOffset>],
         new_text: &[Arc<str>],
         timestamp: clock::Lamport,
+        executor: Option<&BackgroundExecutor>,
     ) {
         if ranges.is_empty() {
             return;
@@ -1170,7 +1208,10 @@ impl Buffer {
                 });
                 insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
                 new_insertions.push(InsertionFragment::insert_new(&fragment));
-                new_ropes.push_str(new_text);
+                match executor {
+                    Some(executor) => new_ropes.push_str(new_text, executor),
+                    None => new_ropes.push_str_small(new_text),
+                }
                 new_fragments.push(fragment, &None);
                 insertion_offset += new_text.len();
             }
@@ -1348,12 +1389,12 @@ impl Buffer {
         self.subscriptions.publish_mut(&edits);
     }
 
-    fn flush_deferred_ops(&mut self) {
+    fn flush_deferred_ops(&mut self, executor: Option<&BackgroundExecutor>) {
         self.deferred_replicas.clear();
         let mut deferred_ops = Vec::new();
         for op in self.deferred_ops.drain().iter().cloned() {
             if self.can_apply_op(&op) {
-                self.apply_op(op);
+                self.apply_op(op, executor);
             } else {
                 self.deferred_replicas.insert(op.replica_id());
                 deferred_ops.push(op);
@@ -1711,9 +1752,9 @@ impl Buffer {
 #[cfg(any(test, feature = "test-support"))]
 impl Buffer {
     #[track_caller]
-    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
+    pub fn edit_via_marked_text(&mut self, marked_string: &str, cx: &BackgroundExecutor) {
         let edits = self.edits_for_marked_text(marked_string);
-        self.edit(edits);
+        self.edit(edits, cx);
     }
 
     #[track_caller]
@@ -1850,6 +1891,7 @@ impl Buffer {
         &mut self,
         rng: &mut T,
         edit_count: usize,
+        executor: &BackgroundExecutor,
     ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
     where
         T: rand::Rng,
@@ -1857,7 +1899,7 @@ impl Buffer {
         let mut edits = self.get_random_edits(rng, edit_count);
         log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits);
 
-        let op = self.edit(edits.iter().cloned());
+        let op = self.edit(edits.iter().cloned(), executor);
         if let Operation::Edit(edit) = &op {
             assert_eq!(edits.len(), edit.new_text.len());
             for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
@@ -2312,6 +2354,7 @@ impl BufferSnapshot {
             self.visible_text.len()
         } else {
             debug_assert!(anchor.buffer_id == Some(self.remote_id));
+            debug_assert!(self.version.observed(anchor.timestamp));
             let anchor_key = InsertionFragmentKey {
                 timestamp: anchor.timestamp,
                 split_offset: anchor.offset,
@@ -2335,10 +2378,7 @@ impl BufferSnapshot {
                 .item()
                 .filter(|insertion| insertion.timestamp == anchor.timestamp)
             else {
-                panic!(
-                    "invalid anchor {:?}. buffer id: {}, version: {:?}",
-                    anchor, self.remote_id, self.version
-                );
+                self.panic_bad_anchor(anchor);
             };
 
             let (start, _, item) = self
@@ -2357,13 +2397,29 @@ impl BufferSnapshot {
         }
     }
 
-    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
-        self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| {
+    #[cold]
+    fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
+        if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
+            panic!(
+                "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
+                self.remote_id, self.version
+            );
+        } else if !self.version.observed(anchor.timestamp) {
+            panic!(
+                "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}",
+                anchor, self.version
+            );
+        } else {
             panic!(
                 "invalid anchor {:?}. buffer id: {}, version: {:?}",
-                anchor, self.remote_id, self.version,
-            )
-        })
+                anchor, self.remote_id, self.version
+            );
+        }
+    }
+
+    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
+        self.try_fragment_id_for_anchor(anchor)
+            .unwrap_or_else(|| self.panic_bad_anchor(anchor))
     }
 
     fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> {
@@ -2692,8 +2748,12 @@ impl<'a> RopeBuilder<'a> {
         }
     }
 
-    fn push_str(&mut self, text: &str) {
-        self.new_visible.push(text);
+    fn push_str(&mut self, text: &str, cx: &BackgroundExecutor) {
+        self.new_visible.push(text, cx);
+    }
+
+    fn push_str_small(&mut self, text: &str) {
+        self.new_visible.push_small(text);
     }
 
     fn finish(mut self) -> (Rope, Rope) {
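
Rope construction for the base text can now run on the `BackgroundExecutor` (`Buffer::new`), with `Buffer::new_slow` kept as a strictly single-threaded alternative, and `apply_ops`/`apply_op` take an `Option<&BackgroundExecutor>` for the same reason. A small sketch of how a caller might choose between the two constructors (import paths assumed; `maybe_executor` is a stand-in for whatever the call site has available):

    use gpui::BackgroundExecutor;
    use text::{Buffer, BufferId, ReplicaId}; // paths assumed

    // Sketch: use the parallel constructor whenever an executor is available,
    // otherwise fall back to the serial one.
    fn make_buffer(base_text: String, maybe_executor: Option<&BackgroundExecutor>) -> Buffer {
        let replica_id = ReplicaId::LOCAL;
        let remote_id = BufferId::new(1).unwrap();
        match maybe_executor {
            // Large base texts get their rope chunked in parallel.
            Some(executor) => Buffer::new(replica_id, remote_id, base_text, executor),
            // No executor on hand: the rope is built serially (Rope::from_str_small).
            None => Buffer::new_slow(replica_id, remote_id, base_text),
        }
    }
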

crates/title_bar/src/collab.rs πŸ”—

@@ -220,6 +220,8 @@ impl TitleBar {
                                 .on_click({
                                     let peer_id = collaborator.peer_id;
                                     cx.listener(move |this, _, window, cx| {
+                                        cx.stop_propagation();
+
                                         this.workspace
                                             .update(cx, |workspace, cx| {
                                                 if is_following {

crates/ui/src/components/popover_menu.rs πŸ”—

@@ -270,11 +270,11 @@ fn show_menu<M: ManagedView>(
     window: &mut Window,
     cx: &mut App,
 ) {
+    let previous_focus_handle = window.focused(cx);
     let Some(new_menu) = (builder)(window, cx) else {
         return;
     };
     let menu2 = menu.clone();
-    let previous_focus_handle = window.focused(cx);
 
     window
         .subscribe(&new_menu, cx, move |modal, _: &DismissEvent, window, cx| {

crates/ui/src/components/scrollbar.rs πŸ”—

@@ -392,7 +392,7 @@ pub struct Scrollbars<T: ScrollableHandle = ScrollHandle> {
 
 impl Scrollbars {
     pub fn new(show_along: ScrollAxes) -> Self {
-        Self::new_with_setting(show_along, |_| ShowScrollbar::default())
+        Self::new_with_setting(show_along, |_| ShowScrollbar::Always)
     }
 
     pub fn for_settings<S: ScrollbarVisibility>() -> Scrollbars {

crates/ui/src/styles/typography.rs πŸ”—

@@ -144,6 +144,19 @@ impl TextSize {
             Self::Editor => rems_from_px(theme_settings.buffer_font_size(cx)),
         }
     }
+
+    pub fn pixels(self, cx: &App) -> Pixels {
+        let theme_settings = ThemeSettings::get_global(cx);
+
+        match self {
+            Self::Large => px(16.),
+            Self::Default => px(14.),
+            Self::Small => px(12.),
+            Self::XSmall => px(10.),
+            Self::Ui => theme_settings.ui_font_size(cx),
+            Self::Editor => theme_settings.buffer_font_size(cx),
+        }
+    }
 }
 
 /// The size of a [`Headline`] element
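
The new `TextSize::pixels` complements the rems-based accessor with absolute `Pixels`, resolving the `Ui` and `Editor` variants from theme settings. A tiny hedged usage sketch (assumes an `&App` named `cx` in scope, e.g. inside a render method):

    // Fixed variants map straight to pixel constants...
    let small = TextSize::Small.pixels(cx); // px(12.)
    // ...while Ui and Editor follow the user's configured font sizes.
    let ui = TextSize::Ui.pixels(cx);
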

crates/util/src/shell.rs πŸ”—

@@ -408,6 +408,15 @@ impl ShellKind {
         }
     }
 
+    pub fn prepend_command_prefix<'a>(&self, command: &'a str) -> Cow<'a, str> {
+        match self.command_prefix() {
+            Some(prefix) if !command.starts_with(prefix) => {
+                Cow::Owned(format!("{prefix}{command}"))
+            }
+            _ => Cow::Borrowed(command),
+        }
+    }
+
     pub const fn sequential_commands_separator(&self) -> char {
         match self {
             ShellKind::Cmd => '&',
@@ -422,6 +431,20 @@ impl ShellKind {
         }
     }
 
+    pub const fn sequential_and_commands_separator(&self) -> &'static str {
+        match self {
+            ShellKind::Cmd
+            | ShellKind::Posix
+            | ShellKind::Csh
+            | ShellKind::Tcsh
+            | ShellKind::Rc
+            | ShellKind::Fish
+            | ShellKind::PowerShell
+            | ShellKind::Xonsh => "&&",
+            ShellKind::Nushell => ";",
+        }
+    }
+
     pub fn try_quote<'a>(&self, arg: &'a str) -> Option<Cow<'a, str>> {
         shlex::try_quote(arg).ok().map(|arg| match self {
             // If we are running in PowerShell, we want to take extra care when escaping strings.
@@ -438,6 +461,42 @@ impl ShellKind {
         })
     }
 
+    /// Quotes the given argument if necessary, taking into account the command prefix.
+    ///
+    /// In other words, the argument is quoted as if its command prefix were not there, so quoting does not break the prefixed command.
+    /// You should use this over `try_quote` when you want to quote a shell command.
+    pub fn try_quote_prefix_aware<'a>(&self, arg: &'a str) -> Option<Cow<'a, str>> {
+        if let Some(char) = self.command_prefix() {
+            if let Some(arg) = arg.strip_prefix(char) {
+                // we have a command that is prefixed
+                for quote in ['\'', '"'] {
+                    if let Some(arg) = arg
+                        .strip_prefix(quote)
+                        .and_then(|arg| arg.strip_suffix(quote))
+                    {
+                        // and the command itself is wrapped as a literal, which
+                        // means the prefix exists to interpret a literal as a
+                        // command. So strip the quotes, quote the command, and
+                        // re-add the quotes if they are missing after requoting
+                        let quoted = self.try_quote(arg)?;
+                        return Some(if quoted.starts_with(['\'', '"']) {
+                            Cow::Owned(self.prepend_command_prefix(&quoted).into_owned())
+                        } else {
+                            Cow::Owned(
+                                self.prepend_command_prefix(&format!("{quote}{quoted}{quote}"))
+                                    .into_owned(),
+                            )
+                        });
+                    }
+                }
+                return self
+                    .try_quote(arg)
+                    .map(|quoted| Cow::Owned(self.prepend_command_prefix(&quoted).into_owned()));
+            }
+        }
+        self.try_quote(arg)
+    }
+
     pub fn split(&self, input: &str) -> Option<Vec<String>> {
         shlex::split(input)
     }
@@ -525,4 +584,75 @@ mod tests {
             "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string()
         );
     }
+
+    #[test]
+    fn test_try_quote_nu_command() {
+        let shell_kind = ShellKind::Nushell;
+        assert_eq!(
+            shell_kind.try_quote("'uname'").unwrap().into_owned(),
+            "\"'uname'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("'uname'")
+                .unwrap()
+                .into_owned(),
+            "\"'uname'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("^uname").unwrap().into_owned(),
+            "'^uname'".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("^uname")
+                .unwrap()
+                .into_owned(),
+            "^uname".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("^'uname'").unwrap().into_owned(),
+            "'^'\"'uname\'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("^'uname'")
+                .unwrap()
+                .into_owned(),
+            "^'uname'".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("'uname a'").unwrap().into_owned(),
+            "\"'uname a'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("'uname a'")
+                .unwrap()
+                .into_owned(),
+            "\"'uname a'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("^'uname a'").unwrap().into_owned(),
+            "'^'\"'uname a'\"".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("^'uname a'")
+                .unwrap()
+                .into_owned(),
+            "^'uname a'".to_string()
+        );
+        assert_eq!(
+            shell_kind.try_quote("uname").unwrap().into_owned(),
+            "uname".to_string()
+        );
+        assert_eq!(
+            shell_kind
+                .try_quote_prefix_aware("uname")
+                .unwrap()
+                .into_owned(),
+            "uname".to_string()
+        );
+    }
 }

crates/vim/src/helix.rs πŸ”—

@@ -450,7 +450,7 @@ impl Vim {
                         prior_selections,
                         prior_operator: self.operator_stack.last().cloned(),
                         prior_mode: self.mode,
-                        helix_select: true,
+                        is_helix_regex_search: true,
                     }
                 });
             }
@@ -1278,6 +1278,24 @@ mod test {
         cx.assert_state("Β«one Λ‡Β»two", Mode::HelixSelect);
     }
 
+    #[gpui::test]
+    async fn test_exit_visual_mode(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+
+        cx.set_state("Λ‡one two", Mode::Normal);
+        cx.simulate_keystrokes("v w");
+        cx.assert_state("«one tˇ»wo", Mode::Visual);
+        cx.simulate_keystrokes("escape");
+        cx.assert_state("one Λ‡two", Mode::Normal);
+
+        cx.enable_helix();
+        cx.set_state("Λ‡one two", Mode::HelixNormal);
+        cx.simulate_keystrokes("v w");
+        cx.assert_state("Β«one Λ‡Β»two", Mode::HelixSelect);
+        cx.simulate_keystrokes("escape");
+        cx.assert_state("Β«one Λ‡Β»two", Mode::HelixNormal);
+    }
+
     #[gpui::test]
     async fn test_helix_select_regex(cx: &mut gpui::TestAppContext) {
         let mut cx = VimTestContext::new(cx, true).await;
@@ -1297,9 +1315,47 @@ mod test {
         cx.simulate_keystrokes("enter");
         cx.assert_state("«oneˇ» two «oneˇ»", Mode::HelixNormal);
 
-        cx.set_state("Λ‡one two one", Mode::HelixNormal);
-        cx.simulate_keystrokes("s o n e enter");
-        cx.assert_state("Λ‡one two one", Mode::HelixNormal);
+        // TODO: change "search_in_selection" to not perform any search when in helix select mode with no selection
+        // cx.set_state("Λ‡stuff one two one", Mode::HelixNormal);
+        // cx.simulate_keystrokes("s o n e enter");
+        // cx.assert_state("Λ‡stuff one two one", Mode::HelixNormal);
+    }
+
+    #[gpui::test]
+    async fn test_helix_select_next_match(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+
+        cx.set_state("Λ‡hello two one two one two one", Mode::Visual);
+        cx.simulate_keystrokes("/ o n e");
+        cx.simulate_keystrokes("enter");
+        cx.simulate_keystrokes("n n");
+        cx.assert_state("«hello two one two one two oˇ»ne", Mode::Visual);
+
+        cx.set_state("Λ‡hello two one two one two one", Mode::Normal);
+        cx.simulate_keystrokes("/ o n e");
+        cx.simulate_keystrokes("enter");
+        cx.simulate_keystrokes("n n");
+        cx.assert_state("hello two one two one two Λ‡one", Mode::Normal);
+
+        cx.set_state("Λ‡hello two one two one two one", Mode::Normal);
+        cx.simulate_keystrokes("/ o n e");
+        cx.simulate_keystrokes("enter");
+        cx.simulate_keystrokes("n g n g n");
+        cx.assert_state("hello two one two «one two oneˇ»", Mode::Visual);
+
+        cx.enable_helix();
+
+        cx.set_state("Λ‡hello two one two one two one", Mode::HelixNormal);
+        cx.simulate_keystrokes("/ o n e");
+        cx.simulate_keystrokes("enter");
+        cx.simulate_keystrokes("n n");
+        cx.assert_state("hello two one two one two «oneˇ»", Mode::HelixNormal);
+
+        cx.set_state("Λ‡hello two one two one two one", Mode::HelixSelect);
+        cx.simulate_keystrokes("/ o n e");
+        cx.simulate_keystrokes("enter");
+        cx.simulate_keystrokes("n n");
+        cx.assert_state("ˇhello two «oneˇ» two «oneˇ» two «oneˇ»", Mode::HelixSelect);
     }
 
     #[gpui::test]

crates/vim/src/motion.rs πŸ”—

@@ -672,31 +672,40 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
 
 impl Vim {
     pub(crate) fn search_motion(&mut self, m: Motion, window: &mut Window, cx: &mut Context<Self>) {
-        if let Motion::ZedSearchResult {
-            prior_selections, ..
+        let Motion::ZedSearchResult {
+            prior_selections,
+            new_selections,
         } = &m
-        {
-            match self.mode {
-                Mode::Visual | Mode::VisualLine | Mode::VisualBlock => {
-                    if !prior_selections.is_empty() {
-                        self.update_editor(cx, |_, editor, cx| {
-                            editor.change_selections(Default::default(), window, cx, |s| {
-                                s.select_ranges(prior_selections.iter().cloned())
-                            })
+        else {
+            return;
+        };
+
+        match self.mode {
+            Mode::Visual | Mode::VisualLine | Mode::VisualBlock => {
+                if !prior_selections.is_empty() {
+                    self.update_editor(cx, |_, editor, cx| {
+                        editor.change_selections(Default::default(), window, cx, |s| {
+                            s.select_ranges(prior_selections.iter().cloned());
                         });
-                    }
+                    });
                 }
-                Mode::Normal | Mode::Replace | Mode::Insert => {
-                    if self.active_operator().is_none() {
-                        return;
-                    }
+                self.motion(m, window, cx);
+            }
+            Mode::Normal | Mode::Replace | Mode::Insert => {
+                if self.active_operator().is_some() {
+                    self.motion(m, window, cx);
                 }
+            }
 
-                Mode::HelixNormal | Mode::HelixSelect => {}
+            Mode::HelixNormal => {}
+            Mode::HelixSelect => {
+                self.update_editor(cx, |_, editor, cx| {
+                    editor.change_selections(Default::default(), window, cx, |s| {
+                        s.select_ranges(prior_selections.iter().chain(new_selections).cloned());
+                    });
+                });
             }
         }
-
-        self.motion(m, window, cx)
     }
 
     pub(crate) fn motion(&mut self, motion: Motion, window: &mut Window, cx: &mut Context<Self>) {
@@ -3087,6 +3096,7 @@ mod test {
     use indoc::indoc;
     use language::Point;
     use multi_buffer::MultiBufferRow;
+    use text::Rope;
 
     #[gpui::test]
     async fn test_start_end_of_paragraph(cx: &mut gpui::TestAppContext) {
@@ -3813,7 +3823,7 @@ mod test {
         cx.update_editor(|editor, _window, cx| {
             let range = editor.selections.newest_anchor().range();
             let inlay_text = "  field: int,\n  field2: string\n  field3: float";
-            let inlay = Inlay::edit_prediction(1, range.start, inlay_text);
+            let inlay = Inlay::edit_prediction(1, range.start, Rope::from_str_small(inlay_text));
             editor.splice_inlays(&[], vec![inlay], cx);
         });
 
@@ -3845,7 +3855,7 @@ mod test {
             let end_of_line =
                 snapshot.anchor_after(Point::new(0, snapshot.line_len(MultiBufferRow(0))));
             let inlay_text = " hint";
-            let inlay = Inlay::edit_prediction(1, end_of_line, inlay_text);
+            let inlay = Inlay::edit_prediction(1, end_of_line, Rope::from_str_small(inlay_text));
             editor.splice_inlays(&[], vec![inlay], cx);
         });
         cx.simulate_keystrokes("$");
@@ -3884,7 +3894,7 @@ mod test {
             // The empty line is at line 3 (0-indexed)
             let line_start = snapshot.anchor_after(Point::new(3, 0));
             let inlay_text = ": Vec<u32>";
-            let inlay = Inlay::edit_prediction(1, line_start, inlay_text);
+            let inlay = Inlay::edit_prediction(1, line_start, Rope::from_str_small(inlay_text));
             editor.splice_inlays(&[], vec![inlay], cx);
         });
 
@@ -3928,7 +3938,8 @@ mod test {
             let snapshot = editor.buffer().read(cx).snapshot(cx);
             let empty_line_start = snapshot.anchor_after(Point::new(2, 0));
             let inlay_text = ": i32";
-            let inlay = Inlay::edit_prediction(2, empty_line_start, inlay_text);
+            let inlay =
+                Inlay::edit_prediction(2, empty_line_start, Rope::from_str_small(inlay_text));
             editor.splice_inlays(&[], vec![inlay], cx);
         });
 

crates/vim/src/normal.rs πŸ”—

@@ -965,8 +965,17 @@ impl Vim {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
+        // We need to use `text.chars().count()` instead of `text.len()` here as
+        // `len()` counts bytes, not characters.
+        let char_count = text.chars().count();
+        let count = Vim::take_count(cx).unwrap_or(char_count);
         let is_return_char = text == "\n".into() || text == "\r".into();
-        let count = Vim::take_count(cx).unwrap_or(1);
+        let repeat_count = match (is_return_char, char_count) {
+            (true, _) => 0,
+            (_, 1) => count,
+            (_, _) => 1,
+        };
+
         Vim::take_forced_motion(cx);
         self.stop_recording(cx);
         self.update_editor(cx, |_, editor, cx| {
@@ -989,7 +998,7 @@ impl Vim {
                     edits.push((
                         range.start.to_offset(&display_map, Bias::Left)
                             ..range.end.to_offset(&display_map, Bias::Left),
-                        text.repeat(if is_return_char { 0 } else { count }),
+                        text.repeat(repeat_count),
                     ));
                 }
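
The distinction the comment above draws between `len()` and `chars().count()`, plus the resulting repeat rule, in one self-contained snippet (the return-character case is ignored here):

```rust
fn main() {
    // "Γ©" as a single code point: one character, two bytes in UTF-8.
    let text = "\u{e9}";
    assert_eq!(text.len(), 2); // bytes
    assert_eq!(text.chars().count(), 1); // characters

    // Mirroring the repeat logic above: single-character input honours the
    // count, while longer input is inserted exactly once.
    let count = 3;
    let repeat_count = if text.chars().count() == 1 { count } else { 1 };
    assert_eq!(text.repeat(repeat_count).chars().count(), 3);
}
```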
 

crates/vim/src/normal/search.rs πŸ”—

@@ -1,5 +1,6 @@
-use editor::{Editor, EditorSettings};
+use editor::{Editor, EditorSettings, VimFlavor};
 use gpui::{Action, Context, Window, actions};
+
 use language::Point;
 use schemars::JsonSchema;
 use search::{BufferSearchBar, SearchOptions, buffer_search};
@@ -195,7 +196,7 @@ impl Vim {
                         prior_selections,
                         prior_operator: self.operator_stack.last().cloned(),
                         prior_mode,
-                        helix_select: false,
+                        is_helix_regex_search: false,
                     }
                 });
             }
@@ -219,7 +220,7 @@ impl Vim {
         let new_selections = self.editor_selections(window, cx);
         let result = pane.update(cx, |pane, cx| {
             let search_bar = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>()?;
-            if self.search.helix_select {
+            if self.search.is_helix_regex_search {
                 search_bar.update(cx, |search_bar, cx| {
                     search_bar.select_all_matches(&Default::default(), window, cx)
                 });
@@ -240,7 +241,8 @@ impl Vim {
                     count = count.saturating_sub(1)
                 }
                 self.search.count = 1;
-                search_bar.select_match(direction, count, window, cx);
+                let collapse = !self.mode.is_helix();
+                search_bar.select_match(direction, count, collapse, window, cx);
                 search_bar.focus_editor(&Default::default(), window, cx);
 
                 let prior_selections: Vec<_> = self.search.prior_selections.drain(..).collect();
@@ -307,7 +309,8 @@ impl Vim {
                 if !search_bar.has_active_match() || !search_bar.show(window, cx) {
                     return false;
                 }
-                search_bar.select_match(direction, count, window, cx);
+                let collapse = !self.mode.is_helix();
+                search_bar.select_match(direction, count, collapse, window, cx);
                 true
             })
         });
@@ -316,6 +319,7 @@ impl Vim {
         }
 
         let new_selections = self.editor_selections(window, cx);
+
         self.search_motion(
             Motion::ZedSearchResult {
                 prior_selections,
@@ -381,7 +385,8 @@ impl Vim {
             cx.spawn_in(window, async move |_, cx| {
                 search.await?;
                 search_bar.update_in(cx, |search_bar, window, cx| {
-                    search_bar.select_match(direction, count, window, cx);
+                    let collapse = editor::vim_flavor(cx) == Some(VimFlavor::Vim);
+                    search_bar.select_match(direction, count, collapse, window, cx);
 
                     vim.update(cx, |vim, cx| {
                         let new_selections = vim.editor_selections(window, cx);
@@ -444,7 +449,7 @@ impl Vim {
                 cx.spawn_in(window, async move |_, cx| {
                     search.await?;
                     search_bar.update_in(cx, |search_bar, window, cx| {
-                        search_bar.select_match(direction, 1, window, cx)
+                        search_bar.select_match(direction, 1, true, window, cx)
                     })?;
                     anyhow::Ok(())
                 })

crates/vim/src/replace.rs πŸ”—

@@ -1,5 +1,5 @@
 use crate::{
-    Vim,
+    Operator, Vim,
     motion::{self, Motion},
     object::Object,
     state::Mode,
@@ -8,7 +8,7 @@ use editor::{
     Anchor, Bias, Editor, EditorSnapshot, SelectionEffects, ToOffset, ToPoint,
     display_map::ToDisplayPoint,
 };
-use gpui::{Context, Window, actions};
+use gpui::{ClipboardEntry, Context, Window, actions};
 use language::{Point, SelectionGoal};
 use std::ops::Range;
 use std::sync::Arc;
@@ -278,10 +278,27 @@ impl Vim {
             );
         }
     }
+
+    /// Pastes the clipboard contents, replacing as many characters as the
+    /// pasted text contains.
+    pub fn paste_replace(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        let clipboard_text =
+            cx.read_from_clipboard()
+                .and_then(|item| match item.entries().first() {
+                    Some(ClipboardEntry::String(text)) => Some(text.text().to_string()),
+                    _ => None,
+                });
+
+        if let Some(text) = clipboard_text {
+            self.push_operator(Operator::Replace, window, cx);
+            self.normal_replace(Arc::from(text), window, cx);
+        }
+    }
 }
 
 #[cfg(test)]
 mod test {
+    use gpui::ClipboardItem;
     use indoc::indoc;
 
     use crate::{
@@ -521,4 +538,22 @@ mod test {
             assert_eq!(0, highlights.len());
         });
     }
+
+    #[gpui::test]
+    async fn test_paste_replace(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+
+        cx.set_state(indoc! {"Λ‡123"}, Mode::Replace);
+        cx.write_to_clipboard(ClipboardItem::new_string("456".to_string()));
+        cx.dispatch_action(editor::actions::Paste);
+        cx.assert_state(indoc! {"45Λ‡6"}, Mode::Replace);
+
+        // If the clipboard's contents are longer than the remaining text,
+        // nothing should be replaced and the cursor should remain in the
+        // same position.
+        cx.set_state(indoc! {"Λ‡123"}, Mode::Replace);
+        cx.write_to_clipboard(ClipboardItem::new_string("4567".to_string()));
+        cx.dispatch_action(editor::actions::Paste);
+        cx.assert_state(indoc! {"Λ‡123"}, Mode::Replace);
+    }
 }
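
A rough, string-level sketch of the rule the test above asserts; `paste_replace` here is a hypothetical helper operating on plain strings, not the editor implementation:

```rust
// The pasted text overwrites as many characters as it contains, starting at
// the cursor (a character index), and nothing changes if it would run past
// the end of the available text.
fn paste_replace(line: &str, cursor: usize, pasted: &str) -> String {
    let remaining = line.chars().skip(cursor).count();
    let pasted_len = pasted.chars().count();
    if pasted_len > remaining {
        return line.to_string();
    }
    let prefix: String = line.chars().take(cursor).collect();
    let suffix: String = line.chars().skip(cursor + pasted_len).collect();
    format!("{prefix}{pasted}{suffix}")
}

fn main() {
    assert_eq!(paste_replace("123", 0, "456"), "456");
    assert_eq!(paste_replace("123", 0, "4567"), "123"); // too long: no-op
}
```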

crates/vim/src/state.rs πŸ”—

@@ -66,12 +66,16 @@ impl Display for Mode {
 }
 
 impl Mode {
-    pub fn is_visual(&self) -> bool {
+    pub fn is_visual(self) -> bool {
         match self {
             Self::Visual | Self::VisualLine | Self::VisualBlock | Self::HelixSelect => true,
             Self::Normal | Self::Insert | Self::Replace | Self::HelixNormal => false,
         }
     }
+
+    pub fn is_helix(self) -> bool {
+        matches!(self, Mode::HelixNormal | Mode::HelixSelect)
+    }
 }
 
 impl Default for Mode {
@@ -990,7 +994,7 @@ pub struct SearchState {
     pub prior_selections: Vec<Range<Anchor>>,
     pub prior_operator: Option<Operator>,
     pub prior_mode: Mode,
-    pub helix_select: bool,
+    pub is_helix_regex_search: bool,
 }
 
 impl Operator {

crates/vim/src/vim.rs πŸ”—

@@ -23,6 +23,7 @@ use collections::HashMap;
 use editor::{
     Anchor, Bias, Editor, EditorEvent, EditorSettings, HideMouseCursorOrigin, SelectionEffects,
     ToPoint,
+    actions::Paste,
     movement::{self, FindRange},
 };
 use gpui::{
@@ -668,7 +669,7 @@ impl Vim {
                 editor,
                 cx,
                 |vim, _: &SwitchToHelixNormalMode, window, cx| {
-                    vim.switch_mode(Mode::HelixNormal, false, window, cx)
+                    vim.switch_mode(Mode::HelixNormal, true, window, cx)
                 },
             );
             Vim::action(editor, cx, |_, _: &PushForcedMotion, _, cx| {
@@ -919,6 +920,17 @@ impl Vim {
                 );
             });
 
+            Vim::action(
+                editor,
+                cx,
+                |vim, _: &editor::actions::Paste, window, cx| match vim.mode {
+                    Mode::Replace => vim.paste_replace(window, cx),
+                    _ => {
+                        vim.update_editor(cx, |_, editor, cx| editor.paste(&Paste, window, cx));
+                    }
+                },
+            );
+
             normal::register(editor, cx);
             insert::register(editor, cx);
             helix::register(editor, cx);
@@ -932,16 +944,17 @@ impl Vim {
             change_list::register(editor, cx);
             digraph::register(editor, cx);
 
-            cx.defer_in(window, |vim, window, cx| {
-                vim.focused(false, window, cx);
-            })
+            if editor.is_focused(window) {
+                cx.defer_in(window, |vim, window, cx| {
+                    vim.focused(false, window, cx);
+                })
+            }
         })
     }
 
     fn deactivate(editor: &mut Editor, cx: &mut Context<Editor>) {
         editor.set_cursor_shape(CursorShape::Bar, cx);
         editor.set_clip_at_line_ends(false, cx);
-        editor.set_collapse_matches(false);
         editor.set_input_enabled(true);
         editor.set_autoindent(true);
         editor.selections.set_line_mode(false);
@@ -1917,7 +1930,6 @@ impl Vim {
         self.update_editor(cx, |vim, editor, cx| {
             editor.set_cursor_shape(vim.cursor_shape(cx), cx);
             editor.set_clip_at_line_ends(vim.clip_at_line_ends(), cx);
-            editor.set_collapse_matches(true);
             editor.set_input_enabled(vim.editor_input_enabled());
             editor.set_autoindent(vim.should_autoindent());
             editor

crates/vim/src/visual.rs πŸ”—

@@ -847,9 +847,6 @@ impl Vim {
         let mut start_selection = 0usize;
         let mut end_selection = 0usize;
 
-        self.update_editor(cx, |_, editor, _| {
-            editor.set_collapse_matches(false);
-        });
         if vim_is_normal {
             pane.update(cx, |pane, cx| {
                 if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>()
@@ -860,7 +857,7 @@ impl Vim {
                         }
                         // without update_match_index there is a bug when the cursor is before the first match
                         search_bar.update_match_index(window, cx);
-                        search_bar.select_match(direction.opposite(), 1, window, cx);
+                        search_bar.select_match(direction.opposite(), 1, false, window, cx);
                     });
                 }
             });
@@ -878,7 +875,7 @@ impl Vim {
             if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
                 search_bar.update(cx, |search_bar, cx| {
                     search_bar.update_match_index(window, cx);
-                    search_bar.select_match(direction, count, window, cx);
+                    search_bar.select_match(direction, count, false, window, cx);
                     match_exists = search_bar.match_exists(window, cx);
                 });
             }
@@ -905,7 +902,6 @@ impl Vim {
             editor.change_selections(Default::default(), window, cx, |s| {
                 s.select_ranges([start_selection..end_selection]);
             });
-            editor.set_collapse_matches(true);
         });
 
         match self.maybe_pop_operator() {

crates/workspace/src/searchable.rs πŸ”—

@@ -104,6 +104,7 @@ pub trait SearchableItem: Item + EventEmitter<SearchEvent> {
         &mut self,
         index: usize,
         matches: &[Self::Match],
+        collapse: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     );
@@ -184,6 +185,7 @@ pub trait SearchableItemHandle: ItemHandle {
         &self,
         index: usize,
         matches: &AnyVec<dyn Send>,
+        collapse: bool,
         window: &mut Window,
         cx: &mut App,
     );
@@ -274,12 +276,13 @@ impl<T: SearchableItem> SearchableItemHandle for Entity<T> {
         &self,
         index: usize,
         matches: &AnyVec<dyn Send>,
+        collapse: bool,
         window: &mut Window,
         cx: &mut App,
     ) {
         let matches = matches.downcast_ref().unwrap();
         self.update(cx, |this, cx| {
-            this.activate_match(index, matches.as_slice(), window, cx)
+            this.activate_match(index, matches.as_slice(), collapse, window, cx)
         });
     }
 

crates/workspace/src/workspace.rs πŸ”—

@@ -7089,6 +7089,9 @@ actions!(
     [
         /// Opens the channel notes for the current call.
         ///
+        /// Use `collab_panel::OpenSelectedChannelNotes` to open the channel notes for the selected
+        /// channel in the collab panel.
+        ///
         /// If you want to open a specific channel, use `zed::OpenZedUrl` with a channel notes URL -
         /// can be copied via "Copy link to section" in the context menu of the channel notes
         /// buffer. These URLs look like `https://zed.dev/channel/channel-name-CHANNEL_ID/notes`.
@@ -7577,13 +7580,13 @@ pub fn create_and_open_local_file(
     path: &'static Path,
     window: &mut Window,
     cx: &mut Context<Workspace>,
-    default_content: impl 'static + Send + FnOnce() -> Rope,
+    default_content: impl 'static + Send + FnOnce(&mut AsyncApp) -> Rope,
 ) -> Task<Result<Box<dyn ItemHandle>>> {
     cx.spawn_in(window, async move |workspace, cx| {
         let fs = workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;
         if !fs.is_file(path).await {
             fs.create_file(path, Default::default()).await?;
-            fs.save(path, &default_content(), Default::default())
+            fs.save(path, &default_content(cx), Default::default())
                 .await?;
         }
 

crates/worktree/src/worktree.rs πŸ”—

@@ -1318,7 +1318,8 @@ impl LocalWorktree {
         let entry = self.refresh_entry(path.clone(), None, cx);
         let is_private = self.is_path_private(path.as_ref());
 
-        cx.spawn(async move |this, _cx| {
+        let this = cx.weak_entity();
+        cx.background_spawn(async move {
             // WARN: Temporary workaround for #27283.
             //       We are not efficient with our memory usage per file, and use in excess of 64GB for a 10GB file
             //       Therefore, as a temporary workaround to prevent system freezes, we just bail before opening a file
@@ -1702,6 +1703,7 @@ impl LocalWorktree {
         };
         let t0 = Instant::now();
         let mut refresh = self.refresh_entries_for_paths(paths);
+        // todo(lw): Hot foreground spawn
         cx.spawn(async move |this, cx| {
             refresh.recv().await;
             log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed());

crates/worktree/src/worktree_tests.rs πŸ”—

@@ -20,6 +20,7 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
+use text::Rope;
 use util::{
     ResultExt, path,
     rel_path::{RelPath, rel_path},
@@ -646,9 +647,13 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
 
     // Update the gitignore so that node_modules is no longer ignored,
     // but a subdirectory is ignored
-    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
-        .await
-        .unwrap();
+    fs.save(
+        "/root/.gitignore".as_ref(),
+        &Rope::from_str("e", cx.background_executor()),
+        Default::default(),
+    )
+    .await
+    .unwrap();
     cx.executor().run_until_parked();
 
     // All of the directories that are no longer ignored are now loaded.
@@ -716,7 +721,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
         .update(cx, |tree, cx| {
             tree.write_file(
                 rel_path("tracked-dir/file.txt").into(),
-                "hello".into(),
+                Rope::from_str("hello", cx.background_executor()),
                 Default::default(),
                 cx,
             )
@@ -727,7 +732,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
         .update(cx, |tree, cx| {
             tree.write_file(
                 rel_path("ignored-dir/file.txt").into(),
-                "world".into(),
+                Rope::from_str("world", cx.background_executor()),
                 Default::default(),
                 cx,
             )
@@ -1465,7 +1470,7 @@ async fn test_random_worktree_operations_during_initial_scan(
     let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
     fs.as_fake().insert_tree(root_dir, json!({})).await;
     for _ in 0..initial_entries {
-        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
     }
     log::info!("generated initial tree");
 
@@ -1555,7 +1560,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
     let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
     fs.as_fake().insert_tree(root_dir, json!({})).await;
     for _ in 0..initial_entries {
-        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
     }
     log::info!("generated initial tree");
 
@@ -1598,7 +1603,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
                 .await
                 .log_err();
         } else {
-            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
         }
 
         let buffered_event_count = fs.as_fake().buffered_event_count();
@@ -1607,7 +1612,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
             log::info!("flushing {} events", len);
             fs.as_fake().flush_events(len);
         } else {
-            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
+            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng, cx.background_executor()).await;
             mutations_len -= 1;
         }
 
@@ -1759,8 +1764,12 @@ fn randomly_mutate_worktree(
                 })
             } else {
                 log::info!("overwriting file {:?} ({})", &entry.path, entry.id.0);
-                let task =
-                    worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
+                let task = worktree.write_file(
+                    entry.path.clone(),
+                    Rope::default(),
+                    Default::default(),
+                    cx,
+                );
                 cx.background_spawn(async move {
                     task.await?;
                     Ok(())
@@ -1775,6 +1784,7 @@ async fn randomly_mutate_fs(
     root_path: &Path,
     insertion_probability: f64,
     rng: &mut impl Rng,
+    executor: &BackgroundExecutor,
 ) {
     log::info!("mutating fs");
     let mut files = Vec::new();
@@ -1849,7 +1859,7 @@ async fn randomly_mutate_fs(
         );
         fs.save(
             &ignore_path,
-            &ignore_contents.as_str().into(),
+            &Rope::from_str(ignore_contents.as_str(), executor),
             Default::default(),
         )
         .await

crates/zed/Cargo.toml πŸ”—

@@ -73,6 +73,7 @@ gpui = { workspace = true, features = [
     "windows-manifest",
 ] }
 gpui_tokio.workspace = true
+rayon.workspace = true
 
 edit_prediction_button.workspace = true
 http_client.workspace = true

crates/zed/src/main.rs πŸ”—

@@ -257,6 +257,13 @@ pub fn main() {
         return;
     }
 
+    rayon::ThreadPoolBuilder::new()
+        .num_threads(4)
+        .stack_size(10 * 1024 * 1024)
+        .thread_name(|ix| format!("RayonWorker{}", ix))
+        .build_global()
+        .unwrap();
+
     log::info!(
         "========== starting zed version {}, sha {} ==========",
         app_version,

crates/zed/src/zed.rs πŸ”—

@@ -28,10 +28,10 @@ use git_ui::commit_view::CommitViewToolbar;
 use git_ui::git_panel::GitPanel;
 use git_ui::project_diff::ProjectDiffToolbar;
 use gpui::{
-    Action, App, AppContext as _, Context, DismissEvent, Element, Entity, Focusable, KeyBinding,
-    ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, Task,
-    TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, point,
-    px, retain_all,
+    Action, App, AppContext as _, AsyncApp, Context, DismissEvent, Element, Entity, Focusable,
+    KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled,
+    Task, TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache,
+    point, px, retain_all,
 };
 use image_viewer::ImageInfo;
 use language::Capability;
@@ -201,7 +201,12 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::keymap_file(),
-                || settings::initial_keymap_content().as_ref().into(),
+                |cx| {
+                    Rope::from_str(
+                        settings::initial_keymap_content().as_ref(),
+                        cx.background_executor(),
+                    )
+                },
                 window,
                 cx,
             );
@@ -211,7 +216,12 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::settings_file(),
-                || settings::initial_user_settings_content().as_ref().into(),
+                |cx| {
+                    Rope::from_str(
+                        settings::initial_user_settings_content().as_ref(),
+                        cx.background_executor(),
+                    )
+                },
                 window,
                 cx,
             );
@@ -226,7 +236,12 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::tasks_file(),
-                || settings::initial_tasks_content().as_ref().into(),
+                |cx| {
+                    Rope::from_str(
+                        settings::initial_tasks_content().as_ref(),
+                        cx.background_executor(),
+                    )
+                },
                 window,
                 cx,
             );
@@ -236,7 +251,12 @@ pub fn init(cx: &mut App) {
         with_active_or_new_workspace(cx, |_, window, cx| {
             open_settings_file(
                 paths::debug_scenarios_file(),
-                || settings::initial_debug_tasks_content().as_ref().into(),
+                |cx| {
+                    Rope::from_str(
+                        settings::initial_debug_tasks_content().as_ref(),
+                        cx.background_executor(),
+                    )
+                },
                 window,
                 cx,
             );
@@ -388,6 +408,7 @@ pub fn initialize_workspace(
                 app_state.fs.clone(),
                 app_state.user_store.clone(),
                 edit_prediction_menu_handle.clone(),
+                app_state.client.clone(),
                 cx,
             )
         });
@@ -1938,7 +1959,7 @@ fn open_bundled_file(
 
 fn open_settings_file(
     abs_path: &'static Path,
-    default_content: impl FnOnce() -> Rope + Send + 'static,
+    default_content: impl FnOnce(&mut AsyncApp) -> Rope + Send + 'static,
     window: &mut Window,
     cx: &mut Context<Workspace>,
 ) {
@@ -2859,16 +2880,20 @@ mod tests {
         });
 
         // Split the pane with the first entry, then open the second entry again.
-        let (task1, task2) = window
+        window
             .update(cx, |w, window, cx| {
-                (
-                    w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, window, cx),
-                    w.open_path(file2.clone(), None, true, window, cx),
-                )
+                w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, window, cx)
+            })
+            .unwrap()
+            .await
+            .unwrap();
+        window
+            .update(cx, |w, window, cx| {
+                w.open_path(file2.clone(), None, true, window, cx)
             })
+            .unwrap()
+            .await
             .unwrap();
-        task1.await.unwrap();
-        task2.await.unwrap();
 
         window
             .read_with(cx, |w, cx| {
@@ -4350,7 +4375,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &r#"{"base_keymap": "Atom"}"#.into(),
+                &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#),
                 Default::default(),
             )
             .await
@@ -4360,7 +4385,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(),
+                &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#),
                 Default::default(),
             )
             .await
@@ -4408,7 +4433,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#.into(),
+                &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#),
                 Default::default(),
             )
             .await
@@ -4428,7 +4453,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &r#"{"base_keymap": "JetBrains"}"#.into(),
+                &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#),
                 Default::default(),
             )
             .await
@@ -4468,7 +4493,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &r#"{"base_keymap": "Atom"}"#.into(),
+                &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#),
                 Default::default(),
             )
             .await
@@ -4477,7 +4502,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(),
+                &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#),
                 Default::default(),
             )
             .await
@@ -4520,7 +4545,7 @@ mod tests {
             .fs
             .save(
                 "/keymap.json".as_ref(),
-                &r#"[{"bindings": {"backspace": null}}]"#.into(),
+                &Rope::from_str_small(r#"[{"bindings": {"backspace": null}}]"#),
                 Default::default(),
             )
             .await
@@ -4540,7 +4565,7 @@ mod tests {
             .fs
             .save(
                 "/settings.json".as_ref(),
-                &r#"{"base_keymap": "JetBrains"}"#.into(),
+                &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#),
                 Default::default(),
             )
             .await

crates/zed/src/zed/open_listener.rs πŸ”—

@@ -861,7 +861,7 @@ mod tests {
             .fs
             .save(
                 Path::new(file1_path),
-                &Rope::from("content1"),
+                &Rope::from_str("content1", cx.background_executor()),
                 LineEnding::Unix,
             )
             .await
@@ -875,7 +875,7 @@ mod tests {
             .fs
             .save(
                 Path::new(file2_path),
-                &Rope::from("content2"),
+                &Rope::from_str("content2", cx.background_executor()),
                 LineEnding::Unix,
             )
             .await

crates/zeta/src/zeta.rs πŸ”—

@@ -1836,12 +1836,13 @@ mod tests {
         let fs = project::FakeFs::new(cx.executor());
         let project = Project::test(fs.clone(), [], cx).await;
 
-        let buffer = cx.new(|_cx| {
+        let buffer = cx.new(|cx| {
             Buffer::remote(
                 language::BufferId::new(1).unwrap(),
                 ReplicaId::new(1),
                 language::Capability::ReadWrite,
                 "fn main() {\n    println!(\"Hello\");\n}",
+                cx.background_executor(),
             )
         });
 

crates/zeta2/src/merge_excerpts.rs πŸ”—

@@ -1,4 +1,4 @@
-use cloud_llm_client::predict_edits_v3::{self, Excerpt};
+use cloud_llm_client::predict_edits_v3::Excerpt;
 use edit_prediction_context::Line;
 use language::{BufferSnapshot, Point};
 use std::ops::Range;
@@ -58,26 +58,12 @@ pub fn merge_excerpts(
     output
 }
 
-pub fn write_merged_excerpts(
-    buffer: &BufferSnapshot,
-    sorted_line_ranges: impl IntoIterator<Item = Range<Line>>,
-    sorted_insertions: &[(predict_edits_v3::Point, &str)],
-    output: &mut String,
-) {
-    cloud_zeta2_prompt::write_excerpts(
-        merge_excerpts(buffer, sorted_line_ranges).iter(),
-        sorted_insertions,
-        Line(buffer.max_point().row),
-        true,
-        output,
-    );
-}
-
 #[cfg(test)]
 mod tests {
     use std::sync::Arc;
 
     use super::*;
+    use cloud_llm_client::predict_edits_v3;
     use gpui::{TestAppContext, prelude::*};
     use indoc::indoc;
     use language::{Buffer, Language, LanguageConfig, LanguageMatcher, OffsetRangeExt};
@@ -168,7 +154,13 @@ mod tests {
                     .collect();
 
                 let mut output = String::new();
-                write_merged_excerpts(&buffer.snapshot(), ranges, &insertions, &mut output);
+                cloud_zeta2_prompt::write_excerpts(
+                    merge_excerpts(&buffer.snapshot(), ranges).iter(),
+                    &insertions,
+                    Line(buffer.max_point().row),
+                    true,
+                    &mut output,
+                );
                 assert_eq!(output, expected_output);
             });
         }

crates/zeta2/src/related_excerpts.rs πŸ”—

@@ -1,28 +1,41 @@
-use std::{cmp::Reverse, fmt::Write, ops::Range, path::PathBuf, sync::Arc, time::Instant};
+use std::{
+    cmp::Reverse, collections::hash_map::Entry, ops::Range, path::PathBuf, sync::Arc, time::Instant,
+};
 
 use crate::{
-    ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo,
-    merge_excerpts::write_merged_excerpts,
+    ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo,
+    ZetaSearchQueryDebugInfo, merge_excerpts::merge_excerpts,
 };
 use anyhow::{Result, anyhow};
+use cloud_zeta2_prompt::write_codeblock;
 use collections::HashMap;
 use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions, Line};
-use futures::{StreamExt, channel::mpsc, stream::BoxStream};
-use gpui::{App, AsyncApp, Entity, Task};
+use futures::{
+    StreamExt,
+    channel::mpsc::{self, UnboundedSender},
+    stream::BoxStream,
+};
+use gpui::{App, AppContext, AsyncApp, Entity, Task};
 use indoc::indoc;
-use language::{Anchor, Bias, Buffer, OffsetRangeExt, Point, TextBufferSnapshot, ToPoint as _};
+use language::{
+    Anchor, Bias, Buffer, BufferSnapshot, OffsetRangeExt, Point, TextBufferSnapshot, ToPoint as _,
+};
 use language_model::{
     LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId,
-    LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
-    LanguageModelRequestTool, LanguageModelToolResult, MessageContent, Role,
+    LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
+    LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
+    LanguageModelToolUse, MessageContent, Role,
 };
 use project::{
     Project, WorktreeSettings,
     search::{SearchQuery, SearchResult},
 };
 use schemars::JsonSchema;
-use serde::Deserialize;
-use util::paths::{PathMatcher, PathStyle};
+use serde::{Deserialize, Serialize};
+use util::{
+    ResultExt as _,
+    paths::{PathMatcher, PathStyle},
+};
 use workspace::item::Settings as _;
 
 const SEARCH_PROMPT: &str = indoc! {r#"
@@ -51,7 +64,7 @@ const SEARCH_PROMPT: &str = indoc! {r#"
 
     ## Current cursor context
 
-    `````filename={current_file_path}
+    `````{current_file_path}
     {cursor_excerpt}
     `````
 
@@ -64,22 +77,19 @@ const SEARCH_TOOL_NAME: &str = "search";
 /// Search for relevant code
 ///
 /// For the best results, run multiple queries at once with a single invocation of this tool.
-#[derive(Clone, Deserialize, JsonSchema)]
+#[derive(Clone, Deserialize, Serialize, JsonSchema)]
 pub struct SearchToolInput {
     /// An array of queries to run for gathering context relevant to the next prediction
     #[schemars(length(max = 5))]
     pub queries: Box<[SearchToolQuery]>,
 }
 
-#[derive(Debug, Clone, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
 pub struct SearchToolQuery {
     /// A glob pattern to match file paths in the codebase
     pub glob: String,
     /// A regular expression to match content within the files matched by the glob pattern
     pub regex: String,
-    /// Whether the regex is case-sensitive. Defaults to false (case-insensitive).
-    #[serde(default)]
-    pub case_sensitive: bool,
 }
 
 const RESULTS_MESSAGE: &str = indoc! {"
@@ -121,11 +131,13 @@ pub struct LlmContextOptions {
     pub excerpt: EditPredictionExcerptOptions,
 }
 
-pub fn find_related_excerpts<'a>(
+pub const MODEL_PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID;
+
+pub fn find_related_excerpts(
     buffer: Entity<language::Buffer>,
     cursor_position: Anchor,
     project: &Entity<Project>,
-    events: impl Iterator<Item = &'a crate::Event>,
+    mut edit_history_unified_diff: String,
     options: &LlmContextOptions,
     debug_tx: Option<mpsc::UnboundedSender<ZetaDebugInfo>>,
     cx: &App,
@@ -135,23 +147,15 @@ pub fn find_related_excerpts<'a>(
         .read(cx)
         .available_models(cx)
         .find(|model| {
-            model.provider_id() == language_model::ANTHROPIC_PROVIDER_ID
+            model.provider_id() == MODEL_PROVIDER_ID
                 && model.id() == LanguageModelId("claude-haiku-4-5-latest".into())
         })
     else {
-        return Task::ready(Err(anyhow!("could not find claude model")));
+        return Task::ready(Err(anyhow!("could not find context model")));
     };
 
-    let mut edits_string = String::new();
-
-    for event in events {
-        if let Some(event) = event.to_request_event(cx) {
-            writeln!(&mut edits_string, "{event}").ok();
-        }
-    }
-
-    if edits_string.is_empty() {
-        edits_string.push_str("(No user edits yet)");
+    if edit_history_unified_diff.is_empty() {
+        edit_history_unified_diff.push_str("(No user edits yet)");
     }
 
     // TODO [zeta2] include breadcrumbs?
@@ -169,10 +173,22 @@ pub fn find_related_excerpts<'a>(
         .unwrap_or_else(|| "untitled".to_string());
 
     let prompt = SEARCH_PROMPT
-        .replace("{edits}", &edits_string)
+        .replace("{edits}", &edit_history_unified_diff)
         .replace("{current_file_path}", &current_file_path)
         .replace("{cursor_excerpt}", &cursor_excerpt.text(&snapshot).body);
 
+    if let Some(debug_tx) = &debug_tx {
+        debug_tx
+            .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted(
+                ZetaContextRetrievalStartedDebugInfo {
+                    project: project.clone(),
+                    timestamp: Instant::now(),
+                    search_prompt: prompt.clone(),
+                },
+            ))
+            .ok();
+    }
+
     let path_style = project.read(cx).path_style(cx);
 
     let exclude_matcher = {
@@ -209,6 +225,8 @@ pub fn find_related_excerpts<'a>(
 
         let mut select_request_messages = Vec::with_capacity(5); // initial prompt, LLM response/thinking, tool use, tool result, select prompt
         select_request_messages.push(initial_prompt_message);
+
+        let mut regex_by_glob: HashMap<String, String> = HashMap::default();
         let mut search_calls = Vec::new();
 
         while let Some(event) = search_stream.next().await {
@@ -219,7 +237,18 @@ pub fn find_related_excerpts<'a>(
                     }
 
                     if tool_use.name.as_ref() == SEARCH_TOOL_NAME {
-                        search_calls.push((select_request_messages.len(), tool_use));
+                        let input =
+                            serde_json::from_value::<SearchToolInput>(tool_use.input.clone())?;
+
+                        for query in input.queries {
+                            let regex = regex_by_glob.entry(query.glob).or_default();
+                            if !regex.is_empty() {
+                                regex.push('|');
+                            }
+                            regex.push_str(&query.regex);
+                        }
+
+                        search_calls.push(tool_use);
                     } else {
                         log::warn!(
                             "context gathering model tried to use unknown tool: {}",
@@ -303,19 +332,35 @@ pub fn find_related_excerpts<'a>(
             }
         }
 
-        struct ResultBuffer {
-            buffer: Entity<Buffer>,
-            snapshot: TextBufferSnapshot,
-        }
-
-        let search_queries = search_calls
-            .iter()
-            .map(|(_, tool_use)| {
-                Ok(serde_json::from_value::<SearchToolInput>(
-                    tool_use.input.clone(),
-                )?)
+        let search_tool_use = if search_calls.is_empty() {
+            log::warn!("context model ran 0 searches");
+            return anyhow::Ok(Default::default());
+        } else if search_calls.len() == 1 {
+            search_calls.swap_remove(0)
+        } else {
+            // In theory, the model could perform multiple search calls, but
+            // handling them separately is not worth it since that doesn't happen in practice.
+            // If it did, we would combine them into one here;
+            // the second request doesn't need to know it was actually two different calls ;)
+            let input = serde_json::to_value(&SearchToolInput {
+                queries: regex_by_glob
+                    .iter()
+                    .map(|(glob, regex)| SearchToolQuery {
+                        glob: glob.clone(),
+                        regex: regex.clone(),
+                    })
+                    .collect(),
             })
-            .collect::<Result<Vec<_>>>()?;
+            .unwrap_or_default();
+
+            LanguageModelToolUse {
+                id: search_calls.swap_remove(0).id,
+                name: SELECT_TOOL_NAME.into(),
+                raw_input: serde_json::to_string(&input).unwrap_or_default(),
+                input,
+                is_input_complete: true,
+            }
+        };
 
         if let Some(debug_tx) = &debug_tx {
             debug_tx
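
The combination described in the comment above reduces to grouping queries by glob and joining their regexes into a single alternation, as the `regex_by_glob` accumulation earlier in this file does. A standalone sketch with plain strings and a `HashMap`, not the real `SearchToolInput`/`SearchToolQuery` plumbing:

```rust
use std::collections::HashMap;

// Merge (glob, regex) query pairs so each glob is searched once with a single
// alternation regex.
fn merge_queries(queries: &[(&str, &str)]) -> HashMap<String, String> {
    let mut regex_by_glob: HashMap<String, String> = HashMap::new();
    for (glob, regex) in queries {
        let merged = regex_by_glob.entry((*glob).to_string()).or_default();
        if !merged.is_empty() {
            merged.push('|');
        }
        merged.push_str(regex);
    }
    regex_by_glob
}

fn main() {
    let merged = merge_queries(&[
        ("src/**/*.rs", "fn main"),
        ("src/**/*.rs", "struct Config"),
        ("docs/**/*.md", "installation"),
    ]);
    assert_eq!(merged["src/**/*.rs"], "fn main|struct Config");
    assert_eq!(merged["docs/**/*.md"], "installation");
}
```
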
@@ -323,114 +368,127 @@ pub fn find_related_excerpts<'a>(
                     ZetaSearchQueryDebugInfo {
                         project: project.clone(),
                         timestamp: Instant::now(),
-                        queries: search_queries
+                        queries: regex_by_glob
                             .iter()
-                            .flat_map(|call| call.queries.iter().cloned())
+                            .map(|(glob, regex)| SearchToolQuery {
+                                glob: glob.clone(),
+                                regex: regex.clone(),
+                            })
                             .collect(),
                     },
                 ))
                 .ok();
         }
 
-        let mut result_buffers_by_path = HashMap::default();
-
-        for ((index, tool_use), call) in search_calls.into_iter().zip(search_queries).rev() {
-            let mut excerpts_by_buffer = HashMap::default();
-
-            for query in call.queries {
-                // TODO [zeta2] parallelize?
+        let (results_tx, mut results_rx) = mpsc::unbounded();
 
+        for (glob, regex) in regex_by_glob {
+            let exclude_matcher = exclude_matcher.clone();
+            let results_tx = results_tx.clone();
+            let project = project.clone();
+            cx.spawn(async move |cx| {
                 run_query(
-                    query,
-                    &mut excerpts_by_buffer,
+                    &glob,
+                    &regex,
+                    results_tx.clone(),
                     path_style,
-                    exclude_matcher.clone(),
+                    exclude_matcher,
                     &project,
                     cx,
                 )
-                .await?;
-            }
-
-            if excerpts_by_buffer.is_empty() {
-                continue;
-            }
-
-            let mut merged_result = RESULTS_MESSAGE.to_string();
-
-            for (buffer_entity, mut excerpts_for_buffer) in excerpts_by_buffer {
-                excerpts_for_buffer.sort_unstable_by_key(|range| (range.start, Reverse(range.end)));
-
-                buffer_entity
-                    .clone()
-                    .read_with(cx, |buffer, cx| {
-                        let Some(file) = buffer.file() else {
-                            return;
-                        };
-
-                        let path = file.full_path(cx);
-
-                        writeln!(&mut merged_result, "`````filename={}", path.display()).unwrap();
+                .await
+                .log_err();
+            })
+            .detach()
+        }
+        drop(results_tx);
 
-                        let snapshot = buffer.snapshot();
+        struct ResultBuffer {
+            buffer: Entity<Buffer>,
+            snapshot: TextBufferSnapshot,
+        }
 
-                        write_merged_excerpts(
-                            &snapshot,
-                            excerpts_for_buffer,
-                            &[],
-                            &mut merged_result,
-                        );
+        let (result_buffers_by_path, merged_result) = cx
+            .background_spawn(async move {
+                let mut excerpts_by_buffer: HashMap<Entity<Buffer>, MatchedBuffer> =
+                    HashMap::default();
+
+                while let Some((buffer, matched)) = results_rx.next().await {
+                    match excerpts_by_buffer.entry(buffer) {
+                        Entry::Occupied(mut entry) => {
+                            let entry = entry.get_mut();
+                            entry.full_path = matched.full_path;
+                            entry.snapshot = matched.snapshot;
+                            entry.line_ranges.extend(matched.line_ranges);
+                        }
+                        Entry::Vacant(entry) => {
+                            entry.insert(matched);
+                        }
+                    }
+                }
 
-                        merged_result.push_str("`````\n\n");
+                let mut result_buffers_by_path = HashMap::default();
+                let mut merged_result = RESULTS_MESSAGE.to_string();
+
+                for (buffer, mut matched) in excerpts_by_buffer {
+                    matched
+                        .line_ranges
+                        .sort_unstable_by_key(|range| (range.start, Reverse(range.end)));
+
+                    write_codeblock(
+                        &matched.full_path,
+                        merge_excerpts(&matched.snapshot, matched.line_ranges).iter(),
+                        &[],
+                        Line(matched.snapshot.max_point().row),
+                        true,
+                        &mut merged_result,
+                    );
+
+                    result_buffers_by_path.insert(
+                        matched.full_path,
+                        ResultBuffer {
+                            buffer,
+                            snapshot: matched.snapshot.text,
+                        },
+                    );
+                }
 
-                        result_buffers_by_path.insert(
-                            path,
-                            ResultBuffer {
-                                buffer: buffer_entity,
-                                snapshot: snapshot.text,
-                            },
-                        );
-                    })
-                    .ok();
-            }
+                (result_buffers_by_path, merged_result)
+            })
+            .await;
 
-            let tool_result = LanguageModelToolResult {
-                tool_use_id: tool_use.id.clone(),
-                tool_name: SEARCH_TOOL_NAME.into(),
-                is_error: false,
-                content: merged_result.into(),
-                output: None,
-            };
-
-            // Almost always appends at the end, but in theory, the model could return some text after the tool call
-            // or perform parallel tool calls, so we splice at the message index for correctness.
-            select_request_messages.splice(
-                index..index,
-                [
-                    LanguageModelRequestMessage {
-                        role: Role::Assistant,
-                        content: vec![MessageContent::ToolUse(tool_use)],
-                        cache: false,
-                    },
-                    LanguageModelRequestMessage {
-                        role: Role::User,
-                        content: vec![MessageContent::ToolResult(tool_result)],
-                        cache: false,
+        if let Some(debug_tx) = &debug_tx {
+            debug_tx
+                .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted(
+                    ZetaContextRetrievalDebugInfo {
+                        project: project.clone(),
+                        timestamp: Instant::now(),
                     },
-                ],
-            );
-
-            if let Some(debug_tx) = &debug_tx {
-                debug_tx
-                    .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted(
-                        ZetaContextRetrievalDebugInfo {
-                            project: project.clone(),
-                            timestamp: Instant::now(),
-                        },
-                    ))
-                    .ok();
-            }
+                ))
+                .ok();
         }
 
+        let tool_result = LanguageModelToolResult {
+            tool_use_id: search_tool_use.id.clone(),
+            tool_name: SEARCH_TOOL_NAME.into(),
+            is_error: false,
+            content: merged_result.into(),
+            output: None,
+        };
+
+        select_request_messages.extend([
+            LanguageModelRequestMessage {
+                role: Role::Assistant,
+                content: vec![MessageContent::ToolUse(search_tool_use)],
+                cache: false,
+            },
+            LanguageModelRequestMessage {
+                role: Role::User,
+                content: vec![MessageContent::ToolResult(tool_result)],
+                cache: false,
+            },
+        ]);
+
         if result_buffers_by_path.is_empty() {
             log::trace!("context gathering queries produced no results");
             return anyhow::Ok(HashMap::default());
@@ -449,73 +507,85 @@ pub fn find_related_excerpts<'a>(
             cx,
         )
         .await?;
-        let mut selected_ranges = Vec::new();
 
-        while let Some(event) = select_stream.next().await {
-            match event? {
-                LanguageModelCompletionEvent::ToolUse(tool_use) => {
-                    if !tool_use.is_input_complete {
-                        continue;
-                    }
+        cx.background_spawn(async move {
+            let mut selected_ranges = Vec::new();
 
-                    if tool_use.name.as_ref() == SELECT_TOOL_NAME {
-                        let call =
-                            serde_json::from_value::<SelectToolInput>(tool_use.input.clone())?;
-                        selected_ranges.extend(call.ranges);
-                    } else {
-                        log::warn!(
-                            "context gathering model tried to use unknown tool: {}",
-                            tool_use.name
-                        );
+            while let Some(event) = select_stream.next().await {
+                match event? {
+                    LanguageModelCompletionEvent::ToolUse(tool_use) => {
+                        if !tool_use.is_input_complete {
+                            continue;
+                        }
+
+                        if tool_use.name.as_ref() == SELECT_TOOL_NAME {
+                            let call =
+                                serde_json::from_value::<SelectToolInput>(tool_use.input.clone())?;
+                            selected_ranges.extend(call.ranges);
+                        } else {
+                            log::warn!(
+                                "context gathering model tried to use unknown tool: {}",
+                                tool_use.name
+                            );
+                        }
+                    }
+                    ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } => {
+                        log::error!("{ev:?}");
+                    }
+                    ev => {
+                        log::trace!("context select event: {ev:?}")
                     }
-                }
-                ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } => {
-                    log::error!("{ev:?}");
-                }
-                ev => {
-                    log::trace!("context select event: {ev:?}")
                 }
             }
-        }
 
-        if selected_ranges.is_empty() {
-            log::trace!("context gathering selected no ranges")
-        }
+            if let Some(debug_tx) = &debug_tx {
+                debug_tx
+                    .unbounded_send(ZetaDebugInfo::SearchResultsFiltered(
+                        ZetaContextRetrievalDebugInfo {
+                            project: project.clone(),
+                            timestamp: Instant::now(),
+                        },
+                    ))
+                    .ok();
+            }
 
-        let mut related_excerpts_by_buffer: HashMap<_, Vec<_>> = HashMap::default();
-
-        for selected_range in selected_ranges {
-            if let Some(ResultBuffer { buffer, snapshot }) =
-                result_buffers_by_path.get(&selected_range.path)
-            {
-                let start_point = Point::new(selected_range.start_line.saturating_sub(1), 0);
-                let end_point =
-                    snapshot.clip_point(Point::new(selected_range.end_line, 0), Bias::Left);
-                let range = snapshot.anchor_after(start_point)..snapshot.anchor_before(end_point);
-
-                related_excerpts_by_buffer
-                    .entry(buffer.clone())
-                    .or_default()
-                    .push(range);
-            } else {
-                log::warn!(
-                    "selected path that wasn't included in search results: {}",
-                    selected_range.path.display()
-                );
+            if selected_ranges.is_empty() {
+                log::trace!("context gathering selected no ranges")
             }
-        }
 
-        for (buffer, ranges) in &mut related_excerpts_by_buffer {
-            buffer.read_with(cx, |buffer, _cx| {
-                ranges.sort_unstable_by(|a, b| {
-                    a.start
-                        .cmp(&b.start, buffer)
-                        .then(b.end.cmp(&a.end, buffer))
-                });
-            })?;
-        }
+            selected_ranges.sort_unstable_by(|a, b| {
+                a.start_line
+                    .cmp(&b.start_line)
+                    .then(b.end_line.cmp(&a.end_line))
+            });
+
+            let mut related_excerpts_by_buffer: HashMap<_, Vec<_>> = HashMap::default();
+
+            for selected_range in selected_ranges {
+                if let Some(ResultBuffer { buffer, snapshot }) =
+                    result_buffers_by_path.get(&selected_range.path)
+                {
+                    let start_point = Point::new(selected_range.start_line.saturating_sub(1), 0);
+                    let end_point =
+                        snapshot.clip_point(Point::new(selected_range.end_line, 0), Bias::Left);
+                    let range =
+                        snapshot.anchor_after(start_point)..snapshot.anchor_before(end_point);
+
+                    related_excerpts_by_buffer
+                        .entry(buffer.clone())
+                        .or_default()
+                        .push(range);
+                } else {
+                    log::warn!(
+                        "selected path that wasn't included in search results: {}",
+                        selected_range.path.display()
+                    );
+                }
+            }
 
-        anyhow::Ok(related_excerpts_by_buffer)
+            anyhow::Ok(related_excerpts_by_buffer)
+        })
+        .await
     })
 }
 
@@ -549,20 +619,27 @@ const MIN_EXCERPT_LEN: usize = 16;
 const MAX_EXCERPT_LEN: usize = 768;
 const MAX_RESULT_BYTES_PER_QUERY: usize = MAX_EXCERPT_LEN * 5;
 
+struct MatchedBuffer {
+    snapshot: BufferSnapshot,
+    line_ranges: Vec<Range<Line>>,
+    full_path: PathBuf,
+}
+
 async fn run_query(
-    args: SearchToolQuery,
-    excerpts_by_buffer: &mut HashMap<Entity<Buffer>, Vec<Range<Line>>>,
+    glob: &str,
+    regex: &str,
+    results_tx: UnboundedSender<(Entity<Buffer>, MatchedBuffer)>,
     path_style: PathStyle,
     exclude_matcher: PathMatcher,
     project: &Entity<Project>,
     cx: &mut AsyncApp,
 ) -> Result<()> {
-    let include_matcher = PathMatcher::new(vec![args.glob], path_style)?;
+    let include_matcher = PathMatcher::new(vec![glob], path_style)?;
 
     let query = SearchQuery::regex(
-        &args.regex,
+        regex,
         false,
-        args.case_sensitive,
+        true,
         false,
         true,
         include_matcher,
@@ -581,42 +658,56 @@ async fn run_query(
             continue;
         }
 
-        let excerpts_for_buffer = excerpts_by_buffer
-            .entry(buffer.clone())
-            .or_insert_with(|| Vec::with_capacity(ranges.len()));
+        let Some((snapshot, full_path)) = buffer.read_with(cx, |buffer, cx| {
+            Some((buffer.snapshot(), buffer.file()?.full_path(cx)))
+        })?
+        else {
+            continue;
+        };
 
-        let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
+        let results_tx = results_tx.clone();
+        cx.background_spawn(async move {
+            let mut line_ranges = Vec::with_capacity(ranges.len());
 
-        for range in ranges {
-            let offset_range = range.to_offset(&snapshot);
-            let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot);
+            for range in ranges {
+                let offset_range = range.to_offset(&snapshot);
+                let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot);
 
-            if total_bytes + MIN_EXCERPT_LEN >= MAX_RESULT_BYTES_PER_QUERY {
-                break;
-            }
+                if total_bytes + MIN_EXCERPT_LEN >= MAX_RESULT_BYTES_PER_QUERY {
+                    break;
+                }
 
-            let excerpt = EditPredictionExcerpt::select_from_buffer(
-                query_point,
-                &snapshot,
-                &EditPredictionExcerptOptions {
-                    max_bytes: MAX_EXCERPT_LEN.min(MAX_RESULT_BYTES_PER_QUERY - total_bytes),
-                    min_bytes: MIN_EXCERPT_LEN,
-                    target_before_cursor_over_total_bytes: 0.5,
-                },
-                None,
-            );
+                let excerpt = EditPredictionExcerpt::select_from_buffer(
+                    query_point,
+                    &snapshot,
+                    &EditPredictionExcerptOptions {
+                        max_bytes: MAX_EXCERPT_LEN.min(MAX_RESULT_BYTES_PER_QUERY - total_bytes),
+                        min_bytes: MIN_EXCERPT_LEN,
+                        target_before_cursor_over_total_bytes: 0.5,
+                    },
+                    None,
+                );
 
-            if let Some(excerpt) = excerpt {
-                total_bytes += excerpt.range.len();
-                if !excerpt.line_range.is_empty() {
-                    excerpts_for_buffer.push(excerpt.line_range);
+                if let Some(excerpt) = excerpt {
+                    total_bytes += excerpt.range.len();
+                    if !excerpt.line_range.is_empty() {
+                        line_ranges.push(excerpt.line_range);
+                    }
                 }
             }
-        }
 
-        if excerpts_for_buffer.is_empty() {
-            excerpts_by_buffer.remove(&buffer);
-        }
+            results_tx
+                .unbounded_send((
+                    buffer,
+                    MatchedBuffer {
+                        snapshot,
+                        line_ranges,
+                        full_path,
+                    },
+                ))
+                .log_err();
+        })
+        .detach();
     }
 
     anyhow::Ok(())

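The rewrite above replaces the sequential per-query loop with one spawned task per glob/regex pair, all streaming their matches into an unbounded channel that a single background task drains and merges. A minimal standalone sketch of that fan-out/aggregate channel pattern, using only the `futures` crate and OS threads as stand-ins for the gpui executor (the glob strings and line numbers below are made up):

```rust
// Fan-out/aggregate over an unbounded channel: clone one sender per producer,
// drop the original so the receiver stream ends once every clone is gone,
// then drain the receiver in a single consumer.
use futures::StreamExt as _;
use futures::channel::mpsc;
use std::collections::HashMap;
use std::thread;

fn main() {
    let (results_tx, mut results_rx) = mpsc::unbounded::<(String, Vec<u32>)>();

    // Hypothetical per-glob producers; the real code spawns these on the executor.
    for (glob, lines) in [("src/*.rs", vec![3, 7]), ("docs/*.md", vec![1])] {
        let tx = results_tx.clone();
        thread::spawn(move || {
            tx.unbounded_send((glob.to_string(), lines)).ok();
        });
    }
    // Without this drop, the `while let` below would never terminate.
    drop(results_tx);

    // Single consumer merges results per key, mirroring the excerpts-by-buffer step.
    let by_glob = futures::executor::block_on(async move {
        let mut by_glob: HashMap<String, Vec<u32>> = HashMap::new();
        while let Some((glob, lines)) = results_rx.next().await {
            by_glob.entry(glob).or_default().extend(lines);
        }
        by_glob
    });
    assert_eq!(by_glob["src/*.rs"], vec![3, 7]);
}
```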
crates/zeta2/src/zeta2.rs πŸ”—

@@ -28,6 +28,7 @@ use project::Project;
 use release_channel::AppVersion;
 use serde::de::DeserializeOwned;
 use std::collections::{VecDeque, hash_map};
+use std::fmt::Write;
 use std::ops::Range;
 use std::path::Path;
 use std::str::FromStr as _;
@@ -38,10 +39,10 @@ use util::ResultExt as _;
 use util::rel_path::RelPathBuf;
 use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification};
 
-mod merge_excerpts;
+pub mod merge_excerpts;
 mod prediction;
 mod provider;
-mod related_excerpts;
+pub mod related_excerpts;
 
 use crate::merge_excerpts::merge_excerpts;
 use crate::prediction::EditPrediction;
@@ -135,13 +136,20 @@ impl ContextMode {
 }
 
 pub enum ZetaDebugInfo {
-    ContextRetrievalStarted(ZetaContextRetrievalDebugInfo),
+    ContextRetrievalStarted(ZetaContextRetrievalStartedDebugInfo),
     SearchQueriesGenerated(ZetaSearchQueryDebugInfo),
     SearchQueriesExecuted(ZetaContextRetrievalDebugInfo),
+    SearchResultsFiltered(ZetaContextRetrievalDebugInfo),
     ContextRetrievalFinished(ZetaContextRetrievalDebugInfo),
     EditPredicted(ZetaEditPredictionDebugInfo),
 }
 
+pub struct ZetaContextRetrievalStartedDebugInfo {
+    pub project: Entity<Project>,
+    pub timestamp: Instant,
+    pub search_prompt: String,
+}
+
 pub struct ZetaContextRetrievalDebugInfo {
     pub project: Entity<Project>,
     pub timestamp: Instant,
@@ -1085,17 +1093,6 @@ impl Zeta {
         zeta_project
             .refresh_context_task
             .get_or_insert(cx.spawn(async move |this, cx| {
-                if let Some(debug_tx) = &debug_tx {
-                    debug_tx
-                        .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted(
-                            ZetaContextRetrievalDebugInfo {
-                                project: project.clone(),
-                                timestamp: Instant::now(),
-                            },
-                        ))
-                        .ok();
-                }
-
                 let related_excerpts = this
                     .update(cx, |this, cx| {
                         let Some(zeta_project) = this.projects.get(&project.entity_id()) else {
@@ -1106,11 +1103,19 @@ impl Zeta {
                             return Task::ready(anyhow::Ok(HashMap::default()));
                         };
 
+                        let mut edit_history_unified_diff = String::new();
+
+                        for event in zeta_project.events.iter() {
+                            if let Some(event) = event.to_request_event(cx) {
+                                writeln!(&mut edit_history_unified_diff, "{event}").ok();
+                            }
+                        }
+
                         find_related_excerpts(
                             buffer.clone(),
                             cursor_position,
                             &project,
-                            zeta_project.events.iter(),
+                            edit_history_unified_diff,
                             options,
                             debug_tx,
                             cx,

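The new `use std::fmt::Write` import exists so the edit history can be accumulated into a `String` with `writeln!`. A tiny standalone sketch of that idiom, with made-up stand-ins for the request events:

```rust
// writeln! into a String goes through the std::fmt::Write trait, so the trait
// must be in scope; without the import this does not compile.
use std::fmt::Write as _;

fn main() {
    // Hypothetical stand-ins for the events rendered into the unified diff.
    let events = ["@@ -1,1 +1,1 @@", "-old line", "+new line"];

    let mut edit_history_unified_diff = String::new();
    for event in events {
        // .ok() mirrors the original: a fmt error when writing to a String is
        // effectively impossible, so the result is deliberately ignored.
        writeln!(&mut edit_history_unified_diff, "{event}").ok();
    }

    assert_eq!(edit_history_unified_diff.lines().count(), 3);
}
```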
crates/zeta2_tools/Cargo.toml πŸ”—

@@ -30,6 +30,7 @@ project.workspace = true
 serde.workspace = true
 telemetry.workspace = true
 text.workspace = true
+regex-syntax = "0.8.8"
 ui.workspace = true
 ui_input.workspace = true
 util.workspace = true

crates/zeta2_tools/src/zeta2_context_view.rs πŸ”—

@@ -20,11 +20,13 @@ use project::Project;
 use text::OffsetRangeExt;
 use ui::{
     ButtonCommon, Clickable, Color, Disableable, FluentBuilder as _, Icon, IconButton, IconName,
-    IconSize, InteractiveElement, IntoElement, ListItem, StyledTypography, div, h_flex, v_flex,
+    IconSize, InteractiveElement, IntoElement, ListHeader, ListItem, StyledTypography, div, h_flex,
+    v_flex,
 };
 use workspace::{Item, ItemHandle as _};
 use zeta2::{
-    SearchToolQuery, Zeta, ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo,
+    Zeta, ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo,
+    ZetaSearchQueryDebugInfo,
 };
 
 pub struct Zeta2ContextView {
@@ -37,15 +39,22 @@ pub struct Zeta2ContextView {
 }
 
 #[derive(Debug)]
-pub struct RetrievalRun {
+struct RetrievalRun {
     editor: Entity<Editor>,
-    search_queries: Vec<SearchToolQuery>,
+    search_queries: Vec<GlobQueries>,
     started_at: Instant,
     search_results_generated_at: Option<Instant>,
     search_results_executed_at: Option<Instant>,
+    search_results_filtered_at: Option<Instant>,
     finished_at: Option<Instant>,
 }
 
+#[derive(Debug)]
+struct GlobQueries {
+    glob: String,
+    alternations: Vec<String>,
+}
+
 actions!(
     dev,
     [
@@ -108,6 +117,11 @@ impl Zeta2ContextView {
                     self.handle_search_queries_executed(info, window, cx);
                 }
             }
+            ZetaDebugInfo::SearchResultsFiltered(info) => {
+                if info.project == self.project {
+                    self.handle_search_results_filtered(info, window, cx);
+                }
+            }
             ZetaDebugInfo::ContextRetrievalFinished(info) => {
                 if info.project == self.project {
                     self.handle_context_retrieval_finished(info, window, cx);
@@ -119,7 +133,7 @@ impl Zeta2ContextView {
 
     fn handle_context_retrieval_started(
         &mut self,
-        info: ZetaContextRetrievalDebugInfo,
+        info: ZetaContextRetrievalStartedDebugInfo,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -145,6 +159,7 @@ impl Zeta2ContextView {
             started_at: info.timestamp,
             search_results_generated_at: None,
             search_results_executed_at: None,
+            search_results_filtered_at: None,
             finished_at: None,
         });
 
@@ -202,7 +217,23 @@ impl Zeta2ContextView {
         };
 
         run.search_results_generated_at = Some(info.timestamp);
-        run.search_queries = info.queries;
+        run.search_queries = info
+            .queries
+            .into_iter()
+            .map(|query| {
+                let mut regex_parser = regex_syntax::ast::parse::Parser::new();
+
+                GlobQueries {
+                    glob: query.glob,
+                    alternations: match regex_parser.parse(&query.regex) {
+                        Ok(regex_syntax::ast::Ast::Alternation(ref alt)) => {
+                            alt.asts.iter().map(|ast| ast.to_string()).collect()
+                        }
+                        _ => vec![query.regex],
+                    },
+                }
+            })
+            .collect();
         cx.notify();
     }
 
@@ -225,6 +256,20 @@ impl Zeta2ContextView {
         cx.notify();
     }
 
+    fn handle_search_results_filtered(
+        &mut self,
+        info: ZetaContextRetrievalDebugInfo,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(run) = self.runs.back_mut() else {
+            return;
+        };
+
+        run.search_results_filtered_at = Some(info.timestamp);
+        cx.notify();
+    }
+
     fn handle_go_back(
         &mut self,
         _: &Zeta2ContextGoBack,
@@ -255,28 +300,37 @@ impl Zeta2ContextView {
         let run = &self.runs[self.current_ix];
 
         h_flex()
+            .p_2()
             .w_full()
             .font_buffer(cx)
             .text_xs()
             .border_t_1()
+            .gap_2()
             .child(
-                v_flex()
-                    .h_full()
-                    .flex_1()
-                    .children(run.search_queries.iter().enumerate().map(|(ix, query)| {
-                        ListItem::new(ix)
-                            .start_slot(
-                                Icon::new(IconName::MagnifyingGlass)
-                                    .color(Color::Muted)
-                                    .size(IconSize::Small),
-                            )
-                            .child(query.regex.clone())
-                    })),
+                v_flex().h_full().flex_1().children(
+                    run.search_queries
+                        .iter()
+                        .enumerate()
+                        .flat_map(|(ix, query)| {
+                            std::iter::once(ListHeader::new(query.glob.clone()).into_any_element())
+                                .chain(query.alternations.iter().enumerate().map(
+                                    move |(alt_ix, alt)| {
+                                        ListItem::new(ix * 100 + alt_ix)
+                                            .start_slot(
+                                                Icon::new(IconName::MagnifyingGlass)
+                                                    .color(Color::Muted)
+                                                    .size(IconSize::Small),
+                                            )
+                                            .child(alt.clone())
+                                            .into_any_element()
+                                    },
+                                ))
+                        }),
+                ),
             )
             .child(
                 v_flex()
                     .h_full()
-                    .pr_2()
                     .text_align(TextAlign::Right)
                     .child(
                         h_flex()
@@ -325,25 +379,38 @@ impl Zeta2ContextView {
                             ),
                     )
                     .map(|mut div| {
+                        let pending_message = |div: ui::Div, msg: &'static str| {
+                            if is_latest {
+                                return div.child(msg);
+                            } else {
+                                return div.child("Canceled");
+                            }
+                        };
+
                         let t0 = run.started_at;
                         let Some(t1) = run.search_results_generated_at else {
-                            return div.child("Planning search...");
+                            return pending_message(div, "Planning search...");
                         };
                         div = div.child(format!("Planned search: {:>5} ms", (t1 - t0).as_millis()));
 
                         let Some(t2) = run.search_results_executed_at else {
-                            return div.child("Running search...");
+                            return pending_message(div, "Running search...");
                         };
                         div = div.child(format!("Ran search: {:>5} ms", (t2 - t1).as_millis()));
 
-                        let Some(t3) = run.finished_at else {
-                            if is_latest {
-                                return div.child("Filtering results...");
-                            } else {
-                                return div.child("Canceled");
-                            }
+                        let Some(t3) = run.search_results_filtered_at else {
+                            return pending_message(div, "Filtering results...");
+                        };
+                        div =
+                            div.child(format!("Filtered results: {:>5} ms", (t3 - t2).as_millis()));
+
+                        let Some(t4) = run.finished_at else {
+                            return pending_message(div, "Building excerpts");
                         };
-                        div.child(format!("Filtered results: {:>5} ms", (t3 - t2).as_millis()))
+                        div = div
+                            .child(format!("Build excerpts: {:>5} Β΅s", (t4 - t3).as_micros()))
+                            .child(format!("Total: {:>5} ms", (t4 - t0).as_millis()));
+                        div
                     }),
             )
     }

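For display, each generated query's regex is now split into its top-level alternation branches via `regex-syntax`, falling back to the raw pattern when parsing fails or there is no top-level alternation. A standalone sketch of that approach (the patterns below are invented; the crate is the `regex-syntax` dependency added to Cargo.toml above):

```rust
// Split a regex into its top-level alternation branches for display,
// falling back to the whole pattern when it has none.
fn alternation_branches(pattern: &str) -> Vec<String> {
    let mut parser = regex_syntax::ast::parse::Parser::new();
    match parser.parse(pattern) {
        Ok(regex_syntax::ast::Ast::Alternation(ref alt)) => {
            alt.asts.iter().map(|ast| ast.to_string()).collect()
        }
        _ => vec![pattern.to_string()],
    }
}

fn main() {
    let branches = alternation_branches(r"fn \w+_test|#\[test\]");
    assert_eq!(branches.len(), 2);
    println!("{branches:?}");

    // A pattern with no top-level alternation comes back unchanged.
    assert_eq!(alternation_branches("just_one"), vec!["just_one"]);
}
```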
crates/zeta_cli/Cargo.toml πŸ”—

@@ -39,8 +39,10 @@ paths.workspace = true
 polars = { version = "0.51", features = ["lazy", "dtype-struct", "parquet"] }
 project.workspace = true
 prompt_store.workspace = true
+pulldown-cmark.workspace = true
 release_channel.workspace = true
 reqwest_client.workspace = true
+toml.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 settings.workspace = true

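The new `toml` and `pulldown-cmark` dependencies back the multi-format example loader introduced below. A minimal sketch of the serde/TOML round-trip used by the Json and Toml arms, with a made-up `MiniExample` standing in for the real `Example` struct:

```rust
// Round-trip a struct through TOML with serde; MiniExample is a hypothetical
// stand-in with only two of the fields the real Example carries.
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct MiniExample {
    repository_url: String,
    revision: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let example = MiniExample {
        repository_url: "https://github.com/owner/repo.git".into(),
        revision: "abc123".into(),
    };

    let text = toml::to_string_pretty(&example)?;
    let parsed: MiniExample = toml::from_str(&text)?;
    assert_eq!(parsed, example);
    println!("{text}");
    Ok(())
}
```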
crates/zeta_cli/src/example.rs πŸ”—

@@ -0,0 +1,355 @@
+use std::{
+    borrow::Cow,
+    env,
+    fmt::{self, Display},
+    fs,
+    io::Write,
+    mem,
+    path::{Path, PathBuf},
+};
+
+use anyhow::{Context as _, Result};
+use clap::ValueEnum;
+use gpui::http_client::Url;
+use pulldown_cmark::CowStr;
+use serde::{Deserialize, Serialize};
+
+const CURSOR_POSITION_HEADING: &str = "Cursor Position";
+const EDIT_HISTORY_HEADING: &str = "Edit History";
+const EXPECTED_PATCH_HEADING: &str = "Expected Patch";
+const EXPECTED_EXCERPTS_HEADING: &str = "Expected Excerpts";
+const REPOSITORY_URL_FIELD: &str = "repository_url";
+const REVISION_FIELD: &str = "revision";
+
+#[derive(Debug)]
+pub struct NamedExample {
+    pub name: String,
+    pub example: Example,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Example {
+    pub repository_url: String,
+    pub revision: String,
+    pub cursor_path: PathBuf,
+    pub cursor_position: String,
+    pub edit_history: Vec<String>,
+    pub expected_patch: String,
+    pub expected_excerpts: Vec<ExpectedExcerpt>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpectedExcerpt {
+    path: PathBuf,
+    text: String,
+}
+
+#[derive(ValueEnum, Debug, Clone)]
+pub enum ExampleFormat {
+    Json,
+    Toml,
+    Md,
+}
+
+impl NamedExample {
+    pub fn load(path: impl AsRef<Path>) -> Result<Self> {
+        let path = path.as_ref();
+        let content = std::fs::read_to_string(path)?;
+        let ext = path.extension();
+
+        match ext.and_then(|s| s.to_str()) {
+            Some("json") => Ok(Self {
+                name: path.file_name().unwrap_or_default().display().to_string(),
+                example: serde_json::from_str(&content)?,
+            }),
+            Some("toml") => Ok(Self {
+                name: path.file_name().unwrap_or_default().display().to_string(),
+                example: toml::from_str(&content)?,
+            }),
+            Some("md") => Self::parse_md(&content),
+            Some(_) => {
+                anyhow::bail!("Unrecognized example extension: {}", ext.unwrap().display());
+            }
+            None => {
+                anyhow::bail!(
+                    "Failed to determine example type since the file does not have an extension."
+                );
+            }
+        }
+    }
+
+    pub fn parse_md(input: &str) -> Result<Self> {
+        use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Parser, Tag, TagEnd};
+
+        let parser = Parser::new(input);
+
+        let mut named = NamedExample {
+            name: String::new(),
+            example: Example {
+                repository_url: String::new(),
+                revision: String::new(),
+                cursor_path: PathBuf::new(),
+                cursor_position: String::new(),
+                edit_history: Vec::new(),
+                expected_patch: String::new(),
+                expected_excerpts: Vec::new(),
+            },
+        };
+
+        let mut text = String::new();
+        let mut current_section = String::new();
+        let mut block_info: CowStr = "".into();
+
+        for event in parser {
+            match event {
+                Event::Text(line) => {
+                    text.push_str(&line);
+
+                    if !named.name.is_empty()
+                        && current_section.is_empty()
+                        // in h1 section
+                        && let Some((field, value)) = line.split_once('=')
+                    {
+                        match field.trim() {
+                            REPOSITORY_URL_FIELD => {
+                                named.example.repository_url = value.trim().to_string();
+                            }
+                            REVISION_FIELD => {
+                                named.example.revision = value.trim().to_string();
+                            }
+                            _ => {
+                                eprintln!("Warning: Unrecognized field `{field}`");
+                            }
+                        }
+                    }
+                }
+                Event::End(TagEnd::Heading(HeadingLevel::H1)) => {
+                    if !named.name.is_empty() {
+                        anyhow::bail!(
+                            "Found multiple H1 headings. There should only be one with the name of the example."
+                        );
+                    }
+                    named.name = mem::take(&mut text);
+                }
+                Event::End(TagEnd::Heading(HeadingLevel::H2)) => {
+                    current_section = mem::take(&mut text);
+                }
+                Event::End(TagEnd::Heading(level)) => {
+                    anyhow::bail!("Unexpected heading level: {level}");
+                }
+                Event::Start(Tag::CodeBlock(kind)) => {
+                    match kind {
+                        CodeBlockKind::Fenced(info) => {
+                            block_info = info;
+                        }
+                        CodeBlockKind::Indented => {
+                            anyhow::bail!("Unexpected indented codeblock");
+                        }
+                    };
+                }
+                Event::Start(_) => {
+                    text.clear();
+                    block_info = "".into();
+                }
+                Event::End(TagEnd::CodeBlock) => {
+                    if current_section.eq_ignore_ascii_case(EDIT_HISTORY_HEADING) {
+                        named.example.edit_history.push(mem::take(&mut text));
+                    } else if current_section.eq_ignore_ascii_case(CURSOR_POSITION_HEADING) {
+                        let path = PathBuf::from(block_info.trim());
+                        named.example.cursor_path = path;
+                        named.example.cursor_position = mem::take(&mut text);
+                    } else if current_section.eq_ignore_ascii_case(EXPECTED_PATCH_HEADING) {
+                        named.example.expected_patch = mem::take(&mut text);
+                    } else if current_section.eq_ignore_ascii_case(EXPECTED_EXCERPTS_HEADING) {
+                        let path = PathBuf::from(block_info.trim());
+                        named.example.expected_excerpts.push(ExpectedExcerpt {
+                            path,
+                            text: mem::take(&mut text),
+                        });
+                    } else {
+                        eprintln!("Warning: Unrecognized section `{current_section:?}`")
+                    }
+                }
+                _ => {}
+            }
+        }
+
+        if named.example.cursor_path.as_path() == Path::new("")
+            || named.example.cursor_position.is_empty()
+        {
+            anyhow::bail!("Missing cursor position codeblock");
+        }
+
+        Ok(named)
+    }
+
+    pub fn write(&self, format: ExampleFormat, mut out: impl Write) -> Result<()> {
+        match format {
+            ExampleFormat::Json => Ok(serde_json::to_writer(out, &self.example)?),
+            ExampleFormat::Toml => {
+                Ok(out.write_all(toml::to_string_pretty(&self.example)?.as_bytes())?)
+            }
+            ExampleFormat::Md => Ok(write!(out, "{}", self)?),
+        }
+    }
+
+    #[allow(unused)]
+    pub async fn setup_worktree(&self) -> Result<PathBuf> {
+        let worktrees_dir = env::current_dir()?.join("target").join("zeta-worktrees");
+        let repos_dir = env::current_dir()?.join("target").join("zeta-repos");
+        fs::create_dir_all(&repos_dir)?;
+        fs::create_dir_all(&worktrees_dir)?;
+
+        let (repo_owner, repo_name) = self.repo_name()?;
+
+        let repo_dir = repos_dir.join(repo_owner.as_ref()).join(repo_name.as_ref());
+        if !repo_dir.is_dir() {
+            fs::create_dir_all(&repo_dir)?;
+            run_git(&repo_dir, &["init"]).await?;
+            run_git(
+                &repo_dir,
+                &["remote", "add", "origin", &self.example.repository_url],
+            )
+            .await?;
+        }
+
+        run_git(
+            &repo_dir,
+            &["fetch", "--depth", "1", "origin", &self.example.revision],
+        )
+        .await?;
+
+        let worktree_path = worktrees_dir.join(&self.name);
+
+        if worktree_path.is_dir() {
+            run_git(&worktree_path, &["clean", "--force", "-d"]).await?;
+            run_git(&worktree_path, &["reset", "--hard", "HEAD"]).await?;
+            run_git(&worktree_path, &["checkout", &self.example.revision]).await?;
+        } else {
+            let worktree_path_string = worktree_path.to_string_lossy();
+            run_git(
+                &repo_dir,
+                &[
+                    "worktree",
+                    "add",
+                    "-f",
+                    &worktree_path_string,
+                    &self.example.revision,
+                ],
+            )
+            .await?;
+        }
+
+        Ok(worktree_path)
+    }
+
+    #[allow(unused)]
+    fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> {
+        // git@github.com:owner/repo.git
+        if self.example.repository_url.contains('@') {
+            let (owner, repo) = self
+                .example
+                .repository_url
+                .split_once(':')
+                .context("expected : in git url")?
+                .1
+                .split_once('/')
+                .context("expected / in git url")?;
+            Ok((
+                Cow::Borrowed(owner),
+                Cow::Borrowed(repo.trim_end_matches(".git")),
+            ))
+        // http://github.com/owner/repo.git
+        } else {
+            let url = Url::parse(&self.example.repository_url)?;
+            let mut segments = url.path_segments().context("empty http url")?;
+            let owner = segments
+                .next()
+                .context("expected owner path segment")?
+                .to_string();
+            let repo = segments
+                .next()
+                .context("expected repo path segment")?
+                .trim_end_matches(".git")
+                .to_string();
+            assert!(segments.next().is_none());
+
+            Ok((owner.into(), repo.into()))
+        }
+    }
+}
+
+async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> {
+    let output = smol::process::Command::new("git")
+        .current_dir(repo_path)
+        .args(args)
+        .output()
+        .await?;
+
+    anyhow::ensure!(
+        output.status.success(),
+        "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}",
+        args.join(" "),
+        repo_path.display(),
+        output.status,
+        String::from_utf8_lossy(&output.stderr),
+        String::from_utf8_lossy(&output.stdout),
+    );
+    Ok(String::from_utf8(output.stdout)?.trim().to_string())
+}
+
+impl Display for NamedExample {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "# {}\n\n", self.name)?;
+        write!(
+            f,
+            "{REPOSITORY_URL_FIELD} = {}\n",
+            self.example.repository_url
+        )?;
+        write!(f, "{REVISION_FIELD} = {}\n\n", self.example.revision)?;
+
+        write!(
+            f,
+            "## {CURSOR_POSITION_HEADING}\n\n`````{}\n{}`````\n",
+            self.example.cursor_path.display(),
+            self.example.cursor_position
+        )?;
+        write!(f, "## {EDIT_HISTORY_HEADING}\n\n")?;
+
+        if !self.example.edit_history.is_empty() {
+            write!(f, "`````diff\n")?;
+            for item in &self.example.edit_history {
+                write!(f, "{item}")?;
+            }
+            write!(f, "`````\n")?;
+        }
+
+        if !self.example.expected_patch.is_empty() {
+            write!(
+                f,
+                "\n## {EXPECTED_PATCH_HEADING}\n\n`````diff\n{}`````\n",
+                self.example.expected_patch
+            )?;
+        }
+
+        if !self.example.expected_excerpts.is_empty() {
+            write!(f, "\n## {EXPECTED_EXCERPTS_HEADING}\n\n")?;
+
+            for excerpt in &self.example.expected_excerpts {
+                write!(
+                    f,
+                    "`````{}{}\n{}`````\n\n",
+                    excerpt
+                        .path
+                        .extension()
+                        .map(|ext| format!("{} ", ext.to_string_lossy()))
+                        .unwrap_or_default(),
+                    excerpt.path.display(),
+                    excerpt.text
+                )?;
+            }
+        }
+
+        Ok(())
+    }
+}

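`parse_md` above pulls headings and fenced code blocks out of an example file by walking pulldown-cmark events. A standalone sketch of the same Start/Text/End event-walking pattern, reduced to collecting fenced blocks with their info strings (the input document below is invented):

```rust
// Collect (info-string, body) pairs for fenced code blocks by walking
// pulldown-cmark events, using the same Start/Text/End shape as parse_md.
use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag, TagEnd};

fn fenced_blocks(input: &str) -> Vec<(String, String)> {
    let mut blocks = Vec::new();
    let mut info = String::new();
    let mut body = String::new();
    let mut in_block = false;

    for event in Parser::new(input) {
        match event {
            Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(fence_info))) => {
                info = fence_info.to_string();
                body.clear();
                in_block = true;
            }
            Event::Text(text) if in_block => body.push_str(&text),
            Event::End(TagEnd::CodeBlock) if in_block => {
                blocks.push((std::mem::take(&mut info), std::mem::take(&mut body)));
                in_block = false;
            }
            _ => {}
        }
    }
    blocks
}

fn main() {
    let md = "## Cursor Position\n\n~~~src/main.rs\nfn main() {}\n~~~\n";
    let blocks = fenced_blocks(md);
    assert_eq!(blocks[0].0, "src/main.rs");
    assert!(blocks[0].1.contains("fn main()"));
}
```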
crates/zeta_cli/src/main.rs πŸ”—

@@ -1,26 +1,32 @@
+mod example;
 mod headless;
-mod retrieval_stats;
 mod source_location;
+mod syntax_retrieval_stats;
 mod util;
 
-use crate::retrieval_stats::retrieval_stats;
+use crate::example::{ExampleFormat, NamedExample};
+use crate::syntax_retrieval_stats::retrieval_stats;
+use ::serde::Serialize;
 use ::util::paths::PathStyle;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
 use clap::{Args, Parser, Subcommand};
-use cloud_llm_client::predict_edits_v3::{self};
+use cloud_llm_client::predict_edits_v3::{self, Excerpt};
+use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
 use edit_prediction_context::{
-    EditPredictionContextOptions, EditPredictionExcerptOptions, EditPredictionScoreOptions,
+    EditPredictionContextOptions, EditPredictionExcerpt, EditPredictionExcerptOptions,
+    EditPredictionScoreOptions, Line,
 };
-use gpui::{Application, AsyncApp, prelude::*};
-use language::Bias;
-use language_model::LlmApiToken;
-use project::Project;
-use release_channel::AppVersion;
+use futures::StreamExt as _;
+use futures::channel::mpsc;
+use gpui::{Application, AsyncApp, Entity, prelude::*};
+use language::{Bias, Buffer, BufferSnapshot, OffsetRangeExt, Point};
+use language_model::LanguageModelRegistry;
+use project::{Project, Worktree};
 use reqwest_client::ReqwestClient;
 use serde_json::json;
+use std::io;
 use std::{collections::HashSet, path::PathBuf, process::exit, str::FromStr, sync::Arc};
-use zeta::{PerformPredictEditsParams, Zeta};
-use zeta2::ContextMode;
+use zeta2::{ContextMode, LlmContextOptions, SearchToolQuery};
 
 use crate::headless::ZetaCliAppState;
 use crate::source_location::SourceLocation;
@@ -30,27 +36,57 @@ use crate::util::{open_buffer, open_buffer_with_language_server};
 #[command(name = "zeta")]
 struct ZetaCliArgs {
     #[command(subcommand)]
-    command: Commands,
+    command: Command,
 }
 
 #[derive(Subcommand, Debug)]
-enum Commands {
-    Context(ContextArgs),
-    Zeta2Context {
+enum Command {
+    Zeta1 {
+        #[command(subcommand)]
+        command: Zeta1Command,
+    },
+    Zeta2 {
         #[clap(flatten)]
-        zeta2_args: Zeta2Args,
+        args: Zeta2Args,
+        #[command(subcommand)]
+        command: Zeta2Command,
+    },
+    ConvertExample {
+        path: PathBuf,
+        #[arg(long, value_enum, default_value_t = ExampleFormat::Md)]
+        output_format: ExampleFormat,
+    },
+}
+
+#[derive(Subcommand, Debug)]
+enum Zeta1Command {
+    Context {
         #[clap(flatten)]
         context_args: ContextArgs,
     },
-    Predict {
-        #[arg(long)]
-        predict_edits_body: Option<FileOrStdin>,
+}
+
+#[derive(Subcommand, Debug)]
+enum Zeta2Command {
+    Syntax {
         #[clap(flatten)]
-        context_args: Option<ContextArgs>,
+        syntax_args: Zeta2SyntaxArgs,
+        #[command(subcommand)]
+        command: Zeta2SyntaxCommand,
     },
-    RetrievalStats {
+    Llm {
+        #[command(subcommand)]
+        command: Zeta2LlmCommand,
+    },
+}
+
+#[derive(Subcommand, Debug)]
+enum Zeta2SyntaxCommand {
+    Context {
         #[clap(flatten)]
-        zeta2_args: Zeta2Args,
+        context_args: ContextArgs,
+    },
+    Stats {
         #[arg(long)]
         worktree: PathBuf,
         #[arg(long)]
@@ -62,6 +98,14 @@ enum Commands {
     },
 }
 
+#[derive(Subcommand, Debug)]
+enum Zeta2LlmCommand {
+    Context {
+        #[clap(flatten)]
+        context_args: ContextArgs,
+    },
+}
+
 #[derive(Debug, Args)]
 #[group(requires = "worktree")]
 struct ContextArgs {
@@ -72,7 +116,7 @@ struct ContextArgs {
     #[arg(long)]
     use_language_server: bool,
     #[arg(long)]
-    events: Option<FileOrStdin>,
+    edit_history: Option<FileOrStdin>,
 }
 
 #[derive(Debug, Args)]
@@ -93,12 +137,42 @@ struct Zeta2Args {
     output_format: OutputFormat,
     #[arg(long, default_value_t = 42)]
     file_indexing_parallelism: usize,
+}
+
+#[derive(Debug, Args)]
+struct Zeta2SyntaxArgs {
     #[arg(long, default_value_t = false)]
     disable_imports_gathering: bool,
     #[arg(long, default_value_t = u8::MAX)]
     max_retrieved_definitions: u8,
 }
 
+fn syntax_args_to_options(
+    zeta2_args: &Zeta2Args,
+    syntax_args: &Zeta2SyntaxArgs,
+    omit_excerpt_overlaps: bool,
+) -> zeta2::ZetaOptions {
+    zeta2::ZetaOptions {
+        context: ContextMode::Syntax(EditPredictionContextOptions {
+            max_retrieved_declarations: syntax_args.max_retrieved_definitions,
+            use_imports: !syntax_args.disable_imports_gathering,
+            excerpt: EditPredictionExcerptOptions {
+                max_bytes: zeta2_args.max_excerpt_bytes,
+                min_bytes: zeta2_args.min_excerpt_bytes,
+                target_before_cursor_over_total_bytes: zeta2_args
+                    .target_before_cursor_over_total_bytes,
+            },
+            score: EditPredictionScoreOptions {
+                omit_excerpt_overlaps,
+            },
+        }),
+        max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes,
+        max_prompt_bytes: zeta2_args.max_prompt_bytes,
+        prompt_format: zeta2_args.prompt_format.clone().into(),
+        file_indexing_parallelism: zeta2_args.file_indexing_parallelism,
+    }
+}
+
 #[derive(clap::ValueEnum, Default, Debug, Clone)]
 enum PromptFormat {
     MarkedExcerpt,
@@ -153,22 +227,25 @@ impl FromStr for FileOrStdin {
     }
 }
 
-enum GetContextOutput {
-    Zeta1(zeta::GatherContextOutput),
-    Zeta2(String),
+struct LoadedContext {
+    full_path_str: String,
+    snapshot: BufferSnapshot,
+    clipped_cursor: Point,
+    worktree: Entity<Worktree>,
+    project: Entity<Project>,
+    buffer: Entity<Buffer>,
 }
 
-async fn get_context(
-    zeta2_args: Option<Zeta2Args>,
-    args: ContextArgs,
+async fn load_context(
+    args: &ContextArgs,
     app_state: &Arc<ZetaCliAppState>,
     cx: &mut AsyncApp,
-) -> Result<GetContextOutput> {
+) -> Result<LoadedContext> {
     let ContextArgs {
         worktree: worktree_path,
         cursor,
         use_language_server,
-        events,
+        ..
     } = args;
 
     let worktree_path = worktree_path.canonicalize()?;
@@ -192,7 +269,7 @@ async fn get_context(
         .await?;
 
     let mut ready_languages = HashSet::default();
-    let (_lsp_open_handle, buffer) = if use_language_server {
+    let (_lsp_open_handle, buffer) = if *use_language_server {
         let (lsp_open_handle, _, buffer) = open_buffer_with_language_server(
             project.clone(),
             worktree.clone(),
@@ -232,95 +309,294 @@ async fn get_context(
         }
     }
 
-    let events = match events {
+    Ok(LoadedContext {
+        full_path_str,
+        snapshot,
+        clipped_cursor,
+        worktree,
+        project,
+        buffer,
+    })
+}
+
+async fn zeta2_syntax_context(
+    zeta2_args: Zeta2Args,
+    syntax_args: Zeta2SyntaxArgs,
+    args: ContextArgs,
+    app_state: &Arc<ZetaCliAppState>,
+    cx: &mut AsyncApp,
+) -> Result<String> {
+    let LoadedContext {
+        worktree,
+        project,
+        buffer,
+        clipped_cursor,
+        ..
+    } = load_context(&args, app_state, cx).await?;
+
+    // wait for worktree scan before starting zeta2 so that wait_for_initial_indexing waits for
+    // the whole worktree.
+    worktree
+        .read_with(cx, |worktree, _cx| {
+            worktree.as_local().unwrap().scan_complete()
+        })?
+        .await;
+    let output = cx
+        .update(|cx| {
+            let zeta = cx.new(|cx| {
+                zeta2::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx)
+            });
+            let indexing_done_task = zeta.update(cx, |zeta, cx| {
+                zeta.set_options(syntax_args_to_options(&zeta2_args, &syntax_args, true));
+                zeta.register_buffer(&buffer, &project, cx);
+                zeta.wait_for_initial_indexing(&project, cx)
+            });
+            cx.spawn(async move |cx| {
+                indexing_done_task.await?;
+                let request = zeta
+                    .update(cx, |zeta, cx| {
+                        let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor);
+                        zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx)
+                    })?
+                    .await?;
+
+                let (prompt_string, section_labels) = cloud_zeta2_prompt::build_prompt(&request)?;
+
+                match zeta2_args.output_format {
+                    OutputFormat::Prompt => anyhow::Ok(prompt_string),
+                    OutputFormat::Request => anyhow::Ok(serde_json::to_string_pretty(&request)?),
+                    OutputFormat::Full => anyhow::Ok(serde_json::to_string_pretty(&json!({
+                        "request": request,
+                        "prompt": prompt_string,
+                        "section_labels": section_labels,
+                    }))?),
+                }
+            })
+        })?
+        .await?;
+
+    Ok(output)
+}
+
+async fn zeta2_llm_context(
+    zeta2_args: Zeta2Args,
+    context_args: ContextArgs,
+    app_state: &Arc<ZetaCliAppState>,
+    cx: &mut AsyncApp,
+) -> Result<String> {
+    let LoadedContext {
+        buffer,
+        clipped_cursor,
+        snapshot: cursor_snapshot,
+        project,
+        ..
+    } = load_context(&context_args, app_state, cx).await?;
+
+    let cursor_position = cursor_snapshot.anchor_after(clipped_cursor);
+
+    cx.update(|cx| {
+        LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+            registry
+                .provider(&zeta2::related_excerpts::MODEL_PROVIDER_ID)
+                .unwrap()
+                .authenticate(cx)
+        })
+    })?
+    .await?;
+
+    let edit_history_unified_diff = match context_args.edit_history {
         Some(events) => events.read_to_string().await?,
         None => String::new(),
     };
 
-    if let Some(zeta2_args) = zeta2_args {
-        // wait for worktree scan before starting zeta2 so that wait_for_initial_indexing waits for
-        // the whole worktree.
-        worktree
-            .read_with(cx, |worktree, _cx| {
-                worktree.as_local().unwrap().scan_complete()
-            })?
-            .await;
-        let output = cx
-            .update(|cx| {
-                let zeta = cx.new(|cx| {
-                    zeta2::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx)
-                });
-                let indexing_done_task = zeta.update(cx, |zeta, cx| {
-                    zeta.set_options(zeta2_args.to_options(true));
-                    zeta.register_buffer(&buffer, &project, cx);
-                    zeta.wait_for_initial_indexing(&project, cx)
-                });
-                cx.spawn(async move |cx| {
-                    indexing_done_task.await?;
-                    let request = zeta
-                        .update(cx, |zeta, cx| {
-                            let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor);
-                            zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx)
-                        })?
-                        .await?;
-
-                    let (prompt_string, section_labels) =
-                        cloud_zeta2_prompt::build_prompt(&request)?;
-
-                    match zeta2_args.output_format {
-                        OutputFormat::Prompt => anyhow::Ok(prompt_string),
-                        OutputFormat::Request => {
-                            anyhow::Ok(serde_json::to_string_pretty(&request)?)
-                        }
-                        OutputFormat::Full => anyhow::Ok(serde_json::to_string_pretty(&json!({
-                            "request": request,
-                            "prompt": prompt_string,
-                            "section_labels": section_labels,
-                        }))?),
-                    }
-                })
-            })?
-            .await?;
-        Ok(GetContextOutput::Zeta2(output))
-    } else {
-        let prompt_for_events = move || (events, 0);
-        Ok(GetContextOutput::Zeta1(
-            cx.update(|cx| {
-                zeta::gather_context(
-                    full_path_str,
-                    &snapshot,
-                    clipped_cursor,
-                    prompt_for_events,
-                    cx,
-                )
-            })?
-            .await?,
-        ))
-    }
-}
+    let (debug_tx, mut debug_rx) = mpsc::unbounded();
 
-impl Zeta2Args {
-    fn to_options(&self, omit_excerpt_overlaps: bool) -> zeta2::ZetaOptions {
-        zeta2::ZetaOptions {
-            context: ContextMode::Syntax(EditPredictionContextOptions {
-                max_retrieved_declarations: self.max_retrieved_definitions,
-                use_imports: !self.disable_imports_gathering,
-                excerpt: EditPredictionExcerptOptions {
-                    max_bytes: self.max_excerpt_bytes,
-                    min_bytes: self.min_excerpt_bytes,
-                    target_before_cursor_over_total_bytes: self
-                        .target_before_cursor_over_total_bytes,
-                },
-                score: EditPredictionScoreOptions {
-                    omit_excerpt_overlaps,
+    let excerpt_options = EditPredictionExcerptOptions {
+        max_bytes: zeta2_args.max_excerpt_bytes,
+        min_bytes: zeta2_args.min_excerpt_bytes,
+        target_before_cursor_over_total_bytes: zeta2_args.target_before_cursor_over_total_bytes,
+    };
+
+    let related_excerpts = cx
+        .update(|cx| {
+            zeta2::related_excerpts::find_related_excerpts(
+                buffer,
+                cursor_position,
+                &project,
+                edit_history_unified_diff,
+                &LlmContextOptions {
+                    excerpt: excerpt_options.clone(),
                 },
-            }),
-            max_diagnostic_bytes: self.max_diagnostic_bytes,
-            max_prompt_bytes: self.max_prompt_bytes,
-            prompt_format: self.prompt_format.clone().into(),
-            file_indexing_parallelism: self.file_indexing_parallelism,
+                Some(debug_tx),
+                cx,
+            )
+        })?
+        .await?;
+
+    let cursor_excerpt = EditPredictionExcerpt::select_from_buffer(
+        clipped_cursor,
+        &cursor_snapshot,
+        &excerpt_options,
+        None,
+    )
+    .context("line didn't fit")?;
+
+    #[derive(Serialize)]
+    struct Output {
+        excerpts: Vec<OutputExcerpt>,
+        formatted_excerpts: String,
+        meta: OutputMeta,
+    }
+
+    #[derive(Default, Serialize)]
+    struct OutputMeta {
+        search_prompt: String,
+        search_queries: Vec<SearchToolQuery>,
+    }
+
+    #[derive(Serialize)]
+    struct OutputExcerpt {
+        path: PathBuf,
+        #[serde(flatten)]
+        excerpt: Excerpt,
+    }
+
+    let mut meta = OutputMeta::default();
+
+    while let Some(debug_info) = debug_rx.next().await {
+        match debug_info {
+            zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => {
+                meta.search_prompt = info.search_prompt;
+            }
+            zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => {
+                meta.search_queries = info.queries
+            }
+            _ => {}
         }
     }
+
+    cx.update(|cx| {
+        let mut excerpts = Vec::new();
+        let mut formatted_excerpts = String::new();
+
+        let cursor_insertions = [(
+            predict_edits_v3::Point {
+                line: Line(clipped_cursor.row),
+                column: clipped_cursor.column,
+            },
+            CURSOR_MARKER,
+        )];
+
+        let mut cursor_excerpt_added = false;
+
+        for (buffer, ranges) in related_excerpts {
+            let excerpt_snapshot = buffer.read(cx).snapshot();
+
+            let mut line_ranges = ranges
+                .into_iter()
+                .map(|range| {
+                    let point_range = range.to_point(&excerpt_snapshot);
+                    Line(point_range.start.row)..Line(point_range.end.row)
+                })
+                .collect::<Vec<_>>();
+
+            let Some(file) = excerpt_snapshot.file() else {
+                continue;
+            };
+            let path = file.full_path(cx);
+
+            let is_cursor_file = path == cursor_snapshot.file().unwrap().full_path(cx);
+            if is_cursor_file {
+                let insertion_ix = line_ranges
+                    .binary_search_by(|probe| {
+                        probe
+                            .start
+                            .cmp(&cursor_excerpt.line_range.start)
+                            .then(cursor_excerpt.line_range.end.cmp(&probe.end))
+                    })
+                    .unwrap_or_else(|ix| ix);
+                line_ranges.insert(insertion_ix, cursor_excerpt.line_range.clone());
+                cursor_excerpt_added = true;
+            }
+
+            let merged_excerpts =
+                zeta2::merge_excerpts::merge_excerpts(&excerpt_snapshot, line_ranges)
+                    .into_iter()
+                    .map(|excerpt| OutputExcerpt {
+                        path: path.clone(),
+                        excerpt,
+                    });
+
+            let excerpt_start_ix = excerpts.len();
+            excerpts.extend(merged_excerpts);
+
+            write_codeblock(
+                &path,
+                excerpts[excerpt_start_ix..].iter().map(|e| &e.excerpt),
+                if is_cursor_file {
+                    &cursor_insertions
+                } else {
+                    &[]
+                },
+                Line(excerpt_snapshot.max_point().row),
+                true,
+                &mut formatted_excerpts,
+            );
+        }
+
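+        // If the cursor file wasn't among the related excerpts, emit the cursor excerpt as its own code block so it is always included.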
+        if !cursor_excerpt_added {
+            write_codeblock(
+                &cursor_snapshot.file().unwrap().full_path(cx),
+                &[Excerpt {
+                    start_line: cursor_excerpt.line_range.start,
+                    text: cursor_excerpt.text(&cursor_snapshot).body.into(),
+                }],
+                &cursor_insertions,
+                Line(cursor_snapshot.max_point().row),
+                true,
+                &mut formatted_excerpts,
+            );
+        }
+
+        let output = Output {
+            excerpts,
+            formatted_excerpts,
+            meta,
+        };
+
+        Ok(serde_json::to_string_pretty(&output)?)
+    })
+    .unwrap()
+}
+
+async fn zeta1_context(
+    args: ContextArgs,
+    app_state: &Arc<ZetaCliAppState>,
+    cx: &mut AsyncApp,
+) -> Result<zeta::GatherContextOutput> {
+    let LoadedContext {
+        full_path_str,
+        snapshot,
+        clipped_cursor,
+        ..
+    } = load_context(&args, app_state, cx).await?;
+
+    let events = match args.edit_history {
+        Some(events) => events.read_to_string().await?,
+        None => String::new(),
+    };
+
+    let prompt_for_events = move || (events, 0);
+    cx.update(|cx| {
+        zeta::gather_context(
+            full_path_str,
+            &snapshot,
+            clipped_cursor,
+            prompt_for_events,
+            cx,
+        )
+    })?
+    .await
 }
 
 fn main() {
@@ -334,80 +610,56 @@ fn main() {
         let app_state = Arc::new(headless::init(cx));
         cx.spawn(async move |cx| {
             let result = match args.command {
-                Commands::Zeta2Context {
-                    zeta2_args,
-                    context_args,
-                } => match get_context(Some(zeta2_args), context_args, &app_state, cx).await {
-                    Ok(GetContextOutput::Zeta1 { .. }) => unreachable!(),
-                    Ok(GetContextOutput::Zeta2(output)) => Ok(output),
-                    Err(err) => Err(err),
-                },
-                Commands::Context(context_args) => {
-                    match get_context(None, context_args, &app_state, cx).await {
-                        Ok(GetContextOutput::Zeta1(output)) => {
-                            Ok(serde_json::to_string_pretty(&output.body).unwrap())
-                        }
-                        Ok(GetContextOutput::Zeta2 { .. }) => unreachable!(),
-                        Err(err) => Err(err),
-                    }
-                }
-                Commands::Predict {
-                    predict_edits_body,
-                    context_args,
+                Command::Zeta1 {
+                    command: Zeta1Command::Context { context_args },
                 } => {
-                    cx.spawn(async move |cx| {
-                        let app_version = cx.update(|cx| AppVersion::global(cx))?;
-                        app_state.client.sign_in(true, cx).await?;
-                        let llm_token = LlmApiToken::default();
-                        llm_token.refresh(&app_state.client).await?;
-
-                        let predict_edits_body =
-                            if let Some(predict_edits_body) = predict_edits_body {
-                                serde_json::from_str(&predict_edits_body.read_to_string().await?)?
-                            } else if let Some(context_args) = context_args {
-                                match get_context(None, context_args, &app_state, cx).await? {
-                                    GetContextOutput::Zeta1(output) => output.body,
-                                    GetContextOutput::Zeta2 { .. } => unreachable!(),
-                                }
-                            } else {
-                                return Err(anyhow!(
-                                    "Expected either --predict-edits-body-file \
-                                    or the required args of the `context` command."
-                                ));
-                            };
-
-                        let (response, _usage) =
-                            Zeta::perform_predict_edits(PerformPredictEditsParams {
-                                client: app_state.client.clone(),
-                                llm_token,
-                                app_version,
-                                body: predict_edits_body,
-                            })
-                            .await?;
-
-                        Ok(response.output_excerpt)
-                    })
-                    .await
+                    let context = zeta1_context(context_args, &app_state, cx).await.unwrap();
+                    serde_json::to_string_pretty(&context.body).map_err(|err| anyhow::anyhow!(err))
                 }
-                Commands::RetrievalStats {
-                    zeta2_args,
-                    worktree,
-                    extension,
-                    limit,
-                    skip,
+                Command::Zeta2 { args, command } => match command {
+                    Zeta2Command::Syntax {
+                        syntax_args,
+                        command,
+                    } => match command {
+                        Zeta2SyntaxCommand::Context { context_args } => {
+                            zeta2_syntax_context(args, syntax_args, context_args, &app_state, cx)
+                                .await
+                        }
+                        Zeta2SyntaxCommand::Stats {
+                            worktree,
+                            extension,
+                            limit,
+                            skip,
+                        } => {
+                            retrieval_stats(
+                                worktree,
+                                app_state,
+                                extension,
+                                limit,
+                                skip,
+                                syntax_args_to_options(&args, &syntax_args, false),
+                                cx,
+                            )
+                            .await
+                        }
+                    },
+                    Zeta2Command::Llm { command } => match command {
+                        Zeta2LlmCommand::Context { context_args } => {
+                            zeta2_llm_context(args, context_args, &app_state, cx).await
+                        }
+                    },
+                },
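+                // Convert the example to the requested output format, print it to stdout, and exit.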
+                Command::ConvertExample {
+                    path,
+                    output_format,
                 } => {
-                    retrieval_stats(
-                        worktree,
-                        app_state,
-                        extension,
-                        limit,
-                        skip,
-                        (&zeta2_args).to_options(false),
-                        cx,
-                    )
-                    .await
+                    let example = NamedExample::load(path).unwrap();
+                    example.write(output_format, io::stdout()).unwrap();
+                    let _ = cx.update(|cx| cx.quit());
+                    return;
                 }
             };
+
             match result {
                 Ok(output) => {
                     println!("{}", output);

docs/src/SUMMARY.md πŸ”—

@@ -165,6 +165,5 @@
   - [Local Collaboration](./development/local-collaboration.md)
   - [Using Debuggers](./development/debuggers.md)
   - [Glossary](./development/glossary.md)
-- [Release Process](./development/releases.md)
 - [Release Notes](./development/release-notes.md)
 - [Debugging Crashes](./development/debugging-crashes.md)

docs/src/ai/agent-panel.md πŸ”—

@@ -1,10 +1,10 @@
 # Agent Panel
 
-The Agent Panel allows you to interact with many LLMs and coding agents that can help with in various types of tasks, such as generating code, codebase understanding, and other general inquiries like writing emails, documentation, and more.
+The Agent Panel allows you to interact with many LLMs and coding agents that can help with various types of tasks, such as generating code, codebase understanding, and other general inquiries like writing emails, documentation, and more.
 
 To open it, use the `agent: new thread` action in [the Command Palette](../getting-started.md#command-palette) or click the ✨ (sparkles) icon in the status bar.
 
-## Getting Started
+## Getting Started {#getting-started}
 
 If you're using the Agent Panel for the first time, you need to have at least one LLM provider or external agent configured.
 You can do that by:
@@ -28,7 +28,7 @@ From this point on, you can interact with the many supported features outlined b
 By default, the Agent Panel uses Zed's first-party agent.
 
 To change that, go to the plus button in the top-right of the Agent Panel and choose another option.
-You choose to create a new [Text Thread](./text-threads.md) or, if you have [external agents](./external-agents.md) connected, you can create new threads with them.
+You can choose to create a new [Text Thread](./text-threads.md) or, if you have [external agents](./external-agents.md) connected, you can create new threads with them.
 
 ### Editing Messages {#editing-messages}
 
@@ -37,7 +37,7 @@ You can click on the card that contains your message and re-submit it with an ad
 
 ### Checkpoints {#checkpoints}
 
-Every time the AI performs an edit, you should see a "Restore Checkpoint" button to the top of your message, allowing you to return your code base to the state it was in prior to that message.
+Every time the AI performs an edit, you should see a "Restore Checkpoint" button at the top of your message, allowing you to return your code base to the state it was in prior to that message.
 
 The checkpoint button appears even if you interrupt the thread midway through an edit attempt, as this is likely a moment when you've identified that the agent is not heading in the right direction and you want to revert back.
 
@@ -78,7 +78,7 @@ Edit diffs also appear in individual buffers. If your active tab had edits made
 
 ## Adding Context {#adding-context}
 
-Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant context, manually adding whatever would be useful to fulfill your prompt is still very encouraged as a way to not only improve the AI's response quality but also to speed its response time up.
+Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant context, manually adding whatever would be useful to fulfill your prompt is still very encouraged as a way to not only improve the AI's response quality but also to speed up its response time.
 
 In Zed's Agent Panel, all pieces of context are added as mentions in the panel's message editor.
 You can type `@` to mention files, directories, symbols, previous threads, and rules files.
@@ -89,7 +89,7 @@ Copying images and pasting them in the panel's message editor is also supported.
 
 ### Token Usage {#token-usage}
 
-Zed surfaces how many tokens you are consuming for your currently active thread nearby the profile selector in the panel's message editor. Depending on how many pieces of context you add, your token consumption can grow rapidly.
+Zed surfaces how many tokens you are consuming for your currently active thread near the profile selector in the panel's message editor. Depending on how many pieces of context you add, your token consumption can grow rapidly.
 
 Once you approach the model's context window, a banner appears below the message editor suggesting to start a new thread with the current one summarized and added as context.
 You can also do this at any time with an ongoing thread via the "Agent Options" menu on the top right.
@@ -147,7 +147,7 @@ All [Zed's hosted models](./models.md) support tool calling out-of-the-box.
 
 ### MCP Servers {#mcp-servers}
 
-Similarly to the built-in tools, some models may not support all tools included in a given MCP Server. Zed's UI will inform about this via a warning icon that appears close to the model selector.
+Similarly to the built-in tools, some models may not support all tools included in a given MCP Server. Zed's UI will inform you about this via a warning icon that appears close to the model selector.
 
 ## Text Threads {#text-threads}
 

docs/src/ai/agent-settings.md πŸ”—

@@ -54,15 +54,33 @@ You can assign distinct and specific models for the following AI-powered feature
 
 ### Alternative Models for Inline Assists {#alternative-assists}
 
-The Inline Assist feature in particular has the capacity to perform multiple generations in parallel using different models.
-That is possible by assigning more than one model to it, taking the configuration shown above one step further.
+With the Inline Assistant in particular, you can send the same prompt to multiple models at once.
 
-When configured, the inline assist UI will surface controls to cycle between the outputs generated by each model.
+Here's how you can customize your `settings.json` to add this functionality:
+
+```json [settings]
+{
+  "agent": {
+    "default_model": {
+      "provider": "zed.dev",
+      "model": "claude-sonnet-4"
+    },
+    "inline_alternatives": [
+      {
+        "provider": "zed.dev",
+        "model": "gpt-4-mini"
+      }
+    ]
+  }
+}
+```
+
+When multiple models are configured, the Inline Assistant UI will surface buttons that allow you to cycle between the outputs generated by each model.
 
 The models you specify here are always used in _addition_ to your [default model](#default-model).
 
-For example, the following configuration will generate two outputs for every assist.
-One with Claude Sonnet 4 (the default model), and one with GPT-5-mini.
+For example, the following configuration will generate three outputs for every assist.
+One with Claude Sonnet 4 (the default model), another with GPT-5-mini, and another one with Gemini 2.5 Flash.
 
 ```json [settings]
 {
@@ -75,6 +93,10 @@ One with Claude Sonnet 4 (the default model), and one with GPT-5-mini.
       {
         "provider": "zed.dev",
         "model": "gpt-4-mini"
+      },
+      {
+        "provider": "zed.dev",
+        "model": "gemini-2.5-flash"
       }
     ]
   }
@@ -179,7 +201,7 @@ The default value is `false`.
 
 ### Message Editor Size
 
-Use the `message_editor_min_lines` setting to control minimum number of lines of height the agent message editor should have.
+Use the `message_editor_min_lines` setting to control the minimum number of lines of height the agent message editor should have.
 It is set to `4` by default, and the max number of lines is always double of the minimum.
 
 ```json [settings]
@@ -232,7 +254,7 @@ It is set to `true` by default, but if set to false, the card will be fully coll
 
 ### Feedback Controls
 
-Control whether to display the thumbs up/down buttons at the bottom of each agent response, allowing to give Zed feedback about the agent's performance.
+Control whether to display the thumbs up/down buttons at the bottom of each agent response, allowing you to give Zed feedback about the agent's performance.
 The default value is `true`.
 
 ```json [settings]

docs/src/ai/edit-prediction.md πŸ”—

@@ -4,8 +4,7 @@ Edit Prediction is Zed's mechanism for predicting the code you want to write thr
 Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions that can be quickly accepted by pressing `tab`.
 
 The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), which [requires being signed into Zed](../authentication.md#what-features-require-signing-in).
-
-Alternatively, you can use other providers like [GitHub Copilot](#github-copilot) (or [Enterprise](#github-copilot-enterprise)) or [Supermaven](#supermaven).
+Alternatively, you can use [other providers](#other-providers) like GitHub Copilot and Codestral.
 
 ## Configuring Zeta
 
@@ -257,7 +256,12 @@ To completely turn off edit prediction across all providers, explicitly set the
 },
 ```
 
-## Configuring GitHub Copilot {#github-copilot}
+## Configuring Other Providers {#other-providers}
+
+Zed's Edit Prediction also works with other completion model providers aside from Zeta.
+Learn about the available ones below.
+
+### GitHub Copilot {#github-copilot}
 
 To use GitHub Copilot as your provider, set this within `settings.json`:
 
@@ -271,7 +275,7 @@ To use GitHub Copilot as your provider, set this within `settings.json`:
 
 You should be able to sign-in to GitHub Copilot by clicking on the Copilot icon in the status bar and following the setup instructions.
 
-### Using GitHub Copilot Enterprise {#github-copilot-enterprise}
+#### Using GitHub Copilot Enterprise
 
 If your organization uses GitHub Copilot Enterprise, you can configure Zed to use your enterprise instance by specifying the enterprise URI in your `settings.json`:
 
@@ -287,14 +291,16 @@ If your organization uses GitHub Copilot Enterprise, you can configure Zed to us
 
 Replace `"https://your.enterprise.domain"` with the URL provided by your GitHub Enterprise administrator (e.g., `https://foo.ghe.com`).
 
-Once set, Zed will route Copilot requests through your enterprise endpoint. When you sign in by clicking the Copilot icon in the status bar, you will be redirected to your configured enterprise URL to complete authentication. All other Copilot features and usage remain the same.
+Once set, Zed will route Copilot requests through your enterprise endpoint.
+When you sign in by clicking the Copilot icon in the status bar, you will be redirected to your configured enterprise URL to complete authentication.
+All other Copilot features and usage remain the same.
 
 Copilot can provide multiple completion alternatives, and these can be navigated with the following actions:
 
 - {#action editor::NextEditPrediction} ({#kb editor::NextEditPrediction}): To cycle to the next edit prediction
 - {#action editor::PreviousEditPrediction} ({#kb editor::PreviousEditPrediction}): To cycle to the previous edit prediction
 
-## Configuring Supermaven {#supermaven}
+### Supermaven {#supermaven}
 
 To use Supermaven as your provider, set this within `settings.json`:
 
@@ -308,6 +314,21 @@ To use Supermaven as your provider, set this within `settings.json`:
 
 You should be able to sign-in to Supermaven by clicking on the Supermaven icon in the status bar and following the setup instructions.
 
+### Codestral {#codestral}
+
+To use Mistral's Codestral as your provider, start by going to the Agent Panel settings view by running the {#action agent::OpenSettings} action.
+Look for the Mistral item and add a Codestral API key in the corresponding text input.
+
+After that, you should be able to switch your provider to it in your `settings.json` file:
+
+```json [settings]
+{
+  "features": {
+    "edit_prediction_provider": "codestral"
+  }
+}
+```
+
 ## See also
 
-You may also use the [Agent Panel](./agent-panel.md) or the [Inline Assistant](./inline-assistant.md) to interact with language models, see the [AI documentation](./overview.md) for more information on the other AI features in Zed.
+To learn about other ways to interact with AI in Zed, you may also want to read about the [Agent Panel](./agent-panel.md) or the [Inline Assistant](./inline-assistant.md).

docs/src/ai/inline-assistant.md πŸ”—

@@ -2,17 +2,104 @@
 
 ## Usage Overview
 
-Use `ctrl-enter` to open the Inline Assistant nearly anywhere you can enter text: editors, text threads, the rules library, channel notes, and even within the terminal panel.
+Use {#kb assistant::InlineAssist} to open the Inline Assistant nearly anywhere you can enter text: editors, text threads, the rules library, channel notes, and even within the terminal panel.
 
 The Inline Assistant allows you to send the current selection (or the current line) to a language model and modify the selection with the language model's response.
 
-You can also perform multiple generation requests in parallel by pressing `ctrl-enter` with multiple cursors, or by pressing the same binding with a selection that spans multiple excerpts in a multibuffer.
+## Getting Started
 
-## Context
+If you're using the Inline Assistant for the first time, you need to have at least one LLM provider or external agent configured.
+You can do that by:
 
-Give the Inline Assistant context the same way you can in [the Agent Panel](./agent-panel.md), allowing you to provide additional instructions or rules for code transformations with @-mentions.
+1. [subscribing to our Pro plan](https://zed.dev/pricing), so you have access to our hosted models
+2. [using your own API keys](./llm-providers.md#use-your-own-keys), either from model providers like Anthropic or model gateways like OpenRouter.
 
-A useful pattern here is to create a thread in the Agent Panel, and then mention that thread with `@thread` in the Inline Assistant to include it as context.
+If you have already set up an LLM provider to interact with [the Agent Panel](./agent-panel.md#getting-started), then that will also work for the Inline Assistant.
+
+> Unlike in the Agent Panel, though, [external agents](./external-agents.md) are the one exception at the moment.
+> They currently can't be used for generating changes with the Inline Assistant.
+
+## Adding Context
+
+You can add context in the Inline Assistant the same way you can in [the Agent Panel](./agent-panel.md#adding-context):
+
+- @-mention files, directories, past threads, rules, and symbols
+- paste images that are copied on your clipboard
+
+Additionally, a useful pattern is to create a thread in the Agent Panel, and then mention it with `@thread` in the Inline Assistant to include it as context.
+That often serves as a way to iterate more quickly on a specific part of a change that happened in the context of a larger thread.
+
+## Parallel Generations
+
+There are two ways in which you can generate multiple changes at once with the Inline Assistant:
+
+### Multiple Cursors
+
+If you have multiple cursors and hit {#kb assistant::InlineAssist}, you can send the same prompt to all cursor positions and get a change at each of them.
+
+This is particularly useful when working on excerpts in [a multibuffer context](../multibuffers.md).
+
+### Multiple Models
+
+You can use the Inline Assistant to send the same prompt to multiple models at once.
+
+Here's how you can customize your `settings.json` to add this functionality:
+
+```json [settings]
+{
+  "agent": {
+    "default_model": {
+      "provider": "zed.dev",
+      "model": "claude-sonnet-4"
+    },
+    "inline_alternatives": [
+      {
+        "provider": "zed.dev",
+        "model": "gpt-4-mini"
+      }
+    ]
+  }
+}
+```
+
+When multiple models are configured, the Inline Assistant UI will surface buttons that allow you to cycle between the outputs generated by each model.
+
+The models you specify here are always used in _addition_ to your [default model](#default-model).
+
+For example, the following configuration will generate three outputs for every assist.
+One with Claude Sonnet 4 (the default model), another with GPT-5-mini, and another one with Gemini 2.5 Flash.
+
+```json [settings]
+{
+  "agent": {
+    "default_model": {
+      "provider": "zed.dev",
+      "model": "claude-sonnet-4"
+    },
+    "inline_alternatives": [
+      {
+        "provider": "zed.dev",
+        "model": "gpt-4-mini"
+      },
+      {
+        "provider": "zed.dev",
+        "model": "gemini-2.5-flash"
+      }
+    ]
+  }
+}
+```
+
+## Inline Assistant vs. Edit Prediction
+
+Users often ask what the difference is between these two AI-powered features in Zed, particularly because both of them involve getting inline LLM code completions.
+
+Here's how they are different:
+
+- The Inline Assistant is closer to the Agent Panel in that you're still writing a prompt yourself and crafting context. It works from within the buffer and is mostly centered around your selections.
+- [Edit Prediction](./edit-prediction.md) is an AI-powered completion mechanism that intelligently suggests what you likely want to add next, based on context automatically gathered from your previous edits, recently visited files, and more.
+
+In summary, the key difference is that in the Inline Assistant, you're still manually prompting, whereas Edit Prediction will _automatically suggest_ edits to you.
 
 ## Prefilling Prompts
 

docs/src/development.md πŸ”—

@@ -88,7 +88,6 @@ in-depth examples and explanations.
 ## Contributor links
 
 - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md)
-- [Releases](./development/releases.md)
 - [Debugging Crashes](./development/debugging-crashes.md)
 - [Code of Conduct](https://zed.dev/code-of-conduct)
 - [Zed Contributor License](https://zed.dev/cla)

docs/src/development/releases.md πŸ”—

@@ -1,147 +0,0 @@
-# Zed Releases
-
-Read about Zed's [release channels here](https://zed.dev/faq#what-are-the-release-channels).
-
-## Wednesday Release Process
-
-You will need write access to the Zed repository to do this.
-
-Credentials for various services used in this process can be found in 1Password.
-
-Use the `releases` Slack channel to notify the team that releases will be starting.
-This is mostly a formality on Wednesday's minor update releases, but can be beneficial when doing patch releases, as other devs may have landed fixes they'd like to cherry pick.
-
-### Starting the Builds
-
-1. Checkout `main` and ensure your working copy is clean.
-
-1. Run `git fetch && git pull` to ensure you have the latest commits locally.
-
-1. Run `git fetch --tags --force` to forcibly ensure your local tags are in sync with the remote.
-
-1. Run `./script/get-stable-channel-release-notes` and store output locally.
-
-1. Run `./script/bump-zed-minor-versions`.
-
-   - Push the tags and branches as instructed.
-
-1. Run `./script/get-preview-channel-changes` and store output locally.
-
-> **Note:** Always prioritize the stable release.
-> If you've completed aggregating stable release notes, you can move on to working on aggregating preview release notes, but once the stable build has finished, work through the rest of the stable steps to fully publish.
-> Preview can be finished up after.
-
-### Stable Release
-
-1. Aggregate stable release notes.
-
-   - Follow the instructions at the end of the script and aggregate the release notes into one structure.
-
-1. Once the stable release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the stable release notes into it and **save**.
-
-   - **Do not publish the draft!**
-
-1. Check the stable release assets.
-
-   - Ensure the stable release job has finished without error.
-   - Ensure the draft has the proper number of assetsβ€”releases currently have 12 assets each (as of v0.211).
-   - Download the artifacts for the stable release draft and test that you can run them locally.
-
-1. Publish the stable draft on [GitHub Releases](https://github.com/zed-industries/zed/releases).
-
-   - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
-     The release will be public once the rebuild has completed.
-
-1. Post the stable release notes to social media.
-
-   - Bluesky and X posts will already be built as drafts in [Buffer](https://buffer.com).
-   - Double-check links.
-   - Publish both, one at a time, ensuring both are posted to each respective platform.
-
-1. Send the stable release notes email.
-
-   - The email broadcast will already be built as a draft in [Kit](https://kit.com).
-   - Double-check links.
-   - Publish the email.
-
-### Preview Release
-
-1. Aggregate preview release notes.
-
-   - Take the script's output and build release notes by organizing each release note line into a category.
-   - Use a prior release for the initial outline.
-   - Make sure to append the `Credit` line, if present, to the end of each release note line.
-
-1. Once the preview release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the preview release notes into it and **save**.
-
-   - **Do not publish the draft!**
-
-1. Check the preview release assets.
-
-   - Ensure the preview release job has finished without error.
-   - Ensure the draft has the proper number of assetsβ€”releases currently have 12 assets each (as of v0.211).
-   - Download the artifacts for the preview release draft and test that you can run them locally.
-
-1. Publish the preview draft on [GitHub Releases](https://github.com/zed-industries/zed/releases).
-   - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
-     The release will be public once the rebuild has completed.
-
-### Prep Content for Next Week's Stable Release
-
-1. Build social media posts based on the popular items in preview.
-
-   - Draft the copy in the [tweets](https://zed.dev/channel/tweets-23331) channel.
-   - Create the preview media (videos, screenshots).
-     - For features that you film videos around, try to create alternative photo-only versions to be used in the email, as videos and GIFs aren't great for email.
-     - Store all created media in `Feature Media` in our Google Drive.
-   - Build X and Bluesky post drafts (copy and media) in [Buffer](https://buffer.com), to be sent for next week's stable release.
-
-   **Note: These are preview items and you may discover bugs.**
-   **This is a very good time to report these findings to the team!**
-
-1. Build email based on the popular items in preview.
-
-   - You can reuse the copy and photo media from the preview social media posts.
-   - Create a draft email in [Kit](https://kit.com), to be sent for next week's stable release.
-
-## Patch Release Process
-
-If your PR fixes a panic or a crash, you should cherry-pick it to the current stable and preview branches.
-If your PR fixes a regression in recently released code, you should cherry-pick it to preview.
-
-You will need write access to the Zed repository to do this:
-
----
-
-1. Send a PR containing your change to `main` as normal.
-
-1. Once it is merged, cherry-pick the commit locally to either of the release branches (`v0.XXX.x`).
-
-   - In some cases, you may have to handle a merge conflict.
-     More often than not, this will happen when cherry-picking to stable, as the stable branch is more "stale" than the preview branch.
-
-1. After the commit is cherry-picked, run `./script/trigger-release {preview|stable}`.
-   This will bump the version numbers, create a new release tag, and kick off a release build.
-
-   - This can also be run from the [GitHub Actions UI](https://github.com/zed-industries/zed/actions/workflows/bump_patch_version.yml):
-     ![](https://github.com/zed-industries/zed/assets/1486634/9e31ae95-09e1-4c7f-9591-944f4f5b63ea)
-
-1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), proofread and edit the release notes as needed and **save**.
-
-   - **Do not publish the drafts, yet.**
-
-1. Check the release assets.
-
-   - Ensure the stable / preview release jobs have finished without error.
-   - Ensure each draft has the proper number of assetsβ€”releases currently have 10 assets each.
-   - Download the artifacts for each release draft and test that you can run them locally.
-
-1. Publish stable / preview drafts, one at a time.
-   - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
-     The release will be public once the rebuild has completed.
-
-## Nightly release process
-
-In addition to the public releases, we also have a nightly build that we encourage employees to use.
-Nightly is released by cron once a day, and can be shipped as often as you'd like.
-There are no release notes or announcements, so you can just merge your changes to main and run `./script/trigger-release nightly`.

docs/src/extensions/icon-themes.md πŸ”—

@@ -11,7 +11,7 @@ The [Material Icon Theme](https://github.com/zed-extensions/material-icon-theme)
 There are two important directories for an icon theme extension:
 
 - `icon_themes`: This directory will contain one or more JSON files containing the icon theme definitions.
-- `icons`: This directory contains the icons assets that will be distributed with the extension. You can created subdirectories in this directory, if so desired.
+- `icons`: This directory contains the icon assets that will be distributed with the extension. You can create subdirectories in this directory, if so desired.
 
 Each icon theme file should adhere to the JSON schema specified at [`https://zed.dev/schema/icon_themes/v0.3.0.json`](https://zed.dev/schema/icon_themes/v0.3.0.json).
 

docs/src/extensions/languages.md πŸ”—

@@ -324,7 +324,7 @@ This query marks number and string values in key-value pairs and arrays for reda
 
 The `runnables.scm` file defines rules for detecting runnable code.
 
-Here's an example from an `runnables.scm` file for JSON:
+Here's an example from a `runnables.scm` file for JSON:
 
 ```scheme
 (

docs/src/icon-themes.md πŸ”—

@@ -4,19 +4,21 @@ Zed comes with a built-in icon theme, with more icon themes available as extensi
 
 ## Selecting an Icon Theme
 
-See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with "icon theme selector: toggle".
+See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with `icon theme selector: toggle`.
 
 Navigating through the icon theme list by moving up and down will change the icon theme in real time and hitting enter will save it to your settings file.
 
 ## Installing more Icon Themes
 
-More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions).
+More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=icon-themes).
 
 ## Configuring Icon Themes
 
-Your selected icon theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux).
+Your selected icon theme is stored in your settings file.
+You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}).
 
-Just like with themes, Zed allows for configuring different icon themes for light and dark mode. You can set the mode to `"light"` or `"dark"` to ignore the current system mode.
+Just like with themes, Zed allows for configuring different icon themes for light and dark mode.
+You can set the mode to `"light"` or `"dark"` to ignore the current system mode.
 
 ```json [settings]
 {

docs/src/languages/php.md πŸ”—

@@ -71,9 +71,7 @@ Zed’s PHP extension provides a debug adapter for PHP and Xdebug. The adapter n
     "label": "PHP: Listen to Xdebug",
     "adapter": "Xdebug",
     "request": "launch",
-    "initialize_args": {
-      "port": 9003
-    }
+    "port": 9003
   },
   {
     "label": "PHP: Debug this test",

docs/src/languages/rego.md πŸ”—

@@ -7,7 +7,7 @@ Rego language support in Zed is provided by the community-maintained [Rego exten
 
 ## Installation
 
-The extensions is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information.
+The extension is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information.
 
 ## Configuration
 

docs/src/snippets.md πŸ”—

@@ -1,6 +1,6 @@
 # Snippets
 
-Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit a existing snippets file for a specified [scope](#scopes).
+Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit an existing snippets file for a specified [scope](#scopes).
 
 The snippets are located in the `~/.config/zed/snippets` directory, to which you can navigate with the {#action snippets::OpenFolder} action.
 

docs/src/themes.md πŸ”—

@@ -4,21 +4,23 @@ Zed comes with a number of built-in themes, with more themes available as extens
 
 ## Selecting a Theme
 
-See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with "theme selector: Toggle" (bound to `cmd-k cmd-t` on macOS and `ctrl-k ctrl-t` on Linux).
+See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with `theme selector: toggle` (bound to {#kb theme_selector::Toggle}).
 
 Navigating through the theme list by moving up and down will change the theme in real time and hitting enter will save it to your settings file.
 
 ## Installing more Themes
 
-More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions).
+More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=themes).
 
 Many popular themes have been ported to Zed, and if you're struggling to choose one, visit [zed-themes.com](https://zed-themes.com), a third-party gallery with visible previews for many of them.
 
 ## Configuring a Theme
 
-Your selected theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux).
+Your selected theme is stored in your settings file.
+You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}).
 
-By default, Zed maintains two themes: one for light mode and one for dark mode. You can set the mode to `"dark"` or `"light"` to ignore the current system mode.
+By default, Zed maintains two themes: one for light mode and one for dark mode.
+You can set the mode to `"dark"` or `"light"` to ignore the current system mode.
 
 ```json [settings]
 {
@@ -32,7 +34,8 @@ By default, Zed maintains two themes: one for light mode and one for dark mode.
 
 ## Theme Overrides
 
-To override specific attributes of a theme, use the `theme_overrides` setting. This setting can be used to configure theme-specific overrides.
+To override specific attributes of a theme, use the `theme_overrides` setting.
+This setting can be used to configure theme-specific overrides.
 
 For example, add the following to your `settings.json` if you wish to override the background color of the editor and display comments and doc comments as italics:
 
@@ -54,17 +57,17 @@ For example, add the following to your `settings.json` if you wish to override t
 }
 ```
 
-To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see: [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting).
+To see a comprehensive list of captures (like `comment` and `comment.doc`) see [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting).
 
-To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes.
+To see a list of available theme attributes look at the JSON file for your theme.
+For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes.
 
 ## Local Themes
 
 Store new themes locally by placing them in the `~/.config/zed/themes` directory (macOS and Linux) or `%USERPROFILE%\AppData\Roaming\Zed\themes\` (Windows).
 
-For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. It will be available in the theme selector the next time Zed loads.
-
-Find more themes at [zed-themes.com](https://zed-themes.com).
+For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory.
+It will be available in the theme selector the next time Zed loads.
 
 ## Theme Development
 

docs/src/vim.md πŸ”—

@@ -628,7 +628,7 @@ Here's an example of these settings changed:
   // Allow the cursor to reach the edges of the screen
   "vertical_scroll_margin": 0,
   "gutter": {
-    // Disable line numbers completely:
+    // Disable line numbers completely
     "line_numbers": false
   },
   "command_aliases": {

docs/src/visual-customization.md πŸ”—

@@ -1,14 +1,14 @@
 # Visual Customization
 
-Various aspects of Zed's visual layout can be configured via Zed settings.json which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}).
+Various aspects of Zed's visual layout can be configured via either the settings window or the `settings.json` file, which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}) and {#action zed::OpenSettingsFile} ({#kb zed::OpenSettingsFile}) respectively.
 
 See [Configuring Zed](./configuring-zed.md) for additional information and other non-visual settings.
 
 ## Themes
 
-Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu.
+You can install many [themes](./themes.md) and [icon themes](./icon-themes.md) in the form of extensions by running {#action zed::Extensions} from the command palette.
 
-You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings:
+You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and {#action icon_theme_selector::Toggle} ({#kb icon_theme_selector::Toggle}) which will modify the following settings:
 
 ```json [settings]
 {
@@ -61,15 +61,20 @@ If you would like to use distinct themes for light mode/dark mode that can be se
     "line_height": "standard",
   },
 
-  // Agent Panel Font Settings
-  "agent_font_size": 15
+  // Controls the font size for agent responses in the agent panel.
+  // If not specified, it falls back to the UI font size.
+  "agent_ui_font_size": 15,
+  // Controls the font size for the agent panel's message editor, user message,
+  // and any other snippet of code.
+  "agent_buffer_font_size": 12
 ```
 
 ### Font ligatures
 
 By default Zed enables font ligatures, which visually combine certain adjacent characters.
 
-For example `=>` will be displayed as `β†’` and `!=` will be `β‰ `. This is purely cosmetic and the individual characters remain unchanged.
+For example `=>` will be displayed as `β†’` and `!=` will be `β‰ `.
+This is purely cosmetic and the individual characters remain unchanged.
 
 To disable this behavior use:
 
@@ -464,7 +469,12 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k
     "default_width": 640,   // Default width (left/right docked)
     "default_height": 320,  // Default height (bottom docked)
   },
-  "agent_font_size": 16
+  // Controls the font size for agent responses in the agent panel.
+  // If not specified, it falls back to the UI font size.
+  "agent_ui_font_size": 15,
+  // Controls the font size for the agent panel's message editor, user message,
+  // and any other snippet of code.
+  "agent_buffer_font_size": 12
 ```
 
 See [Zed AI Documentation](./ai/overview.md) for additional non-visual AI settings.

script/bundle-mac πŸ”—

@@ -6,8 +6,6 @@ source script/lib/blob-store.sh
 build_flag="--release"
 target_dir="release"
 open_result=false
-local_arch=false
-local_only=false
 local_install=false
 can_code_sign=false
 
@@ -196,10 +194,6 @@ function sign_app_binaries() {
         /usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${app_path}" -v
     else
         echo "One or more of the following variables are missing: MACOS_CERTIFICATE, MACOS_CERTIFICATE_PASSWORD, APPLE_NOTARIZATION_KEY, APPLE_NOTARIZATION_KEY_ID, APPLE_NOTARIZATION_ISSUER_ID"
-        if [[ "$local_only" = false ]]; then
-            echo "To create a self-signed local build use ./scripts/build.sh -ldf"
-            exit 1
-        fi
 
         echo "====== WARNING ======"
         echo "This bundle is being signed without all entitlements, some features (e.g. universal links) will not work"
@@ -215,7 +209,7 @@ function sign_app_binaries() {
         codesign --force --deep --entitlements "${app_path}/Contents/Resources/zed.entitlements" --sign ${MACOS_SIGNING_KEY:- -} "${app_path}" -v
     fi
 
-    if [[ "$target_dir" = "debug" && "$local_only" = false ]]; then
+    if [[ "$target_dir" = "debug" ]]; then
         if [ "$open_result" = true ]; then
             open "$app_path"
         else
@@ -227,20 +221,13 @@ function sign_app_binaries() {
 
     bundle_name=$(basename "$app_path")
 
-    if [ "$local_only" = true ]; then
-        if [ "$local_install" = true ]; then
-            rm -rf "/Applications/$bundle_name"
-            mv "$app_path" "/Applications/$bundle_name"
-            echo "Installed application bundle: /Applications/$bundle_name"
-            if [ "$open_result" = true ]; then
-                echo "Opening /Applications/$bundle_name"
-                open "/Applications/$bundle_name"
-            fi
-        else
-            if [ "$open_result" = true ]; then
-                echo "Opening $app_path"
-                open "$app_path"
-            fi
+    if [ "$local_install" = true ]; then
+        rm -rf "/Applications/$bundle_name"
+        mv "$app_path" "/Applications/$bundle_name"
+        echo "Installed application bundle: /Applications/$bundle_name"
+        if [ "$open_result" = true ]; then
+            echo "Opening /Applications/$bundle_name"
+            open "/Applications/$bundle_name"
         fi
     else
         dmg_target_directory="target/${target_triple}/${target_dir}"
@@ -291,30 +278,36 @@ function sign_binary() {
         /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${binary_path}" -v
     fi
 }
-cp target/${target_triple}/${target_dir}/zed "${app_path}/Contents/MacOS/zed"
-cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli"
-sign_app_binaries
-
-sign_binary "target/$target_triple/release/remote_server"
-gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz
 
-function upload_debug_info() {
+function upload_debug_symbols() {
     if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then
         echo "Uploading zed debug symbols to sentry..."
+        exe_path="target/${target_triple}/release/Zed"
+        if ! dsymutil --flat "target/${target_triple}/${target_dir}/zed" 2> target/dsymutil.log; then
+            echo "dsymutil failed"
+            cat target/dsymutil.log
+            exit 1
+        fi
+        if ! dsymutil --flat "target/${target_triple}/${target_dir}/remote_server" 2> target/dsymutil.log; then
+            echo "dsymutil failed"
+            cat target/dsymutil.log
+            exit 1
+        fi
         # note: this uploads the unstripped binary which is needed because it contains
         # .eh_frame data for stack unwinding. see https://github.com/getsentry/symbolic/issues/783
         sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev \
-            "target/${target_triple}/${target_dir}/zed" \
-            "target/${target_triple}/${target_dir}/remote_server" \
-            "target/${target_triple}/${target_dir}/zed.dwarf"
+            "target/${target_triple}/${target_dir}/zed.dwarf" \
+            "target/${target_triple}/${target_dir}/remote_server.dwarf"
     else
         echo "missing SENTRY_AUTH_TOKEN. skipping sentry upload."
     fi
 }
 
-if command -v sentry-cli >/dev/null 2>&1; then
-    upload_debug_info
-else
-    echo "sentry-cli not found. skipping sentry upload."
-    echo "install with: 'curl -sL https://sentry.io/get-cli | bash'"
-fi
+upload_debug_symbols
+
+cp target/${target_triple}/${target_dir}/zed "${app_path}/Contents/MacOS/zed"
+cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli"
+sign_app_binaries
+
+sign_binary "target/$target_triple/release/remote_server"
+gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz

script/prettier πŸ”—

@@ -0,0 +1,17 @@
+#!/bin/bash
+set -euxo pipefail
+
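+# Check that the default settings file and the docs are formatted with the pinned Prettier version.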
+PRETTIER_VERSION=3.5.0
+
+pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || {
+    echo "To fix, run from the root of the Zed repo:"
+    echo "  pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
+    false
+}
+
+cd docs
+pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || {
+    echo "To fix, run from the root of the Zed repo:"
+    echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
+    false
+}

script/run-unit-evals πŸ”—

@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -euxo pipefail
+
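+# Run only the unit evals: workspace tests whose names contain `::eval_`, with the `unit-eval` feature enabled.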
+cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)'

tooling/xtask/src/tasks/workflows.rs πŸ”—

@@ -3,11 +3,15 @@ use clap::Parser;
 use std::fs;
 use std::path::Path;
 
+mod compare_perf;
 mod danger;
 mod nix_build;
 mod release_nightly;
 mod run_bundling;
 
+mod release;
+mod run_agent_evals;
+mod run_tests;
 mod runners;
 mod steps;
 mod vars;
@@ -20,11 +24,13 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
 
     let workflows = vec![
         ("danger.yml", danger::danger()),
-        ("nix_build.yml", nix_build::nix_build()),
         ("run_bundling.yml", run_bundling::run_bundling()),
         ("release_nightly.yml", release_nightly::release_nightly()),
-        // ("run_tests.yml", run_tests::run_tests()),
-        // ("release.yml", release::release()),
+        ("run_tests.yml", run_tests::run_tests()),
+        ("release.yml", release::release()),
+        ("compare_perf.yml", compare_perf::compare_perf()),
+        ("run_unit_evals.yml", run_agent_evals::run_unit_evals()),
+        ("run_agent_evals.yml", run_agent_evals::run_agent_evals()),
     ];
     fs::create_dir_all(dir)
         .with_context(|| format!("Failed to create directory: {}", dir.display()))?;

tooling/xtask/src/tasks/workflows/compare_perf.rs πŸ”—

@@ -0,0 +1,22 @@
+use gh_workflow::*;
+
+use crate::tasks::workflows::{
+    runners,
+    steps::{self, NamedJob, named},
+};
+
+/// Generates the compare_perf.yml workflow
+pub fn compare_perf() -> Workflow {
+    let run_perf = run_perf();
+    named::workflow()
+        .on(Event::default().workflow_dispatch(WorkflowDispatch::default()))
+        .add_job(run_perf.name, run_perf.job)
+}
+
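+/// Perf-comparison job; for now it only checks out the repository.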
+pub fn run_perf() -> NamedJob {
+    named::job(
+        Job::default()
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(steps::checkout_repo()),
+    )
+}

tooling/xtask/src/tasks/workflows/danger.rs πŸ”—

@@ -1,11 +1,13 @@
 use gh_workflow::*;
 
-use crate::tasks::workflows::steps::named;
+use crate::tasks::workflows::steps::{NamedJob, named};
 
 use super::{runners, steps};
 
 /// Generates the danger.yml workflow
 pub fn danger() -> Workflow {
+    let danger = danger_job();
+
     named::workflow()
         .on(
             Event::default().pull_request(PullRequest::default().add_branch("main").types([
@@ -15,39 +17,43 @@ pub fn danger() -> Workflow {
                 PullRequestType::Edited,
             ])),
         )
-        .add_job(
-            "danger",
-            Job::default()
-                .cond(Expression::new(
-                    "github.repository_owner == 'zed-industries'",
-                ))
-                .runs_on(runners::LINUX_CHEAP)
-                .add_step(steps::checkout_repo())
-                .add_step(steps::setup_pnpm())
-                .add_step(
-                    steps::setup_node()
-                        .add_with(("cache", "pnpm"))
-                        .add_with(("cache-dependency-path", "script/danger/pnpm-lock.yaml")),
-                )
-                .add_step(install_deps())
-                .add_step(run()),
-        )
+        .add_job(danger.name, danger.job)
 }
 
-pub fn install_deps() -> Step<Run> {
-    named::bash("pnpm install --dir script/danger")
-}
+fn danger_job() -> NamedJob {
+    pub fn install_deps() -> Step<Run> {
+        named::bash("pnpm install --dir script/danger")
+    }
+
+    pub fn run() -> Step<Run> {
+        named::bash("pnpm run --dir script/danger danger ci")
+            // This GitHub token is not used, but the value needs to be here to prevent
+            // Danger from throwing an error.
+            .add_env(("GITHUB_TOKEN", "not_a_real_token"))
+            // All requests are instead proxied through an instance of
+            // https://github.com/maxdeviant/danger-proxy that allows Danger to securely
+            // authenticate with GitHub while still being able to run on PRs from forks.
+            .add_env((
+                "DANGER_GITHUB_API_BASE_URL",
+                "https://danger-proxy.fly.dev/github",
+            ))
+    }
 
-pub fn run() -> Step<Run> {
-    named::bash("pnpm run --dir script/danger danger ci")
-        // This GitHub token is not used, but the value needs to be here to prevent
-        // Danger from throwing an error.
-        .add_env(("GITHUB_TOKEN", "not_a_real_token"))
-        // All requests are instead proxied through an instance of
-        // https://github.com/maxdeviant/danger-proxy that allows Danger to securely
-        // authenticate with GitHub while still being able to run on PRs from forks.
-        .add_env((
-            "DANGER_GITHUB_API_BASE_URL",
-            "https://danger-proxy.fly.dev/github",
-        ))
+    NamedJob {
+        name: "danger".to_string(),
+        job: Job::default()
+            .cond(Expression::new(
+                "github.repository_owner == 'zed-industries'",
+            ))
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_pnpm())
+            .add_step(
+                steps::setup_node()
+                    .add_with(("cache", "pnpm"))
+                    .add_with(("cache-dependency-path", "script/danger/pnpm-lock.yaml")),
+            )
+            .add_step(install_deps())
+            .add_step(run()),
+    }
 }

tooling/xtask/src/tasks/workflows/nix_build.rs πŸ”—

@@ -7,52 +7,6 @@ use super::{runners, steps, steps::named, vars};
 use gh_workflow::*;
 use indoc::indoc;
 
-/// Generates the nix.yml workflow
-pub fn nix_build() -> Workflow {
-    // todo(ci) instead of having these as optional YAML inputs,
-    // should we just generate two copies of the job (one for release-nightly
-    // and one for CI?)
-    let (input_flake_output, flake_output) = vars::input(
-        "flake-output",
-        WorkflowCallInput {
-            input_type: "string".into(),
-            default: Some("default".into()),
-            ..Default::default()
-        },
-    );
-    let (input_cachix_filter, cachix_filter) = vars::input(
-        "cachix-filter",
-        WorkflowCallInput {
-            input_type: "string".into(),
-            ..Default::default()
-        },
-    );
-
-    let linux_x86 = build_nix(
-        Platform::Linux,
-        Arch::X86_64,
-        &input_flake_output,
-        Some(&input_cachix_filter),
-        &[],
-    );
-    let mac_arm = build_nix(
-        Platform::Mac,
-        Arch::ARM64,
-        &input_flake_output,
-        Some(&input_cachix_filter),
-        &[],
-    );
-
-    named::workflow()
-        .on(Event::default().workflow_call(
-            WorkflowCall::default()
-                .add_input(flake_output.0, flake_output.1)
-                .add_input(cachix_filter.0, cachix_filter.1),
-        ))
-        .add_job(linux_x86.name, linux_x86.job)
-        .add_job(mac_arm.name, mac_arm.job)
-}
-
 pub(crate) fn build_nix(
     platform: Platform,
     arch: Arch,
@@ -60,6 +14,55 @@ pub(crate) fn build_nix(
     cachix_filter: Option<&str>,
     deps: &[&NamedJob],
 ) -> NamedJob {
+    // On our Macs we install Nix manually. For some reason the cachix action runs under a
+    // non-login /bin/bash shell that doesn't source the script which adds the Nix profile to
+    // PATH, so we add the profile bin directories to PATH here ourselves.
+    pub fn set_path() -> Step<Run> {
+        named::bash(indoc! {r#"
+                echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
+                echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
+            "#})
+    }
+
+    pub fn install_nix() -> Step<Use> {
+        named::uses(
+            "cachix",
+            "install-nix-action",
+            "02a151ada4993995686f9ed4f1be7cfbb229e56f", // v31
+        )
+        .add_with(("github_access_token", vars::GITHUB_TOKEN))
+    }
+
+    pub fn cachix_action(cachix_filter: Option<&str>) -> Step<Use> {
+        let mut step = named::uses(
+            "cachix",
+            "cachix-action",
+            "0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad", // v16
+        )
+        .add_with(("name", "zed"))
+        .add_with(("authToken", vars::CACHIX_AUTH_TOKEN))
+        .add_with(("cachixArgs", "-v"));
+        if let Some(cachix_filter) = cachix_filter {
+            step = step.add_with(("pushFilter", cachix_filter));
+        }
+        step
+    }
+
+    pub fn build(flake_output: &str) -> Step<Run> {
+        named::bash(&format!(
+            "nix build .#{} -L --accept-flake-config",
+            flake_output
+        ))
+    }
+
+    pub fn limit_store() -> Step<Run> {
+        named::bash(indoc! {r#"
+                if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
+                    nix-collect-garbage -d || true
+                fi"#
+        })
+    }
+
     let runner = match platform {
         Platform::Windows => unimplemented!(),
         Platform::Linux => runners::LINUX_X86_BUNDLER,
@@ -101,52 +104,3 @@ pub(crate) fn build_nix(
         job,
     }
 }
-
-// on our macs we manually install nix. for some reason the cachix action is running
-// under a non-login /bin/bash shell which doesn't source the proper script to add the
-// nix profile to PATH, so we manually add them here
-pub fn set_path() -> Step<Run> {
-    named::bash(indoc! {r#"
-            echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
-            echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
-        "#})
-}
-
-pub fn install_nix() -> Step<Use> {
-    named::uses(
-        "cachix",
-        "install-nix-action",
-        "02a151ada4993995686f9ed4f1be7cfbb229e56f", // v31
-    )
-    .add_with(("github_access_token", vars::GITHUB_TOKEN))
-}
-
-pub fn cachix_action(cachix_filter: Option<&str>) -> Step<Use> {
-    let mut step = named::uses(
-        "cachix",
-        "cachix-action",
-        "0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad", // v16
-    )
-    .add_with(("name", "zed"))
-    .add_with(("authToken", vars::CACHIX_AUTH_TOKEN))
-    .add_with(("cachixArgs", "-v"));
-    if let Some(cachix_filter) = cachix_filter {
-        step = step.add_with(("pushFilter", cachix_filter));
-    }
-    step
-}
-
-pub fn build(flake_output: &str) -> Step<Run> {
-    named::bash(&format!(
-        "nix build .#{} -L --accept-flake-config",
-        flake_output
-    ))
-}
-
-pub fn limit_store() -> Step<Run> {
-    named::bash(indoc! {r#"
-            if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
-                nix-collect-garbage -d || true
-            fi"#
-    })
-}
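
With the workflow_call inputs removed, callers now pass the flake output and cachix filter directly as arguments. For reference, a call mirroring the run_tests.rs usage further below:

    let nix_linux = build_nix(
        Platform::Linux,
        Arch::X86_64,
        "debug",                             // flake output to build
        Some("-zed-editor-[0-9.]*-nightly"), // cachix push filter
        &[],                                 // no upstream job dependencies
    );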

tooling/xtask/src/tasks/workflows/release.rs πŸ”—

@@ -0,0 +1,223 @@
+use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow};
+
+use crate::tasks::workflows::{
+    run_bundling, run_tests, runners,
+    steps::{self, NamedJob, dependant_job, named, release_job},
+    vars,
+};
+
+pub(crate) fn release() -> Workflow {
+    let macos_tests = run_tests::run_platform_tests(runners::Platform::Mac);
+    let linux_tests = run_tests::run_platform_tests(runners::Platform::Linux);
+    let windows_tests = run_tests::run_platform_tests(runners::Platform::Windows);
+    let check_scripts = run_tests::check_scripts();
+
+    let create_draft_release = create_draft_release();
+
+    let bundle = ReleaseBundleJobs {
+        linux_arm64: bundle_linux_arm64(&[&linux_tests, &check_scripts]),
+        linux_x86_64: bundle_linux_x86_64(&[&linux_tests, &check_scripts]),
+        mac_arm64: bundle_mac_arm64(&[&macos_tests, &check_scripts]),
+        mac_x86_64: bundle_mac_x86_64(&[&macos_tests, &check_scripts]),
+        windows_arm64: bundle_windows_arm64(&[&windows_tests, &check_scripts]),
+        windows_x86_64: bundle_windows_x86_64(&[&windows_tests, &check_scripts]),
+    };
+
+    let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle);
+
+    let auto_release_preview = auto_release_preview(&[&upload_release_assets]);
+
+    named::workflow()
+        .on(Event::default().push(Push::default().tags(vec!["v*".to_string()])))
+        .concurrency(vars::one_workflow_per_non_main_branch())
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("CARGO_INCREMENTAL", "0"))
+        .add_env(("RUST_BACKTRACE", "1"))
+        .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
+        .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT))
+        .add_job(macos_tests.name, macos_tests.job)
+        .add_job(linux_tests.name, linux_tests.job)
+        .add_job(windows_tests.name, windows_tests.job)
+        .add_job(check_scripts.name, check_scripts.job)
+        .add_job(create_draft_release.name, create_draft_release.job)
+        .add_job(bundle.linux_arm64.name, bundle.linux_arm64.job)
+        .add_job(bundle.linux_x86_64.name, bundle.linux_x86_64.job)
+        .add_job(bundle.mac_arm64.name, bundle.mac_arm64.job)
+        .add_job(bundle.mac_x86_64.name, bundle.mac_x86_64.job)
+        .add_job(bundle.windows_arm64.name, bundle.windows_arm64.job)
+        .add_job(bundle.windows_x86_64.name, bundle.windows_x86_64.job)
+        .add_job(upload_release_assets.name, upload_release_assets.job)
+        .add_job(auto_release_preview.name, auto_release_preview.job)
+}
+
+struct ReleaseBundleJobs {
+    linux_arm64: NamedJob,
+    linux_x86_64: NamedJob,
+    mac_arm64: NamedJob,
+    mac_x86_64: NamedJob,
+    windows_arm64: NamedJob,
+    windows_x86_64: NamedJob,
+}
+
+fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob {
+    fn create_sentry_release() -> Step<Use> {
+        named::uses(
+            "getsentry",
+            "action-release",
+            "526942b68292201ac6bbb99b9a0747d4abee354c", // v3
+        )
+        .add_env(("SENTRY_ORG", "zed-dev"))
+        .add_env(("SENTRY_PROJECT", "zed"))
+        .add_env(("SENTRY_AUTH_TOKEN", "${{ secrets.SENTRY_AUTH_TOKEN }}"))
+        .add_with(("environment", "production"))
+    }
+
+    named::job(
+        dependant_job(deps)
+            .runs_on(runners::LINUX_SMALL)
+            .cond(Expression::new(indoc::indoc!(
+                r#"
+                false
+                && startsWith(github.ref, 'refs/tags/v')
+                && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
+            "# // todo(ci-release) enable
+            )))
+            .add_step(
+                steps::script(
+                    r#"gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false"#,
+                )
+                .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")),
+            )
+            .add_step(create_sentry_release()),
+    )
+}
+
+fn upload_release_assets(deps: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs) -> NamedJob {
+    fn download_workflow_artifacts() -> Step<Use> {
+        named::uses(
+            "actions",
+            "download-artifact",
+            "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0
+        )
+        .add_with(("path", "./artifacts/"))
+    }
+
+    fn prep_release_artifacts(bundle: &ReleaseBundleJobs) -> Step<Run> {
+        let assets = [
+            (&bundle.mac_x86_64.name, "zed", "Zed-x86_64.dmg"),
+            (&bundle.mac_arm64.name, "zed", "Zed-aarch64.dmg"),
+            (&bundle.windows_x86_64.name, "zed", "Zed-x86_64.exe"),
+            (&bundle.windows_arm64.name, "zed", "Zed-aarch64.exe"),
+            (&bundle.linux_arm64.name, "zed", "zed-linux-aarch64.tar.gz"),
+            (&bundle.linux_x86_64.name, "zed", "zed-linux-x86_64.tar.gz"),
+            (
+                &bundle.linux_x86_64.name,
+                "remote-server",
+                "zed-remote-server-linux-x86_64.gz",
+            ),
+            (
+                &bundle.linux_arm64.name,
+                "remote-server",
+                "zed-remote-server-linux-aarch64.gz",
+            ),
+            (
+                &bundle.mac_x86_64.name,
+                "remote-server",
+                "zed-remote-server-macos-x86_64.gz",
+            ),
+            (
+                &bundle.mac_arm64.name,
+                "remote-server",
+                "zed-remote-server-macos-aarch64.gz",
+            ),
+        ];
+
+        let mut script_lines = vec!["mkdir -p release-artifacts/\n".to_string()];
+        for (job_name, artifact_kind, release_artifact_name) in assets {
+            let artifact_path =
+                ["${{ needs.", job_name, ".outputs.", artifact_kind, " }}"].join("");
+            let mv_command = format!(
+                "mv ./artifacts/{artifact_path}/* release-artifacts/{release_artifact_name}"
+            );
+            script_lines.push(mv_command)
+        }
+
+        named::bash(&script_lines.join("\n"))
+    }
+
+    let mut deps = deps.to_vec();
+    deps.extend([
+        &bundle_jobs.linux_arm64,
+        &bundle_jobs.linux_x86_64,
+        &bundle_jobs.mac_arm64,
+        &bundle_jobs.mac_x86_64,
+        &bundle_jobs.windows_arm64,
+        &bundle_jobs.windows_x86_64,
+    ]);
+
+    named::job(
+        dependant_job(&deps)
+            .runs_on(runners::LINUX_MEDIUM)
+            .add_step(download_workflow_artifacts())
+            .add_step(steps::script("ls -lR ./artifacts"))
+            .add_step(prep_release_artifacts(bundle_jobs))
+            .add_step(
+                steps::script("gh release upload \"$GITHUB_REF_NAME\" --repo=zed-industries/zed release-artifacts/*")
+                    .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")),
+            ),
+    )
+}
+
+fn create_draft_release() -> NamedJob {
+    fn generate_release_notes() -> Step<Run> {
+        named::bash(
+            r#"node --redirect-warnings=/dev/null ./script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md"#,
+        )
+    }
+
+    fn create_release() -> Step<Run> {
+        named::bash("script/create-draft-release target/release-notes.md")
+            .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}"))
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_SMALL)
+            // We need to fetch more than one commit so that `script/draft-release-notes`
+            // is able to diff between the current and previous tag.
+            //
+            // 25 was chosen arbitrarily.
+            .add_step(
+                steps::checkout_repo()
+                    .add_with(("fetch-depth", 25))
+                    .add_with(("clean", false))
+                    .add_with(("ref", "${{ github.ref }}")),
+            )
+            .add_step(steps::script("script/determine-release-channel"))
+            .add_step(steps::script("mkdir -p target/"))
+            .add_step(generate_release_notes())
+            .add_step(create_release()),
+    )
+}
+
+fn bundle_mac_x86_64(deps: &[&NamedJob]) -> NamedJob {
+    named::job(run_bundling::bundle_mac_job(runners::Arch::X86_64, deps))
+}
+fn bundle_mac_arm64(deps: &[&NamedJob]) -> NamedJob {
+    named::job(run_bundling::bundle_mac_job(runners::Arch::ARM64, deps))
+}
+fn bundle_linux_x86_64(deps: &[&NamedJob]) -> NamedJob {
+    named::job(run_bundling::bundle_linux_job(runners::Arch::X86_64, deps))
+}
+fn bundle_linux_arm64(deps: &[&NamedJob]) -> NamedJob {
+    named::job(run_bundling::bundle_linux_job(runners::Arch::ARM64, deps))
+}
+fn bundle_windows_x86_64(deps: &[&NamedJob]) -> NamedJob {
+    named::job(run_bundling::bundle_windows_job(
+        runners::Arch::X86_64,
+        deps,
+    ))
+}
+fn bundle_windows_arm64(deps: &[&NamedJob]) -> NamedJob {
+    named::job(run_bundling::bundle_windows_job(runners::Arch::ARM64, deps))
+}
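
To make the needs/outputs indirection in prep_release_artifacts concrete, this is what the loop body produces for the mac_arm64 entry, using a hypothetical job name (the real one is derived by named::job):

    let job_name = "bundle_mac_arm64"; // hypothetical; the real name comes from named::job
    let artifact_path = ["${{ needs.", job_name, ".outputs.", "zed", " }}"].join("");
    let mv_command =
        format!("mv ./artifacts/{artifact_path}/* release-artifacts/Zed-aarch64.dmg");
    assert_eq!(
        mv_command,
        "mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.dmg"
    );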

tooling/xtask/src/tasks/workflows/release_nightly.rs πŸ”—

@@ -1,6 +1,7 @@
 use crate::tasks::workflows::{
     nix_build::build_nix,
-    run_bundling::bundle_mac,
+    run_bundling::{bundle_mac, bundle_windows},
+    run_tests::run_platform_tests,
     runners::{Arch, Platform},
     steps::NamedJob,
     vars::{mac_bundle_envs, windows_bundle_envs},
@@ -32,8 +33,8 @@ pub fn release_nightly() -> Workflow {
     .collect();
 
     let style = check_style();
-    let tests = run_tests(Platform::Mac);
-    let windows_tests = run_tests(Platform::Windows);
+    let tests = run_platform_tests(Platform::Mac);
+    let windows_tests = run_platform_tests(Platform::Windows);
     let bundle_mac_x86 = bundle_mac_nightly(Arch::X86_64, &[&style, &tests]);
     let bundle_mac_arm = bundle_mac_nightly(Arch::ARM64, &[&style, &tests]);
     let linux_x86 = bundle_linux_nightly(Arch::X86_64, &[&style, &tests]);
@@ -111,26 +112,6 @@ fn release_job(deps: &[&NamedJob]) -> Job {
     }
 }
 
-fn run_tests(platform: Platform) -> NamedJob {
-    let runner = match platform {
-        Platform::Windows => runners::WINDOWS_DEFAULT,
-        Platform::Linux => runners::LINUX_DEFAULT,
-        Platform::Mac => runners::MAC_DEFAULT,
-    };
-    NamedJob {
-        name: format!("run_tests_{platform}"),
-        job: release_job(&[])
-            .runs_on(runner)
-            .add_step(steps::checkout_repo())
-            .add_step(steps::setup_cargo_config(platform))
-            .add_step(steps::setup_node())
-            .add_step(steps::cargo_install_nextest(platform))
-            .add_step(steps::clear_target_dir_if_large(platform))
-            .add_step(steps::cargo_nextest(platform))
-            .add_step(steps::cleanup_cargo_config(platform)),
-    }
-}
-
 fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
     let platform = Platform::Mac;
     NamedJob {
@@ -150,11 +131,10 @@ fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
 
 fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
     let platform = Platform::Linux;
-    let mut job = release_job(deps)
+    let mut job = steps::release_job(deps)
         .runs_on(arch.linux_bundler())
         .add_step(steps::checkout_repo())
         .add_step(steps::setup_sentry())
-        .add_step(add_rust_to_path())
         .add_step(steps::script("./script/linux"));
 
     // todo(ci) can we do this on arm too?
@@ -176,22 +156,47 @@ fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
     let platform = Platform::Windows;
     NamedJob {
         name: format!("bundle_windows_nightly_{arch}"),
-        job: release_job(deps)
+        job: steps::release_job(deps)
             .runs_on(runners::WINDOWS_DEFAULT)
             .envs(windows_bundle_envs())
             .add_step(steps::checkout_repo())
             .add_step(steps::setup_sentry())
             .add_step(set_release_channel_to_nightly(platform))
-            .add_step(build_zed_installer(arch))
-            .add_step(upload_zed_nightly_windows(arch)),
+            .add_step(bundle_windows(arch))
+            .add_step(upload_zed_nightly(platform, arch)),
     }
 }
 
 fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob {
+    fn update_nightly_tag() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
+              echo "Nightly tag already points to current commit. Skipping tagging."
+              exit 0
+            fi
+            git config user.name github-actions
+            git config user.email github-actions@github.com
+            git tag -f nightly
+            git push origin nightly --force
+        "#})
+    }
+
+    fn create_sentry_release() -> Step<Use> {
+        named::uses(
+            "getsentry",
+            "action-release",
+            "526942b68292201ac6bbb99b9a0747d4abee354c", // v3
+        )
+        .add_env(("SENTRY_ORG", "zed-dev"))
+        .add_env(("SENTRY_PROJECT", "zed"))
+        .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN))
+        .add_with(("environment", "production"))
+    }
+
     NamedJob {
         name: "update_nightly_tag".to_owned(),
-        job: release_job(deps)
-            .runs_on(runners::LINUX_CHEAP)
+        job: steps::release_job(deps)
+            .runs_on(runners::LINUX_SMALL)
             .add_step(steps::checkout_repo().add_with(("fetch-depth", 0)))
             .add_step(update_nightly_tag())
             .add_step(create_sentry_release()),
@@ -216,10 +221,6 @@ fn set_release_channel_to_nightly(platform: Platform) -> Step<Run> {
     }
 }
 
-fn add_rust_to_path() -> Step<Run> {
-    named::bash(r#"echo "$HOME/.cargo/bin" >> "$GITHUB_PATH""#)
-}
-
 fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step<Run> {
     match platform {
         Platform::Linux => named::bash(&format!("script/upload-nightly linux-targz {arch}")),
@@ -233,44 +234,3 @@ fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step<Run> {
         }
     }
 }
-
-fn build_zed_installer(arch: Arch) -> Step<Run> {
-    let cmd = match arch {
-        Arch::X86_64 => "script/bundle-windows.ps1 -Architecture x86_64",
-        Arch::ARM64 => "script/bundle-windows.ps1 -Architecture aarch64",
-    };
-    named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}")
-}
-
-fn upload_zed_nightly_windows(arch: Arch) -> Step<Run> {
-    let cmd = match arch {
-        Arch::X86_64 => "script/upload-nightly.ps1 -Architecture x86_64",
-        Arch::ARM64 => "script/upload-nightly.ps1 -Architecture aarch64",
-    };
-    named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}")
-}
-
-fn update_nightly_tag() -> Step<Run> {
-    named::bash(indoc::indoc! {r#"
-        if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
-          echo "Nightly tag already points to current commit. Skipping tagging."
-          exit 0
-        fi
-        git config user.name github-actions
-        git config user.email github-actions@github.com
-        git tag -f nightly
-        git push origin nightly --force
-    "#})
-}
-
-fn create_sentry_release() -> Step<Use> {
-    named::uses(
-        "getsentry",
-        "action-release",
-        "526942b68292201ac6bbb99b9a0747d4abee354c", // v3
-    )
-    .add_env(("SENTRY_ORG", "zed-dev"))
-    .add_env(("SENTRY_PROJECT", "zed"))
-    .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN))
-    .add_with(("environment", "production"))
-}

tooling/xtask/src/tasks/workflows/run_agent_evals.rs πŸ”—

@@ -0,0 +1,113 @@
+use gh_workflow::{
+    Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow,
+    WorkflowDispatch,
+};
+
+use crate::tasks::workflows::{
+    runners::{self, Platform},
+    steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config},
+    vars,
+};
+
+pub(crate) fn run_agent_evals() -> Workflow {
+    let agent_evals = agent_evals();
+
+    named::workflow()
+        .on(Event::default()
+            .schedule([Schedule::default().cron("0 0 * * *")])
+            .pull_request(PullRequest::default().add_branch("**").types([
+                PullRequestType::Synchronize,
+                PullRequestType::Reopened,
+                PullRequestType::Labeled,
+            ]))
+            .workflow_dispatch(WorkflowDispatch::default()))
+        .concurrency(vars::one_workflow_per_non_main_branch())
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("CARGO_INCREMENTAL", 0))
+        .add_env(("RUST_BACKTRACE", 1))
+        .add_env(("ANTHROPIC_API_KEY", "${{ secrets.ANTHROPIC_API_KEY }}"))
+        .add_env((
+            "ZED_CLIENT_CHECKSUM_SEED",
+            "${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}",
+        ))
+        .add_env(("ZED_EVAL_TELEMETRY", 1))
+        .add_job(agent_evals.name, agent_evals.job)
+}
+
+fn agent_evals() -> NamedJob {
+    fn run_eval() -> Step<Run> {
+        named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1")
+    }
+
+    named::job(
+        Job::default()
+            .cond(Expression::new(indoc::indoc!{r#"
+                github.repository_owner == 'zed-industries' &&
+                (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
+            "#}))
+            .runs_on(runners::LINUX_DEFAULT)
+            .timeout_minutes(60_u32)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::cache_rust_dependencies())
+            .map(steps::install_linux_dependencies)
+            .add_step(setup_cargo_config(Platform::Linux))
+            .add_step(steps::script("cargo build --package=eval"))
+            .add_step(run_eval())
+            .add_step(steps::cleanup_cargo_config(Platform::Linux))
+    )
+}
+
+pub(crate) fn run_unit_evals() -> Workflow {
+    let unit_evals = unit_evals();
+
+    named::workflow()
+        .on(Event::default()
+            .schedule([
+                // GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
+                Schedule::default().cron("47 1 * * 2"),
+            ])
+            .workflow_dispatch(WorkflowDispatch::default()))
+        .concurrency(vars::one_workflow_per_non_main_branch())
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("CARGO_INCREMENTAL", 0))
+        .add_env(("RUST_BACKTRACE", 1))
+        .add_env((
+            "ZED_CLIENT_CHECKSUM_SEED",
+            "${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}",
+        ))
+        .add_job(unit_evals.name, unit_evals.job)
+}
+
+fn unit_evals() -> NamedJob {
+    fn send_failure_to_slack() -> Step<Use> {
+        named::uses(
+            "slackapi",
+            "slack-github-action",
+            "b0fa283ad8fea605de13dc3f449259339835fc52",
+        )
+        .if_condition(Expression::new("${{ failure() }}"))
+        .add_with(("method", "chat.postMessage"))
+        .add_with(("token", "${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}"))
+        .add_with(("payload", indoc::indoc!{r#"
+            channel: C04UDRNNJFQ
+            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+        "#}))
+    }
+
+    named::job(
+        Job::default()
+            .runs_on(runners::LINUX_DEFAULT)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_cargo_config(Platform::Linux))
+            .add_step(steps::cache_rust_dependencies())
+            .map(steps::install_linux_dependencies)
+            .add_step(steps::cargo_install_nextest(Platform::Linux))
+            .add_step(steps::clear_target_dir_if_large(Platform::Linux))
+            .add_step(
+                steps::script("./script/run-unit-evals")
+                    .add_env(("ANTHROPIC_API_KEY", "${{ secrets.ANTHROPIC_API_KEY }}")),
+            )
+            .add_step(send_failure_to_slack())
+            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
+    )
+}

tooling/xtask/src/tasks/workflows/run_bundling.rs πŸ”—

@@ -1,10 +1,11 @@
 use crate::tasks::workflows::{
-    steps::named,
+    steps::{FluentBuilder, NamedJob, dependant_job, named},
     vars::{mac_bundle_envs, windows_bundle_envs},
 };
 
 use super::{runners, steps, vars};
 use gh_workflow::*;
+use indexmap::IndexMap;
 
 pub fn run_bundling() -> Workflow {
     named::workflow()
@@ -22,32 +23,47 @@ pub fn run_bundling() -> Workflow {
         .add_env(("RUST_BACKTRACE", "1"))
         .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
         .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT))
-        .add_job("bundle_mac_x86_64", bundle_mac_job(runners::Arch::X86_64))
-        .add_job("bundle_mac_arm64", bundle_mac_job(runners::Arch::ARM64))
-        .add_job("bundle_linux_x86_64", bundle_linux(runners::Arch::X86_64))
-        .add_job("bundle_linux_arm64", bundle_linux(runners::Arch::ARM64))
+        .add_job(
+            "bundle_mac_x86_64",
+            bundle_mac_job(runners::Arch::X86_64, &[]),
+        )
+        .add_job(
+            "bundle_mac_arm64",
+            bundle_mac_job(runners::Arch::ARM64, &[]),
+        )
+        .add_job(
+            "bundle_linux_x86_64",
+            bundle_linux_job(runners::Arch::X86_64, &[]),
+        )
+        .add_job(
+            "bundle_linux_arm64",
+            bundle_linux_job(runners::Arch::ARM64, &[]),
+        )
         .add_job(
             "bundle_windows_x86_64",
-            bundle_windows_job(runners::Arch::X86_64),
+            bundle_windows_job(runners::Arch::X86_64, &[]),
         )
         .add_job(
             "bundle_windows_arm64",
-            bundle_windows_job(runners::Arch::ARM64),
+            bundle_windows_job(runners::Arch::ARM64, &[]),
         )
 }
 
-fn bundle_job() -> Job {
-    Job::default()
-        .cond(Expression::new(
+fn bundle_job(deps: &[&NamedJob]) -> Job {
+    dependant_job(deps)
+        .when(deps.is_empty(), |job|
+                job.cond(Expression::new(
                 "(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
                  (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))",
-            ))
+            )))
         .timeout_minutes(60u32)
 }
 
-fn bundle_mac_job(arch: runners::Arch) -> Job {
+pub(crate) fn bundle_mac_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job {
     use vars::GITHUB_SHA;
-    bundle_job()
+    let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.dmg");
+    let remote_server_artifact_name = format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz");
+    bundle_job(deps)
         .runs_on(runners::MAC_DEFAULT)
         .envs(mac_bundle_envs())
         .add_step(steps::checkout_repo())
@@ -56,61 +72,79 @@ fn bundle_mac_job(arch: runners::Arch) -> Job {
         .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac))
         .add_step(bundle_mac(arch))
         .add_step(steps::upload_artifact(
-            &format!("Zed_{GITHUB_SHA}-{arch}.dmg"),
+            &artifact_name,
             &format!("target/{arch}-apple-darwin/release/Zed.dmg"),
         ))
         .add_step(steps::upload_artifact(
-            &format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz"),
+            &remote_server_artifact_name,
             &format!("target/zed-remote-server-macos-{arch}.gz"),
         ))
+        .outputs(
+            [
+                ("zed".to_string(), artifact_name),
+                ("remote-server".to_string(), remote_server_artifact_name),
+            ]
+            .into_iter()
+            .collect::<IndexMap<_, _>>(),
+        )
 }
 
 pub fn bundle_mac(arch: runners::Arch) -> Step<Run> {
     named::bash(&format!("./script/bundle-mac {arch}-apple-darwin"))
 }
 
-fn bundle_linux(arch: runners::Arch) -> Job {
+pub(crate) fn bundle_linux_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job {
     let artifact_name = format!("zed-{}-{}.tar.gz", vars::GITHUB_SHA, arch.triple());
     let remote_server_artifact_name = format!(
         "zed-remote-server-{}-{}.tar.gz",
         vars::GITHUB_SHA,
         arch.triple()
     );
-    let mut job = bundle_job()
+    bundle_job(deps)
         .runs_on(arch.linux_bundler())
         .add_step(steps::checkout_repo())
         .add_step(steps::setup_sentry())
-        .add_step(steps::script("./script/linux"));
-    // todo(ci) can we do this on arm too?
-    if arch == runners::Arch::X86_64 {
-        job = job.add_step(steps::script("./script/install-mold"));
-    }
-    job.add_step(steps::script("./script/bundle-linux"))
+        .map(steps::install_linux_dependencies)
+        .add_step(steps::script("./script/bundle-linux"))
         .add_step(steps::upload_artifact(
             &artifact_name,
             "target/release/zed-*.tar.gz",
         ))
         .add_step(steps::upload_artifact(
             &remote_server_artifact_name,
-            "target/release/zed-remote-server-*.tar.gz",
+            "target/zed-remote-server-*.gz",
         ))
+        .outputs(
+            [
+                ("zed".to_string(), artifact_name),
+                ("remote-server".to_string(), remote_server_artifact_name),
+            ]
+            .into_iter()
+            .collect::<IndexMap<_, _>>(),
+        )
 }
 
-fn bundle_windows_job(arch: runners::Arch) -> Job {
+pub(crate) fn bundle_windows_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job {
     use vars::GITHUB_SHA;
-    bundle_job()
+    let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.exe");
+    bundle_job(deps)
         .runs_on(runners::WINDOWS_DEFAULT)
         .envs(windows_bundle_envs())
         .add_step(steps::checkout_repo())
         .add_step(steps::setup_sentry())
         .add_step(bundle_windows(arch))
         .add_step(steps::upload_artifact(
-            &format!("Zed_{GITHUB_SHA}-{arch}.exe"),
+            &artifact_name,
             "${{ env.SETUP_PATH }}",
         ))
+        .outputs(
+            [("zed".to_string(), artifact_name)]
+                .into_iter()
+                .collect::<IndexMap<_, _>>(),
+        )
 }
 
-fn bundle_windows(arch: runners::Arch) -> Step<Run> {
+pub fn bundle_windows(arch: runners::Arch) -> Step<Run> {
     let step = match arch {
         runners::Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"),
         runners::Arch::ARM64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"),
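
The deps parameter threaded through bundle_job decides the gating: called from run_bundling with no dependencies, the job keeps the run-bundling label condition; called from release.rs with the test jobs as dependencies, it only gains a needs: clause. Illustrative calls (the test jobs mirror release.rs above):

    // Standalone bundling workflow: gated on the `run-bundling` PR label.
    let on_label = bundle_mac_job(runners::Arch::ARM64, &[]);

    // Release workflow: no label condition, just `needs:` on the test jobs.
    let macos_tests = run_tests::run_platform_tests(runners::Platform::Mac);
    let check_scripts = run_tests::check_scripts();
    let in_release = bundle_mac_job(runners::Arch::ARM64, &[&macos_tests, &check_scripts]);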

tooling/xtask/src/tasks/workflows/run_tests.rs πŸ”—

@@ -0,0 +1,473 @@
+use gh_workflow::{
+    Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
+};
+use indexmap::IndexMap;
+
+use crate::tasks::workflows::{
+    nix_build::build_nix, runners::Arch, steps::BASH_SHELL, vars::PathCondition,
+};
+
+use super::{
+    runners::{self, Platform},
+    steps::{self, FluentBuilder, NamedJob, named, release_job},
+};
+
+pub(crate) fn run_tests() -> Workflow {
+    // Specify anything which should potentially skip the full test suite in this regex:
+    // - docs/
+    // - script/update_top_ranking_issues/
+    // - .github/ISSUE_TEMPLATE/
+    // - .github/workflows/  (except .github/workflows/run_tests.yml)
+    let should_run_tests = PathCondition::inverted(
+        "run_tests",
+        r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
+    );
+    let should_check_docs = PathCondition::new("run_docs", r"^docs/");
+    let should_check_scripts = PathCondition::new(
+        "run_action_checks",
+        r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/",
+    );
+    let should_check_licences =
+        PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
+    let should_build_nix = PathCondition::new(
+        "run_nix",
+        r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
+    );
+
+    let orchestrate = orchestrate(&[
+        &should_check_scripts,
+        &should_check_docs,
+        &should_check_licences,
+        &should_build_nix,
+        &should_run_tests,
+    ]);
+
+    let jobs = [
+        orchestrate,
+        check_style(),
+        should_run_tests.guard(run_platform_tests(Platform::Windows)),
+        should_run_tests.guard(run_platform_tests(Platform::Linux)),
+        should_run_tests.guard(run_platform_tests(Platform::Mac)),
+        should_run_tests.guard(doctests()),
+        should_run_tests.guard(check_workspace_binaries()),
+        should_run_tests.guard(check_postgres_and_protobuf_migrations()), // could be more specific here?
+        should_run_tests.guard(check_dependencies()), // could be more specific here?
+        should_check_docs.guard(check_docs()),
+        should_check_licences.guard(check_licenses()),
+        should_check_scripts.guard(check_scripts()),
+        should_build_nix.guard(build_nix(
+            Platform::Linux,
+            Arch::X86_64,
+            "debug",
+            // *don't* cache the built output
+            Some("-zed-editor-[0-9.]*-nightly"),
+            &[],
+        )),
+        should_build_nix.guard(build_nix(
+            Platform::Mac,
+            Arch::ARM64,
+            "debug",
+            // *don't* cache the built output
+            Some("-zed-editor-[0-9.]*-nightly"),
+            &[],
+        )),
+    ];
+    let tests_pass = tests_pass(&jobs);
+
+    let mut workflow = named::workflow()
+        .add_event(Event::default()
+            .push(
+                Push::default()
+                    .add_branch("main")
+                    .add_branch("v[0-9]+.[0-9]+.x")
+            )
+            .pull_request(PullRequest::default().add_branch("**"))
+        )
+        .concurrency(Concurrency::default()
+            .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}")
+            .cancel_in_progress(true)
+        )
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_env(("RUST_BACKTRACE", 1))
+        .add_env(("CARGO_INCREMENTAL", 0));
+    for job in jobs {
+        workflow = workflow.add_job(job.name, job.job)
+    }
+    workflow.add_job(tests_pass.name, tests_pass.job)
+}
+
+// Generates a bash script that checks changed files against regex patterns
+// and sets GitHub output variables accordingly
+fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
+    let name = "orchestrate".to_owned();
+    let step_name = "filter".to_owned();
+    let mut script = String::new();
+
+    script.push_str(indoc::indoc! {r#"
+        if [ -z "$GITHUB_BASE_REF" ]; then
+          echo "Not in a PR context (i.e., push to main/stable/preview)"
+          COMPARE_REV="$(git rev-parse HEAD~1)"
+        else
+          echo "In a PR context comparing to pull_request.base.ref"
+          git fetch origin "$GITHUB_BASE_REF" --depth=350
+          COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
+        fi
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+
+        check_pattern() {
+          local output_name="$1"
+          local pattern="$2"
+          local grep_arg="$3"
+
+          echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
+            echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
+            echo "${output_name}=false" >> "$GITHUB_OUTPUT"
+        }
+
+    "#});
+
+    let mut outputs = IndexMap::new();
+
+    for rule in rules {
+        assert!(
+            rule.set_by_step
+                .borrow_mut()
+                .replace(name.clone())
+                .is_none()
+        );
+        assert!(
+            outputs
+                .insert(
+                    rule.name.to_owned(),
+                    format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
+                )
+                .is_none()
+        );
+
+        let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
+        script.push_str(&format!(
+            "check_pattern \"{}\" '{}' {}\n",
+            rule.name, rule.pattern, grep_arg
+        ));
+    }
+
+    let job = Job::default()
+        .runs_on(runners::LINUX_SMALL)
+        .cond(Expression::new(
+            "github.repository_owner == 'zed-industries'",
+        ))
+        .outputs(outputs)
+        .add_step(steps::checkout_repo().add_with((
+            "fetch-depth",
+            "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
+        )))
+        .add_step(
+            Step::new(step_name.clone())
+                .run(script)
+                .id(step_name)
+                .shell(BASH_SHELL),
+        );
+
+    NamedJob { name, job }
+}
+
+pub(crate) fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
+    let mut script = String::from(indoc::indoc! {r#"
+        set +x
+        EXIT_CODE=0
+
+        check_result() {
+          echo "* $1: $2"
+          if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
+        }
+
+    "#});
+
+    script.push_str(
+        &jobs
+            .iter()
+            .map(|job| {
+                format!(
+                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
+                    job.name, job.name
+                )
+            })
+            .collect::<Vec<_>>()
+            .join("\n"),
+    );
+
+    script.push_str("\n\nexit $EXIT_CODE\n");
+
+    let job = Job::default()
+        .runs_on(runners::LINUX_SMALL)
+        .needs(
+            jobs.iter()
+                .map(|j| j.name.to_string())
+                .collect::<Vec<String>>(),
+        )
+        .cond(Expression::new(
+            "github.repository_owner == 'zed-industries' && always()",
+        ))
+        .add_step(named::bash(&script));
+
+    named::job(job)
+}
+
+fn check_style() -> NamedJob {
+    fn check_for_typos() -> Step<Use> {
+        named::uses(
+            "crate-ci",
+            "typos",
+            "80c8a4945eec0f6d464eaf9e65ed98ef085283d1",
+        ) // v1.38.1
+        .with(("config", "./typos.toml"))
+    }
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_MEDIUM)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_pnpm())
+            .add_step(steps::script("./script/prettier"))
+            .add_step(steps::script("./script/check-todos"))
+            .add_step(steps::script("./script/check-keymaps"))
+            .add_step(check_for_typos())
+            .add_step(steps::cargo_fmt()),
+    )
+}
+
+fn check_dependencies() -> NamedJob {
+    fn install_cargo_machete() -> Step<Use> {
+        named::uses(
+            "clechasseur",
+            "rs-cargo",
+            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
+        )
+        .add_with(("command", "install"))
+        .add_with(("args", "cargo-machete@0.7.0"))
+    }
+
+    fn run_cargo_machete() -> Step<Use> {
+        named::uses(
+            "clechasseur",
+            "rs-cargo",
+            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
+        )
+        .add_with(("command", "machete"))
+    }
+
+    fn check_cargo_lock() -> Step<Run> {
+        named::bash("cargo update --locked --workspace")
+    }
+
+    fn check_vulnerable_dependencies() -> Step<Use> {
+        named::uses(
+            "actions",
+            "dependency-review-action",
+            "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
+        )
+        .if_condition(Expression::new("github.event_name == 'pull_request'"))
+        .with(("license-check", false))
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(steps::checkout_repo())
+            .add_step(install_cargo_machete())
+            .add_step(run_cargo_machete())
+            .add_step(check_cargo_lock())
+            .add_step(check_vulnerable_dependencies()),
+    )
+}
+
+fn check_workspace_binaries() -> NamedJob {
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_LARGE)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_cargo_config(Platform::Linux))
+            .map(steps::install_linux_dependencies)
+            .add_step(steps::script("cargo build -p collab"))
+            .add_step(steps::script("cargo build --workspace --bins --examples"))
+            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
+    )
+}
+
+pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
+    let runner = match platform {
+        Platform::Windows => runners::WINDOWS_DEFAULT,
+        Platform::Linux => runners::LINUX_DEFAULT,
+        Platform::Mac => runners::MAC_DEFAULT,
+    };
+    NamedJob {
+        name: format!("run_tests_{platform}"),
+        job: release_job(&[])
+            .runs_on(runner)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_cargo_config(platform))
+            .when(
+                platform == Platform::Linux,
+                steps::install_linux_dependencies,
+            )
+            .add_step(steps::setup_node())
+            .add_step(steps::clippy(platform))
+            .add_step(steps::cargo_install_nextest(platform))
+            .add_step(steps::clear_target_dir_if_large(platform))
+            .add_step(steps::cargo_nextest(platform))
+            .add_step(steps::cleanup_cargo_config(platform)),
+    }
+}
+
+pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
+    fn remove_untracked_files() -> Step<Run> {
+        named::bash("git clean -df")
+    }
+
+    fn ensure_fresh_merge() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            if [ -z "$GITHUB_BASE_REF" ];
+            then
+              echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
+            else
+              git checkout -B temp
+              git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
+              echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
+            fi
+        "#})
+    }
+
+    fn bufbuild_setup_action() -> Step<Use> {
+        named::uses("bufbuild", "buf-setup-action", "v1").add_with(("version", "v1.29.0"))
+    }
+
+    fn bufbuild_breaking_action() -> Step<Use> {
+        named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
+            .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::MAC_DEFAULT)
+            .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
+            .add_step(remove_untracked_files())
+            .add_step(ensure_fresh_merge())
+            .add_step(bufbuild_setup_action())
+            .add_step(bufbuild_breaking_action()),
+    )
+}
+
+fn doctests() -> NamedJob {
+    fn run_doctests() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            cargo test --workspace --doc --no-fail-fast
+        "#})
+        .id("run_doctests")
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_DEFAULT)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::cache_rust_dependencies())
+            .map(steps::install_linux_dependencies)
+            .add_step(steps::setup_cargo_config(Platform::Linux))
+            .add_step(run_doctests())
+            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
+    )
+}
+
+fn check_licenses() -> NamedJob {
+    named::job(
+        Job::default()
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::script("./script/check-licenses"))
+            .add_step(steps::script("./script/generate-licenses")),
+    )
+}
+
+fn check_docs() -> NamedJob {
+    fn lychee_link_check(dir: &str) -> Step<Use> {
+        named::uses(
+            "lycheeverse",
+            "lychee-action",
+            "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
+        ) // v2.4.1
+        .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
+        .add_with(("fail", true))
+        .add_with(("jobSummary", false))
+    }
+
+    fn install_mdbook() -> Step<Use> {
+        named::uses(
+            "peaceiris",
+            "actions-mdbook",
+            "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
+        )
+        .with(("mdbook-version", "0.4.37"))
+    }
+
+    fn build_docs() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            mkdir -p target/deploy
+            mdbook build ./docs --dest-dir=../target/deploy/docs/
+        "#})
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_LARGE)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_cargo_config(Platform::Linux))
+            // todo(ci): un-inline build_docs/action.yml here
+            .add_step(steps::cache_rust_dependencies())
+            .add_step(
+                lychee_link_check("./docs/src/**/*"), // check markdown links
+            )
+            .map(steps::install_linux_dependencies)
+            .add_step(install_mdbook())
+            .add_step(build_docs())
+            .add_step(
+                lychee_link_check("target/deploy/docs"), // check links in generated html
+            ),
+    )
+}
+
+pub(crate) fn check_scripts() -> NamedJob {
+    fn download_actionlint() -> Step<Run> {
+        named::bash(
+            "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
+        )
+    }
+
+    fn run_actionlint() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            ${{ steps.get_actionlint.outputs.executable }} -color
+        "#})
+    }
+
+    fn run_shellcheck() -> Step<Run> {
+        named::bash("./script/shellcheck-scripts error")
+    }
+
+    fn check_xtask_workflows() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            cargo xtask workflows
+            if ! git diff --exit-code .github; then
+              echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
+              echo "Please run 'cargo xtask workflows' locally and commit the changes"
+              exit 1
+            fi
+        "#})
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(steps::checkout_repo())
+            .add_step(run_shellcheck())
+            .add_step(download_actionlint().id("get_actionlint"))
+            .add_step(run_actionlint())
+            .add_step(check_xtask_workflows()),
+    )
+}
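
As a concrete example of what the filter step ends up containing, the loop above turns should_check_docs into one check_pattern call plus a matching job output; a small self-contained check of that formatting logic:

    let rule_name = "run_docs"; // from should_check_docs
    let pattern = r"^docs/";
    let grep_arg = "-qP"; // "-qvP" for inverted rules like should_run_tests
    let line = format!("check_pattern \"{}\" '{}' {}\n", rule_name, pattern, grep_arg);
    assert_eq!(line, "check_pattern \"run_docs\" '^docs/' -qP\n");

    let output = format!("${{{{ steps.{}.outputs.{} }}}}", "filter", rule_name);
    assert_eq!(output, "${{ steps.filter.outputs.run_docs }}");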

tooling/xtask/src/tasks/workflows/runners.rs πŸ”—

@@ -1,5 +1,8 @@
-pub const LINUX_CHEAP: Runner = Runner("namespace-profile-2x4-ubuntu-2404");
-pub const LINUX_DEFAULT: Runner = Runner("namespace-profile-16x32-ubuntu-2204");
+pub const LINUX_SMALL: Runner = Runner("namespace-profile-2x4-ubuntu-2404");
+pub const LINUX_DEFAULT: Runner = LINUX_XL;
+pub const LINUX_XL: Runner = Runner("namespace-profile-16x32-ubuntu-2204");
+pub const LINUX_LARGE: Runner = Runner("namespace-profile-8x16-ubuntu-2204");
+pub const LINUX_MEDIUM: Runner = Runner("namespace-profile-4x8-ubuntu-2204");
 
 // Using Ubuntu 20.04 for minimal glibc version
 pub const LINUX_X86_BUNDLER: Runner = Runner("namespace-profile-32x64-ubuntu-2004");

tooling/xtask/src/tasks/workflows/steps.rs πŸ”—

@@ -2,9 +2,9 @@ use gh_workflow::*;
 
 use crate::tasks::workflows::{runners::Platform, vars};
 
-const BASH_SHELL: &str = "bash -euxo pipefail {0}";
+pub const BASH_SHELL: &str = "bash -euxo pipefail {0}";
 // https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idstepsshell
-const PWSH_SHELL: &str = "pwsh";
+pub const PWSH_SHELL: &str = "pwsh";
 
 pub fn checkout_repo() -> Step<Use> {
     named::uses(
@@ -95,6 +95,7 @@ pub fn upload_artifact(name: &str, path: &str) -> Step<Use> {
         )
         .add_with(("name", name))
         .add_with(("path", path))
+        .add_with(("if-no-files-found", "error"))
 }
 
 pub fn clear_target_dir_if_large(platform: Platform) -> Step<Run> {
@@ -105,6 +106,34 @@ pub fn clear_target_dir_if_large(platform: Platform) -> Step<Run> {
     }
 }
 
+pub(crate) fn clippy(platform: Platform) -> Step<Run> {
+    match platform {
+        Platform::Windows => named::pwsh("./script/clippy.ps1"),
+        _ => named::bash("./script/clippy"),
+    }
+}
+
+pub(crate) fn cache_rust_dependencies() -> Step<Use> {
+    named::uses(
+        "swatinem",
+        "rust-cache",
+        "9d47c6ad4b02e050fd481d890b2ea34778fd09d6", // v2
+    )
+    .with(("save-if", "${{ github.ref == 'refs/heads/main' }}"))
+}
+
+fn setup_linux() -> Step<Run> {
+    named::bash("./script/linux")
+}
+
+fn install_mold() -> Step<Run> {
+    named::bash("./script/install-mold")
+}
+
+pub(crate) fn install_linux_dependencies(job: Job) -> Job {
+    job.add_step(setup_linux()).add_step(install_mold())
+}
+
 pub fn script(name: &str) -> Step<Run> {
     if name.ends_with(".ps1") {
         Step::new(name).run(name).shell(PWSH_SHELL)
@@ -118,6 +147,91 @@ pub(crate) struct NamedJob {
     pub job: Job,
 }
 
+// impl NamedJob {
+//     pub fn map(self, f: impl FnOnce(Job) -> Job) -> Self {
+//         NamedJob {
+//             name: self.name,
+//             job: f(self.job),
+//         }
+//     }
+// }
+
+pub(crate) fn release_job(deps: &[&NamedJob]) -> Job {
+    dependant_job(deps)
+        .cond(Expression::new(
+            "github.repository_owner == 'zed-industries'",
+        ))
+        .timeout_minutes(60u32)
+}
+
+pub(crate) fn dependant_job(deps: &[&NamedJob]) -> Job {
+    let job = Job::default();
+    if !deps.is_empty() {
+        job.needs(deps.iter().map(|j| j.name.clone()).collect::<Vec<_>>())
+    } else {
+        job
+    }
+}
+
+impl FluentBuilder for Job {}
+impl FluentBuilder for Workflow {}
+
+/// A helper trait for building complex objects with imperative conditionals in a fluent style.
+/// Copied from GPUI to avoid adding GPUI as dependency
+/// todo(ci) just put this in gh-workflow
+#[allow(unused)]
+pub(crate) trait FluentBuilder {
+    /// Imperatively modify self with the given closure.
+    fn map<U>(self, f: impl FnOnce(Self) -> U) -> U
+    where
+        Self: Sized,
+    {
+        f(self)
+    }
+
+    /// Conditionally modify self with the given closure.
+    fn when(self, condition: bool, then: impl FnOnce(Self) -> Self) -> Self
+    where
+        Self: Sized,
+    {
+        self.map(|this| if condition { then(this) } else { this })
+    }
+
+    /// Conditionally modify self with the given closure.
+    fn when_else(
+        self,
+        condition: bool,
+        then: impl FnOnce(Self) -> Self,
+        else_fn: impl FnOnce(Self) -> Self,
+    ) -> Self
+    where
+        Self: Sized,
+    {
+        self.map(|this| if condition { then(this) } else { else_fn(this) })
+    }
+
+    /// Conditionally unwrap and modify self with the given closure, if the given option is Some.
+    fn when_some<T>(self, option: Option<T>, then: impl FnOnce(Self, T) -> Self) -> Self
+    where
+        Self: Sized,
+    {
+        self.map(|this| {
+            if let Some(value) = option {
+                then(this, value)
+            } else {
+                this
+            }
+        })
+    }
+    /// Conditionally unwrap and modify self with the given closure, if the given option is None.
+    fn when_none<T>(self, option: &Option<T>, then: impl FnOnce(Self) -> Self) -> Self
+    where
+        Self: Sized,
+    {
+        self.map(|this| if option.is_some() { this } else { then(this) })
+    }
+}
+
 // (janky) helper to generate steps with a name that corresponds
 // to the name of the calling function.
 pub(crate) mod named {
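
The FluentBuilder trait above is what keeps the job builders on a single chain even when a helper adds several steps or only applies to some platforms. A minimal illustrative use, with helper names as defined in this file (mirrors run_platform_tests in run_tests.rs):

    fn example(platform: Platform, deps: &[&NamedJob]) -> Job {
        dependant_job(deps)
            .add_step(checkout_repo())
            .add_step(setup_cargo_config(platform))
            // install_linux_dependencies is a Job -> Job helper that adds two steps,
            // so it is spliced in with `when` rather than `add_step`:
            .when(platform == Platform::Linux, install_linux_dependencies)
            .add_step(cargo_nextest(platform))
            .add_step(cleanup_cargo_config(platform))
    }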

tooling/xtask/src/tasks/workflows/vars.rs πŸ”—

@@ -1,4 +1,8 @@
-use gh_workflow::{Env, WorkflowCallInput};
+use std::cell::RefCell;
+
+use gh_workflow::{Concurrency, Env, Expression};
+
+use crate::tasks::workflows::steps::NamedJob;
 
 macro_rules! secret {
     ($secret_name:ident) => {
@@ -12,10 +16,6 @@ macro_rules! var {
     };
 }
 
-pub fn input(name: &str, input: WorkflowCallInput) -> (String, (&str, WorkflowCallInput)) {
-    return (format!("${{{{ inputs.{name} }}}}"), (name, input));
-}
-
 secret!(APPLE_NOTARIZATION_ISSUER_ID);
 secret!(APPLE_NOTARIZATION_KEY);
 secret!(APPLE_NOTARIZATION_KEY_ID);
@@ -61,3 +61,52 @@ pub fn windows_bundle_envs() -> Env {
         .add("TIMESTAMP_DIGEST", "SHA256")
         .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com")
 }
+
+pub(crate) fn one_workflow_per_non_main_branch() -> Concurrency {
+    Concurrency::default()
+        .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}")
+        .cancel_in_progress(true)
+}
+
+// Represents a pattern to check for changed files and corresponding output variable
+pub(crate) struct PathCondition {
+    pub name: &'static str,
+    pub pattern: &'static str,
+    pub invert: bool,
+    pub set_by_step: RefCell<Option<String>>,
+}
+impl PathCondition {
+    pub fn new(name: &'static str, pattern: &'static str) -> Self {
+        Self {
+            name,
+            pattern,
+            invert: false,
+            set_by_step: Default::default(),
+        }
+    }
+    pub fn inverted(name: &'static str, pattern: &'static str) -> Self {
+        Self {
+            name,
+            pattern,
+            invert: true,
+            set_by_step: Default::default(),
+        }
+    }
+    pub fn guard(&self, job: NamedJob) -> NamedJob {
+        let set_by_step = self
+            .set_by_step
+            .borrow()
+            .clone()
+            .unwrap_or_else(|| panic!("condition {} is never set", self.name));
+        NamedJob {
+            name: job.name,
+            job: job
+                .job
+                .add_needs(set_by_step.clone())
+                .cond(Expression::new(format!(
+                    "needs.{}.outputs.{} == 'true'",
+                    &set_by_step, self.name
+                ))),
+        }
+    }
+}
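
Ordering matters here: orchestrate() must register a condition (filling set_by_step) before guard() is called on it, otherwise the unwrap panics. Mirroring run_tests.rs, the wiring looks like this:

    let should_check_docs = PathCondition::new("run_docs", r"^docs/");
    // orchestrate() asserts the rule is not yet registered, records its own job
    // name in set_by_step, and emits the matching check_pattern line and output.
    let orchestrate = orchestrate(&[&should_check_docs]);
    // guard() then adds `needs: orchestrate` and
    // `if: needs.orchestrate.outputs.run_docs == 'true'` to the docs job.
    let docs = should_check_docs.guard(check_docs());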