Merge branch 'main' into never-change-a-running-system

MrSubidubi created

Change summary

.github/ISSUE_TEMPLATE/10_bug_report.yml                   |    2 
.github/workflows/autofix_pr.yml                           |    5 
.github/workflows/cherry_pick.yml                          |    6 
.github/workflows/compliance_check.yml                     |    5 
.github/workflows/release.yml                              |   12 
.github/workflows/run_tests.yml                            |   11 
Cargo.lock                                                 |  300 
Cargo.toml                                                 |    6 
assets/keymaps/default-linux.json                          |    2 
assets/keymaps/default-macos.json                          |    1 
assets/keymaps/default-windows.json                        |    1 
assets/keymaps/vim.json                                    |    5 
assets/settings/default.json                               |    9 
crates/action_log/src/action_log.rs                        |    9 
crates/agent/src/tools/streaming_edit_file_tool.rs         |    4 
crates/agent/src/tools/web_search_tool.rs                  |    2 
crates/agent_ui/src/agent_diff.rs                          |    1 
crates/agent_ui/src/agent_panel.rs                         |  479 ++
crates/agent_ui/src/conversation_view/thread_view.rs       |   18 
crates/agent_ui/src/inline_assistant.rs                    |  179 -
crates/agent_ui/src/message_editor.rs                      |    1 
crates/agent_ui/src/thread_import.rs                       |  124 
crates/agent_ui/src/thread_metadata_store.rs               |  686 +++
crates/agent_ui/src/thread_worktree_archive.rs             |    4 
crates/agent_ui/src/threads_archive_view.rs                |   59 
crates/anthropic/src/anthropic.rs                          |    2 
crates/client/src/test.rs                                  |    1 
crates/client/src/user.rs                                  |   12 
crates/cloud_api_types/src/cloud_api_types.rs              |   16 
crates/collab_ui/src/collab_panel.rs                       |   22 
crates/debugger_ui/src/session/running/console.rs          |    1 
crates/deepseek/src/deepseek.rs                            |    2 
crates/dev_container/Cargo.toml                            |    1 
crates/dev_container/src/devcontainer_json.rs              |   41 
crates/dev_container/src/devcontainer_manifest.rs          |  516 ++
crates/dev_container/src/docker.rs                         |   42 
crates/diagnostics/src/diagnostics.rs                      |    1 
crates/edit_prediction/src/edit_prediction.rs              |    8 
crates/edit_prediction/src/metrics/kept_rate.rs            |  245 
crates/edit_prediction/src/zed_edit_prediction_delegate.rs |   42 
crates/edit_prediction_cli/src/example.rs                  |    8 
crates/edit_prediction_cli/src/score.rs                    |  124 
crates/edit_prediction_types/src/edit_prediction_types.rs  |    9 
crates/edit_prediction_ui/src/edit_prediction_button.rs    |    2 
crates/editor/src/editor.rs                                |   13 
crates/editor/src/editor_settings.rs                       |    2 
crates/editor/src/element.rs                               |   20 
crates/feature_flags/src/flags.rs                          |    2 
crates/fs/Cargo.toml                                       |    6 
crates/fs/src/fs.rs                                        |  415 +
crates/fs/tests/integration/fs.rs                          |  200 +
crates/git/src/blame.rs                                    |    2 
crates/git/src/repository.rs                               |    5 
crates/google_ai/src/google_ai.rs                          |    2 
crates/gpui/src/window.rs                                  |   32 
crates/gpui_linux/src/linux/x11/client.rs                  |   17 
crates/gpui_linux/src/linux/x11/window.rs                  |   31 
crates/gpui_macos/src/platform.rs                          |    1 
crates/inspector_ui/src/div_inspector.rs                   |    1 
crates/keymap_editor/src/keymap_editor.rs                  |    1 
crates/markdown/src/markdown.rs                            |  190 
crates/markdown/src/mermaid.rs                             |    2 
crates/markdown/src/parser.rs                              |  232 +
crates/markdown_preview/src/markdown_preview_view.rs       |   70 
crates/migrator/src/migrations.rs                          |    6 
crates/migrator/src/migrations/m_2026_04_10/settings.rs    |   64 
crates/migrator/src/migrator.rs                            |  298 +
crates/mistral/src/mistral.rs                              |    2 
crates/open_ai/src/open_ai.rs                              |    2 
crates/project/src/project.rs                              |   59 
crates/project_panel/Cargo.toml                            |    2 
crates/project_panel/src/project_panel.rs                  |  162 
crates/project_panel/src/project_panel_tests.rs            |  568 ---
crates/project_panel/src/tests.rs                          |    1 
crates/project_panel/src/tests/undo.rs                     |  384 ++
crates/project_panel/src/undo.rs                           |  740 +++-
crates/project_symbols/src/project_symbols.rs              |  111 
crates/recent_projects/src/recent_projects.rs              |   57 
crates/remote_connection/src/remote_connection.rs          |   17 
crates/repl/src/notebook/cell.rs                           |    3 
crates/settings/src/vscode_import.rs                       |    1 
crates/settings_content/src/agent.rs                       |    4 
crates/settings_content/src/editor.rs                      |    5 
crates/settings_content/src/language_model.rs              |    6 
crates/settings_ui/src/page_data.rs                        |   30 
crates/settings_ui/src/pages/tool_permissions_setup.rs     |    6 
crates/settings_ui/src/settings_ui.rs                      |    1 
crates/sidebar/src/sidebar.rs                              |  507 ++-
crates/sidebar/src/sidebar_tests.rs                        | 1306 ++++++-
crates/story/Cargo.toml                                    |   17 
crates/story/LICENSE-GPL                                   |    1 
crates/story/src/story.rs                                  |  209 -
crates/storybook/Cargo.toml                                |   41 
crates/storybook/LICENSE-GPL                               |    1 
crates/storybook/build.rs                                  |    9 
crates/storybook/docs/thoughts.md                          |   57 
crates/storybook/src/actions.rs                            |    2 
crates/storybook/src/app_menus.rs                          |    7 
crates/storybook/src/assets.rs                             |   32 
crates/storybook/src/stories.rs                            |   23 
crates/storybook/src/stories/auto_height_editor.rs         |   36 
crates/storybook/src/stories/cursor.rs                     |  109 
crates/storybook/src/stories/focus.rs                      |  123 
crates/storybook/src/stories/indent_guides.rs              |   82 
crates/storybook/src/stories/kitchen_sink.rs               |   32 
crates/storybook/src/stories/overflow_scroll.rs            |   41 
crates/storybook/src/stories/picker.rs                     |  206 -
crates/storybook/src/stories/scroll.rs                     |   52 
crates/storybook/src/stories/text.rs                       |  120 
crates/storybook/src/stories/viewport_units.rs             |   32 
crates/storybook/src/stories/with_rem_size.rs              |   61 
crates/storybook/src/story_selector.rs                     |  109 
crates/storybook/src/storybook.rs                          |  162 
crates/terminal_view/src/terminal_view.rs                  |    4 
crates/theme/Cargo.toml                                    |    1 
crates/theme/src/registry.rs                               |   17 
crates/theme/src/theme.rs                                  |   27 
crates/theme_settings/src/settings.rs                      |    3 
crates/theme_settings/src/theme_settings.rs                |   19 
crates/title_bar/Cargo.toml                                |    3 
crates/title_bar/src/stories/application_menu.rs           |   29 
crates/title_bar/src/title_bar.rs                          |    6 
crates/title_bar/src/update_version.rs                     |    9 
crates/ui/Cargo.toml                                       |    2 
crates/ui/src/components.rs                                |    6 
crates/ui/src/components/stories/context_menu.rs           |   81 
crates/util/src/disambiguate.rs                            |  202 +
crates/util/src/markdown.rs                                |  108 
crates/util/src/util.rs                                    |    1 
crates/vercel/src/vercel.rs                                |    2 
crates/workspace/src/multi_workspace.rs                    |  226 +
crates/workspace/src/multi_workspace_tests.rs              |  154 
crates/workspace/src/pane.rs                               |   33 
crates/workspace/src/workspace.rs                          |    2 
crates/worktree/src/worktree.rs                            |  133 
crates/worktree/tests/integration/main.rs                  |    9 
crates/x_ai/src/x_ai.rs                                    |    2 
crates/zed/src/visual_test_runner.rs                       |  294 +
crates/zed/src/zed.rs                                      |   64 
docs/.doc-examples/reference.md                            |    2 
docs/src/ai/tool-permissions.md                            |    2 
docs/src/ai/tools.md                                       |    2 
docs/src/migrate/intellij.md                               |    2 
docs/src/migrate/pycharm.md                                |    2 
docs/src/migrate/rustrover.md                              |    2 
docs/src/migrate/webstorm.md                               |    2 
docs/src/reference/all-settings.md                         |   10 
docs/src/vim.md                                            |    2 
tooling/xtask/src/tasks/workflows/autofix_pr.rs            |    9 
tooling/xtask/src/tasks/workflows/cherry_pick.rs           |   12 
tooling/xtask/src/tasks/workflows/compliance_check.rs      |   43 
tooling/xtask/src/tasks/workflows/release.rs               |  139 
tooling/xtask/src/tasks/workflows/run_tests.rs             |   20 
tooling/xtask/src/tasks/workflows/vars.rs                  |    2 
154 files changed, 7,475 insertions(+), 4,389 deletions(-)

Detailed changes

.github/ISSUE_TEMPLATE/10_bug_report.yml 🔗

@@ -101,7 +101,7 @@ body:
       placeholder: |
         - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.)
         - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5)
-        - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
+        - Mode: (Agent Panel, Inline Assistant, or Terminal Assistant)
         - Other details (ACPs, MCPs, other settings, etc.):
     validations:
       required: false

.github/workflows/autofix_pr.yml 🔗

@@ -45,10 +45,9 @@ jobs:
         version: '9'
     - name: autofix_pr::run_autofix::install_cargo_machete
       if: ${{ inputs.run_clippy }}
-      uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
+      uses: taiki-e/install-action@02cc5f8ca9f2301050c0c099055816a41ee05507
       with:
-        command: install
-        args: cargo-machete@0.7.0
+        tool: cargo-machete@0.7.0
     - name: autofix_pr::run_autofix::run_cargo_fix
       if: ${{ inputs.run_clippy }}
       run: cargo fix --workspace --release --all-targets --all-features --allow-dirty --allow-staged

.github/workflows/cherry_pick.yml 🔗

@@ -44,8 +44,10 @@ jobs:
         BRANCH: ${{ inputs.branch }}
         COMMIT: ${{ inputs.commit }}
         CHANNEL: ${{ inputs.channel }}
-        GIT_COMMITTER_NAME: Zed Zippy
-        GIT_COMMITTER_EMAIL: hi@zed.dev
+        GIT_AUTHOR_NAME: zed-zippy[bot]
+        GIT_AUTHOR_EMAIL: <234243425+zed-zippy[bot]@users.noreply.github.com>
+        GIT_COMMITTER_NAME: zed-zippy[bot]
+        GIT_COMMITTER_EMAIL: <234243425+zed-zippy[bot]@users.noreply.github.com>
         GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
 defaults:
   run:

.github/workflows/compliance_check.yml 🔗

@@ -34,13 +34,14 @@ jobs:
         echo "Checking compliance for $TAG"
         echo "tag=$TAG" >> "$GITHUB_OUTPUT"
     - id: run-compliance-check
-      name: compliance_check::scheduled_compliance_check::run_compliance_check
+      name: release::add_compliance_steps::run_compliance_check
       run: |
         cargo xtask compliance "$LATEST_TAG" --branch main --report-path "compliance-report-${GITHUB_REF_NAME}.md"
       env:
-        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+      continue-on-error: true
     - name: '@actions/upload-artifact compliance-report-${{ github.ref_name }}.md'
       if: always()
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4

.github/workflows/release.yml 🔗

@@ -307,7 +307,7 @@ jobs:
         cache: rust
         path: ~/.rustup
     - id: run-compliance-check
-      name: release::run_compliance_check
+      name: release::add_compliance_steps::run_compliance_check
       run: |
         cargo xtask compliance "$GITHUB_REF_NAME" --report-path "compliance-report-${GITHUB_REF_NAME}.md"
       env:
@@ -328,7 +328,7 @@ jobs:
             STATUS="✅ Compliance check passed for $COMPLIANCE_TAG"
             MESSAGE=$(printf "%s\n\nReport: %s" "$STATUS" "$ARTIFACT_URL")
         else
-            STATUS="❌ Compliance check failed for $COMPLIANCE_TAG"
+            STATUS="❌ Preliminary compliance check failed (but this can still be fixed while the builds are running!) for $COMPLIANCE_TAG"
             MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s" "$STATUS" "$ARTIFACT_URL" "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22")
         fi
 
@@ -340,6 +340,8 @@ jobs:
         COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
         COMPLIANCE_TAG: ${{ github.ref_name }}
         ARTIFACT_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts
+    outputs:
+      outcome: ${{ steps.run-compliance-check.outputs.outcome }}
     timeout-minutes: 60
   bundle_linux_aarch64:
     needs:
@@ -641,6 +643,7 @@ jobs:
   validate_release_assets:
     needs:
     - upload_release_assets
+    - compliance_check
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: release::validate_release_assets
@@ -673,13 +676,12 @@ jobs:
         cache: rust
         path: ~/.rustup
     - id: run-compliance-check
-      name: release::run_compliance_check
+      name: release::add_compliance_steps::run_compliance_check
       run: |
         cargo xtask compliance "$GITHUB_REF_NAME" --report-path "compliance-report-${GITHUB_REF_NAME}.md"
       env:
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
-      continue-on-error: true
     - name: '@actions/upload-artifact compliance-report-${{ github.ref_name }}.md'
       if: always()
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
@@ -689,7 +691,7 @@ jobs:
         if-no-files-found: error
         overwrite: true
     - name: send_compliance_slack_notification
-      if: always()
+      if: failure() || needs.compliance_check.outputs.outcome != 'success'
       run: |
         if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
             STATUS="✅ Compliance check passed for $COMPLIANCE_TAG"

.github/workflows/run_tests.yml 🔗

@@ -80,7 +80,7 @@ jobs:
 
           # If assets/ changed, add crates that depend on those assets
           if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
-            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
+            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u)
           fi
 
           # Combine all changed packages
@@ -618,14 +618,11 @@ jobs:
         cache: rust
         path: ~/.rustup
     - name: run_tests::check_dependencies::install_cargo_machete
-      uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
+      uses: taiki-e/install-action@02cc5f8ca9f2301050c0c099055816a41ee05507
       with:
-        command: install
-        args: cargo-machete@0.7.0
+        tool: cargo-machete@0.7.0
     - name: run_tests::check_dependencies::run_cargo_machete
-      uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
-      with:
-        command: machete
+      run: cargo machete
     - name: run_tests::check_dependencies::check_cargo_lock
       run: cargo update --locked --workspace
     - name: run_tests::check_dependencies::check_vulnerable_dependencies

Cargo.lock 🔗

@@ -3418,19 +3418,6 @@ dependencies = [
  "crossbeam-utils",
 ]
 
-[[package]]
-name = "console"
-version = "0.15.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
-dependencies = [
- "encode_unicode",
- "libc",
- "once_cell",
- "unicode-width",
- "windows-sys 0.59.0",
-]
-
 [[package]]
 name = "console_error_panic_hook"
 version = "0.1.7"
@@ -3877,36 +3864,36 @@ dependencies = [
 
 [[package]]
 name = "cranelift-assembler-x64"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba33ddc4e157cb1abe9da6c821e8824f99e56d057c2c22536850e0141f281d61"
+checksum = "c8056d63fef9a6f88a1e7aae52bb08fcf48de8866d514c0dc52feb15975f5db5"
 dependencies = [
  "cranelift-assembler-x64-meta",
 ]
 
 [[package]]
 name = "cranelift-assembler-x64-meta"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69b23dd6ea360e6fb28a3f3b40b7f126509668f58076a4729b2cfd656f26a0ad"
+checksum = "57d063b40884a0d733223a45c5de1155395af4393cf7f900d5be8e2cbc094015"
 dependencies = [
  "cranelift-srcgen",
 ]
 
 [[package]]
 name = "cranelift-bforest"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9d81afcee8fe27ee2536987df3fadcb2e161af4edb7dbe3ef36838d0ce74382"
+checksum = "3c3add2881bae2d55cd7162906988dd70053cb7ece865ad793a6754b04d47df6"
 dependencies = [
  "cranelift-entity",
 ]
 
 [[package]]
 name = "cranelift-bitset"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb33595f1279fe7af03b28245060e9085caf98b10ed3137461a85796eb83972a"
+checksum = "dd73e32bc1ea4bddc4c770760c66fa24b2890991b0561af554219e603fcd7c34"
 dependencies = [
  "serde",
  "serde_derive",
@@ -3914,9 +3901,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-codegen"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0230a6ac0660bfe31eb244cbb43dcd4f2b3c1c4e0addc3e0348c6053ea60272e"
+checksum = "3e1da85f2636fe28244848861d1ed0f8dccdc6e98fc5db31aa5eb8878e7ff617"
 dependencies = [
  "bumpalo",
  "cranelift-assembler-x64",
@@ -3944,9 +3931,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-codegen-meta"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96d6817fdc15cb8f236fc9d8e610767d3a03327ceca4abff7a14d8e2154c405e"
+checksum = "ee3c8aba9d89832df27364b2e79dc2fe288daf4bd6c7347829e7f3f258ea5650"
 dependencies = [
  "cranelift-assembler-x64-meta",
  "cranelift-codegen-shared",
@@ -3957,24 +3944,24 @@ dependencies = [
 
 [[package]]
 name = "cranelift-codegen-shared"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0403796328e9e2e7df2b80191cdbb473fd9ea3889eb45ef5632d0fef168ea032"
+checksum = "ac9a9b09fe107fef6377caed20614586124184cffccb73611312ceb922a917e6"
 
 [[package]]
 name = "cranelift-control"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "188f04092279a3814e0b6235c2f9c2e34028e4beb72da7bfed55cbd184702bcc"
+checksum = "50aef001c7ad250d5fdda2c7481cbfcabe6435c66106adf5760dcb9fb9a8ede4"
 dependencies = [
  "arbitrary",
 ]
 
 [[package]]
 name = "cranelift-entity"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43f5e7391167605d505fe66a337e1a69583b3f34b63d359ffa5a430313c555e8"
+checksum = "cf3c84656a010df2b5afaedcbbbd94f1efe175b55e29864df7b99e64bfa40d56"
 dependencies = [
  "cranelift-bitset",
  "serde",
@@ -3983,9 +3970,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-frontend"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea5440792eb2b5ba0a0976df371b9f94031bd853ae56f389de610bca7128a7cb"
+checksum = "6aa1d2006915cddb63705db46dcfb8637fe08f91d26fbe59680d7257ec39d609"
 dependencies = [
  "cranelift-codegen",
  "log",
@@ -3995,15 +3982,15 @@ dependencies = [
 
 [[package]]
 name = "cranelift-isle"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e5c05fab6fce38d729088f3fa1060eaa1ad54eefd473588887205ed2ab2f79e"
+checksum = "6e4fecbcbb81273f9aff4559e26fc341f42663da420cca5ac84b34e74e9267e0"
 
 [[package]]
 name = "cranelift-native"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c9a0607a028edf5ba5bba7e7cf5ca1b7f0a030e3ae84dcd401e8b9b05192280"
+checksum = "976a3d85f197a56ae34ee4d5a5e469855ac52804a09a513d0562d425da0ff56e"
 dependencies = [
  "cranelift-codegen",
  "libc",
@@ -4012,9 +3999,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-srcgen"
-version = "0.123.6"
+version = "0.123.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb0f2da72eb2472aaac6cfba4e785af42b1f2d82f5155f30c9c30e8cce351e17"
+checksum = "37fbd4aefce642145491ff862d2054a71b63d2d97b8dd1e280c9fdaf399598b7"
 
 [[package]]
 name = "crash-context"
@@ -4806,6 +4793,7 @@ dependencies = [
  "paths",
  "picker",
  "project",
+ "regex",
  "serde",
  "serde_json",
  "serde_json_lenient",
@@ -4851,20 +4839,6 @@ dependencies = [
  "zlog",
 ]
 
-[[package]]
-name = "dialoguer"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de"
-dependencies = [
- "console",
- "fuzzy-matcher",
- "shell-words",
- "tempfile",
- "thiserror 1.0.69",
- "zeroize",
-]
-
 [[package]]
 name = "diff"
 version = "0.1.13"
@@ -4951,7 +4925,7 @@ dependencies = [
  "libc",
  "option-ext",
  "redox_users 0.5.2",
- "windows-sys 0.61.2",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -5546,12 +5520,6 @@ dependencies = [
  "phf 0.11.3",
 ]
 
-[[package]]
-name = "encode_unicode"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
-
 [[package]]
 name = "encoding_rs"
 version = "0.8.35"
@@ -5723,7 +5691,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
 dependencies = [
  "libc",
- "windows-sys 0.61.2",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -6495,7 +6463,6 @@ dependencies = [
  "ashpd",
  "async-tar",
  "async-trait",
- "cocoa 0.26.0",
  "collections",
  "dunce",
  "fs",
@@ -6507,7 +6474,6 @@ dependencies = [
  "libc",
  "log",
  "notify 8.2.0",
- "objc",
  "parking_lot",
  "paths",
  "proto",
@@ -6517,7 +6483,9 @@ dependencies = [
  "smol",
  "tempfile",
  "text",
+ "thiserror 2.0.17",
  "time",
+ "trash",
  "util",
  "windows 0.61.3",
 ]
@@ -6747,15 +6715,6 @@ dependencies = [
  "util",
 ]
 
-[[package]]
-name = "fuzzy-matcher"
-version = "0.3.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94"
-dependencies = [
- "thread_local",
-]
-
 [[package]]
 name = "fuzzy_nucleo"
 version = "0.1.0"
@@ -7157,7 +7116,7 @@ dependencies = [
  "gobject-sys",
  "libc",
  "system-deps 7.0.7",
- "windows-sys 0.61.2",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -8486,7 +8445,7 @@ dependencies = [
  "js-sys",
  "log",
  "wasm-bindgen",
- "windows-core 0.62.2",
+ "windows-core 0.57.0",
 ]
 
 [[package]]
@@ -11130,7 +11089,7 @@ version = "0.50.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
 dependencies = [
- "windows-sys 0.61.2",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -13392,6 +13351,8 @@ dependencies = [
  "editor",
  "feature_flags",
  "file_icons",
+ "fs",
+ "futures 0.3.32",
  "git",
  "git_ui",
  "gpui",
@@ -13574,7 +13535,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4"
 dependencies = [
  "bytes 1.11.1",
- "heck 0.5.0",
+ "heck 0.4.1",
  "itertools 0.12.1",
  "log",
  "multimap",
@@ -13724,9 +13685,9 @@ checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3"
 
 [[package]]
 name = "pulley-interpreter"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "499d922aa0f9faac8d92351416664f1b7acd914008a90fce2f0516d31efddf67"
+checksum = "a078b4bdfd275fadeefc4f9ae3675ee5af302e69497da439956dd05257858970"
 dependencies = [
  "cranelift-bitset",
  "log",
@@ -13736,9 +13697,9 @@ dependencies = [
 
 [[package]]
 name = "pulley-macros"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3848fb193d6dffca43a21f24ca9492f22aab88af1223d06bac7f8a0ef405b81"
+checksum = "9dac91999883fd00b900eb5377be403c5cb8b93e10efcb571bf66454c2d9f231"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -13880,7 +13841,7 @@ dependencies = [
  "once_cell",
  "socket2 0.6.1",
  "tracing",
- "windows-sys 0.60.2",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -15072,7 +15033,7 @@ dependencies = [
  "errno 0.3.14",
  "libc",
  "linux-raw-sys 0.11.0",
- "windows-sys 0.61.2",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -16325,7 +16286,7 @@ version = "0.8.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451"
 dependencies = [
- "heck 0.5.0",
+ "heck 0.4.1",
  "proc-macro2",
  "quote",
  "syn 2.0.117",
@@ -16745,44 +16706,6 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
 
-[[package]]
-name = "story"
-version = "0.1.0"
-dependencies = [
- "gpui",
- "itertools 0.14.0",
- "smallvec",
-]
-
-[[package]]
-name = "storybook"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "clap",
- "ctrlc",
- "dialoguer",
- "editor",
- "fuzzy",
- "gpui",
- "gpui_platform",
- "indoc",
- "language",
- "log",
- "menu",
- "picker",
- "reqwest_client",
- "rust-embed",
- "settings",
- "simplelog",
- "story",
- "strum 0.27.2",
- "theme",
- "theme_settings",
- "title_bar",
- "ui",
-]
-
 [[package]]
 name = "streaming-iterator"
 version = "0.1.9"
@@ -17577,7 +17500,7 @@ dependencies = [
  "getrandom 0.3.4",
  "once_cell",
  "rustix 1.1.2",
- "windows-sys 0.61.2",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -17704,7 +17627,6 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "collections",
- "derive_more",
  "gpui",
  "palette",
  "parking_lot",
@@ -18027,7 +17949,6 @@ dependencies = [
  "serde",
  "settings",
  "smallvec",
- "story",
  "telemetry",
  "theme",
  "ui",
@@ -18498,7 +18419,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f"
 dependencies = [
  "cc",
- "windows-targets 0.52.6",
+ "windows-targets 0.48.5",
 ]
 
 [[package]]
@@ -18511,6 +18432,24 @@ dependencies = [
  "strength_reduce",
 ]
 
+[[package]]
+name = "trash"
+version = "5.2.5"
+source = "git+https://github.com/zed-industries/trash-rs?rev=3bf27effd4eb8699f2e484d3326b852fe3e53af7#3bf27effd4eb8699f2e484d3326b852fe3e53af7"
+dependencies = [
+ "chrono",
+ "libc",
+ "log",
+ "objc2",
+ "objc2-foundation",
+ "once_cell",
+ "percent-encoding",
+ "scopeguard",
+ "urlencoding",
+ "windows 0.56.0",
+ "windows-core 0.56.0",
+]
+
 [[package]]
 name = "tree-sitter"
 version = "0.26.8"
@@ -18926,7 +18865,6 @@ dependencies = [
  "schemars",
  "serde",
  "smallvec",
- "story",
  "strum 0.27.2",
  "theme",
  "ui_macros",
@@ -19772,9 +19710,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a2f8736ddc86e03a9d0e4c477a37939cfc53cd1b052ee38a3133679b87ef830"
+checksum = "b80d5ba38b9b00f60a0665e07dde38e91d884d4a78cd61d777c8cf081a1267c1"
 dependencies = [
  "addr2line",
  "anyhow",
@@ -19833,9 +19771,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-environ"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "733682a327755c77153ac7455b1ba8f2db4d9946c1738f8002fe1fbda1d52e83"
+checksum = "44a45d60dea98308decb71a9f7bb35a629696d1fbf7127dbfde42cbc64b8fa33"
 dependencies = [
  "anyhow",
  "cpp_demangle",
@@ -19860,9 +19798,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-asm-macros"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68288980a2e02bcb368d436da32565897033ea21918007e3f2bae18843326cf9"
+checksum = "dd014b4001b6da03d79062d9ad5ec98fa62e34d50e30e46298545282cc2957e4"
 dependencies = [
  "cfg-if",
 ]
@@ -19879,9 +19817,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-component-macro"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5dea846da68f8e776c8a43bde3386022d7bb74e713b9654f7c0196e5ff2e4684"
+checksum = "0f2942aa5d44b02061e0c6ab71b23090cf3b300b4519e3b80776ac38edde2e65"
 dependencies = [
  "anyhow",
  "proc-macro2",
@@ -19894,15 +19832,15 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-component-util"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe1e5735b3c8251510d2a55311562772d6c6fca9438a3d0329eb6e38af4957d6"
+checksum = "bcb6f974fe739e98034b7e6ec6feb2ab399f4cde7207675f26138bd9a1d65720"
 
 [[package]]
 name = "wasmtime-internal-cranelift"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e89bb9ef571288e2be6b8a3c4763acc56c348dcd517500b1679d3ffad9e4a757"
+checksum = "4047020866a80aa943e41133e607020e17562126cf81533362275272098a22b1"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -19927,9 +19865,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-fiber"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b698d004b15ea1f1ae2d06e5e8b80080cbd684fd245220ce2fac3cdd5ecf87f2"
+checksum = "7cd172b622993bb8f834f6ca3b7683dfdba72b12db0527824850fdec17c89e5a"
 dependencies = [
  "anyhow",
  "cc",
@@ -19943,9 +19881,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-jit-debug"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c803a9fec05c3d7fa03474d4595079d546e77a3c71c1d09b21f74152e2165c17"
+checksum = "1287e310fef4c8759a6b5caa0d44eff9a03ebcd6c273729cc39ce3e321a9e26a"
 dependencies = [
  "cc",
  "wasmtime-internal-versioned-export-macros",
@@ -19953,9 +19891,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-jit-icache-coherence"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3866909d37f7929d902e6011847748147e8734e9d7e0353e78fb8b98f586aee"
+checksum = "c02bca30ef670a31496d742d9facdbd0228debe766b1e9541655c0530ff5c953"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -19965,24 +19903,24 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-math"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a23b03fb14c64bd0dfcaa4653101f94ade76c34a3027ed2d6b373267536e45b"
+checksum = "fd3a1f51a037ae2c048f0d76d36e27f0d22276295496c44f16a251f24690e003"
 dependencies = [
  "libm",
 ]
 
 [[package]]
 name = "wasmtime-internal-slab"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbff220b88cdb990d34a20b13344e5da2e7b99959a5b1666106bec94b58d6364"
+checksum = "ba6171aac3d66e4d69e50080bb6bc5205de2283513984a4118a93cb66dc02994"
 
 [[package]]
 name = "wasmtime-internal-unwinder"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13e1ad30e88988b20c0d1c56ea4b4fbc01a8c614653cbf12ca50c0dcc695e2f7"
+checksum = "3fd1bc1783391a02176fb687159b1779fc10b71d5350adf09c1f3aa8442a02cc"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -19993,9 +19931,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-versioned-export-macros"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "549aefdaa1398c2fcfbf69a7b882956bb5b6e8e5b600844ecb91a3b5bf658ca7"
+checksum = "8097e2c8ca02ed65d31dda111faa0888ffbf28dc3ee74355e283118a8d293eb0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -20004,9 +19942,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-winch"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cc96a84c5700171aeecf96fa9a9ab234f333f5afb295dabf3f8a812b70fe832"
+checksum = "6a8cb36b61fbcff2c8bcd14f9f2651a6e52b019d0d329324620d7bc971b2b235"
 dependencies = [
  "anyhow",
  "cranelift-codegen",
@@ -20021,9 +19959,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-internal-wit-bindgen"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c28dc9efea511598c88564ac1974e0825c07d9c0de902dbf68f227431cd4ff8c"
+checksum = "ff555cfb71577028616d65c00221c7fe6eef45a9ebb96fc6d34d4a41fa1de191"
 dependencies = [
  "anyhow",
  "bitflags 2.10.0",
@@ -20593,7 +20531,7 @@ version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
 dependencies = [
- "windows-sys 0.61.2",
+ "windows-sys 0.48.0",
 ]
 
 [[package]]
@@ -20604,9 +20542,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
 name = "winch-codegen"
-version = "36.0.6"
+version = "36.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06c0ec09e8eb5e850e432da6271ed8c4a9d459a9db3850c38e98a3ee9d015e79"
+checksum = "0989126b21d12c9923aa2de7ddbcf87db03037b24b7365041d9dd0095b69d8cb"
 dependencies = [
  "anyhow",
  "cranelift-assembler-x64",
@@ -20622,6 +20560,16 @@ dependencies = [
  "wasmtime-internal-math",
 ]
 
+[[package]]
+name = "windows"
+version = "0.56.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132"
+dependencies = [
+ "windows-core 0.56.0",
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "windows"
 version = "0.57.0"
@@ -20710,6 +20658,18 @@ dependencies = [
  "windows-core 0.62.2",
 ]
 
+[[package]]
+name = "windows-core"
+version = "0.56.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6"
+dependencies = [
+ "windows-implement 0.56.0",
+ "windows-interface 0.56.0",
+ "windows-result 0.1.2",
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "windows-core"
 version = "0.57.0"
@@ -20783,6 +20743,17 @@ dependencies = [
  "windows-threading 0.2.1",
 ]
 
+[[package]]
+name = "windows-implement"
+version = "0.56.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+]
+
 [[package]]
 name = "windows-implement"
 version = "0.57.0"
@@ -20816,6 +20787,17 @@ dependencies = [
  "syn 2.0.117",
 ]
 
+[[package]]
+name = "windows-interface"
+version = "0.56.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+]
+
 [[package]]
 name = "windows-interface"
 version = "0.57.0"

Cargo.toml 🔗

@@ -183,8 +183,6 @@ members = [
     "crates/snippets_ui",
     "crates/sqlez",
     "crates/sqlez_macros",
-    "crates/story",
-    "crates/storybook",
     "crates/streaming_diff",
     "crates/sum_tree",
     "crates/svg_preview",
@@ -437,7 +435,6 @@ snippet_provider = { path = "crates/snippet_provider" }
 snippets_ui = { path = "crates/snippets_ui" }
 sqlez = { path = "crates/sqlez" }
 sqlez_macros = { path = "crates/sqlez_macros" }
-story = { path = "crates/story" }
 streaming_diff = { path = "crates/streaming_diff" }
 sum_tree = { path = "crates/sum_tree" }
 codestral = { path = "crates/codestral" }
@@ -574,7 +571,7 @@ encoding_rs = "0.8"
 exec = "0.3.1"
 fancy-regex = "0.17.0"
 fork = "0.4.0"
-futures = "0.3"
+futures = "0.3.32"
 futures-concurrency = "7.7.1"
 futures-lite = "1.13"
 gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "37f3c0575d379c218a9c455ee67585184e40d43f" }
@@ -935,7 +932,6 @@ session = { codegen-units = 1 }
 sidebar = { codegen-units = 1 }
 snippet = { codegen-units = 1 }
 snippets_ui = { codegen-units = 1 }
-story = { codegen-units = 1 }
 telemetry_events = { codegen-units = 1 }
 theme_selector = { codegen-units = 1 }
 time_format = { codegen-units = 1 }

assets/keymaps/default-linux.json 🔗

@@ -936,6 +936,8 @@
       "alt-ctrl-shift-c": "workspace::CopyRelativePath",
       "undo": "project_panel::Undo",
       "ctrl-z": "project_panel::Undo",
+      "redo": "project_panel::Redo",
+      "ctrl-shift-z": "project_panel::Redo",
       "enter": "project_panel::Rename",
       "f2": "project_panel::Rename",
       "backspace": ["project_panel::Trash", { "skip_prompt": false }],

assets/keymaps/default-macos.json 🔗

@@ -991,6 +991,7 @@
       "cmd-alt-c": "workspace::CopyPath",
       "alt-cmd-shift-c": "workspace::CopyRelativePath",
       "cmd-z": "project_panel::Undo",
+      "cmd-shift-z": "project_panel::Redo",
       "enter": "project_panel::Rename",
       "f2": "project_panel::Rename",
       "backspace": ["project_panel::Trash", { "skip_prompt": false }],

assets/keymaps/default-windows.json 🔗

@@ -929,6 +929,7 @@
       "shift-alt-c": "project_panel::CopyPath",
       "ctrl-k ctrl-shift-c": "workspace::CopyRelativePath",
       "ctrl-z": "project_panel::Undo",
+      "ctrl-shift-z": "project_panel::Redo",
       "enter": "project_panel::Rename",
       "f2": "project_panel::Rename",
       "backspace": ["project_panel::Trash", { "skip_prompt": false }],

assets/keymaps/vim.json 🔗

@@ -1148,6 +1148,11 @@
       "g g": "menu::SelectFirst",
       "shift-g": "menu::SelectLast",
       "/": "agents_sidebar::FocusSidebarFilter",
+      "d d": "agent::RemoveSelectedThread",
+      "o": "agents_sidebar::NewThreadInGroup",
+      "shift-o": "agents_sidebar::NewThreadInGroup",
+      "] p": "multi_workspace::NextProject",
+      "[ p": "multi_workspace::PreviousProject",
       "z a": "editor::ToggleFold",
       "z c": "menu::SelectParent",
       "z o": "menu::SelectChild",

assets/settings/default.json 🔗

@@ -636,6 +636,9 @@
   // Scroll sensitivity multiplier. This multiplier is applied
   // to both the horizontal and vertical delta values while scrolling.
   "scroll_sensitivity": 1.0,
+  // Whether to zoom the editor font size with the mouse wheel
+  // while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms).
+  "mouse_wheel_zoom": false,
   // Scroll sensitivity multiplier for fast scrolling. This multiplier is applied
   // to both the horizontal and vertical delta values while scrolling. Fast scrolling
   // happens when a user holds the alt or option key while scrolling.
@@ -964,7 +967,7 @@
     //
     // Default: true
     "flexible": true,
-    // Where to position the sidebar. Can be 'left' or 'right'.
+    // Where to position the threads sidebar. Can be 'left' or 'right'.
     "sidebar_side": "left",
     // Default width when the agent panel is docked to the left or right.
     "default_width": 640,
@@ -1075,7 +1078,7 @@
           "terminal": true,
           "thinking": true,
           "update_plan": true,
-          "web_search": true,
+          "search_web": true,
         },
       },
       "ask": {
@@ -1095,7 +1098,7 @@
           "spawn_agent": true,
           "thinking": true,
           "update_plan": true,
-          "web_search": true,
+          "search_web": true,
         },
       },
       "minimal": {

crates/action_log/src/action_log.rs 🔗

@@ -777,7 +777,7 @@ impl ActionLog {
                         initial_version == current_version && current_content == tracked_content;
 
                     if is_ai_only_content {
-                        buffer
+                        let task = buffer
                             .read(cx)
                             .entry_id(cx)
                             .and_then(|entry_id| {
@@ -785,7 +785,12 @@ impl ActionLog {
                                     project.delete_entry(entry_id, false, cx)
                                 })
                             })
-                            .unwrap_or(Task::ready(Ok(())))
+                            .unwrap_or_else(|| Task::ready(Ok(None)));
+
+                        cx.background_spawn(async move {
+                            task.await?;
+                            Ok(())
+                        })
                     } else {
                         // Not sure how to disentangle edits made by the user
                         // from edits made by the AI at this point.

crates/agent/src/tools/streaming_edit_file_tool.rs 🔗

@@ -189,9 +189,9 @@ pub enum StreamingEditFileToolOutput {
     },
     Error {
         error: String,
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         input_path: Option<PathBuf>,
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "String::is_empty")]
         diff: String,
     },
 }

crates/agent/src/tools/web_search_tool.rs 🔗

@@ -53,7 +53,7 @@ impl AgentTool for WebSearchTool {
     type Input = WebSearchToolInput;
     type Output = WebSearchToolOutput;
 
-    const NAME: &'static str = "web_search";
+    const NAME: &'static str = "search_web";
 
     fn kind() -> acp::ToolKind {
         acp::ToolKind::Fetch

crates/agent_ui/src/agent_diff.rs 🔗

@@ -98,6 +98,7 @@ impl AgentDiffPane {
             editor
                 .set_render_diff_hunk_controls(diff_hunk_controls(&thread, workspace.clone()), cx);
             editor.register_addon(AgentDiffAddon);
+            editor.disable_mouse_wheel_zoom();
             editor
         });
 

crates/agent_ui/src/agent_panel.rs 🔗

@@ -56,8 +56,9 @@ use extension_host::ExtensionStore;
 use fs::Fs;
 use gpui::{
     Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner,
-    DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels,
-    Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between,
+    DismissEvent, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, Focusable,
+    KeyContext, Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*,
+    pulsating_between,
 };
 use language::LanguageRegistry;
 use language_model::LanguageModelRegistry;
@@ -819,7 +820,7 @@ pub struct AgentPanel {
     agent_layout_onboarding_dismissed: AtomicBool,
     selected_agent: Agent,
     start_thread_in: StartThreadIn,
-    worktree_creation_status: Option<WorktreeCreationStatus>,
+    worktree_creation_status: Option<(EntityId, WorktreeCreationStatus)>,
     _thread_view_subscription: Option<Subscription>,
     _active_thread_focus_subscription: Option<Subscription>,
     _worktree_creation_task: Option<Task<()>>,
@@ -2795,6 +2796,7 @@ impl AgentPanel {
             PathBuf,
             futures::channel::oneshot::Receiver<Result<()>>,
         )>,
+        fs: Arc<dyn Fs>,
         cx: &mut AsyncWindowContext,
     ) -> Result<Vec<PathBuf>> {
         let mut created_paths: Vec<PathBuf> = Vec::new();
@@ -2803,10 +2805,10 @@ impl AgentPanel {
         let mut first_error: Option<anyhow::Error> = None;
 
         for (repo, new_path, receiver) in creation_infos {
+            repos_and_paths.push((repo.clone(), new_path.clone()));
             match receiver.await {
                 Ok(Ok(())) => {
-                    created_paths.push(new_path.clone());
-                    repos_and_paths.push((repo, new_path));
+                    created_paths.push(new_path);
                 }
                 Ok(Err(err)) => {
                     if first_error.is_none() {
@@ -2825,34 +2827,66 @@ impl AgentPanel {
             return Ok(created_paths);
         };
 
-        // Rollback all successfully created worktrees
-        let mut rollback_receivers = Vec::new();
+        // Rollback all attempted worktrees (both successful and failed)
+        let mut rollback_futures = Vec::new();
         for (rollback_repo, rollback_path) in &repos_and_paths {
-            if let Ok(receiver) = cx.update(|_, cx| {
-                rollback_repo.update(cx, |repo, _cx| {
-                    repo.remove_worktree(rollback_path.clone(), true)
+            let receiver = cx
+                .update(|_, cx| {
+                    rollback_repo.update(cx, |repo, _cx| {
+                        repo.remove_worktree(rollback_path.clone(), true)
+                    })
                 })
-            }) {
-                rollback_receivers.push((rollback_path.clone(), receiver));
-            }
+                .ok();
+
+            rollback_futures.push((rollback_path.clone(), receiver));
         }
+
         let mut rollback_failures: Vec<String> = Vec::new();
-        for (path, receiver) in rollback_receivers {
-            match receiver.await {
-                Ok(Ok(())) => {}
-                Ok(Err(rollback_err)) => {
-                    log::error!(
-                        "failed to rollback worktree at {}: {rollback_err}",
-                        path.display()
-                    );
-                    rollback_failures.push(format!("{}: {rollback_err}", path.display()));
+        for (path, receiver_opt) in rollback_futures {
+            let mut git_remove_failed = false;
+
+            if let Some(receiver) = receiver_opt {
+                match receiver.await {
+                    Ok(Ok(())) => {}
+                    Ok(Err(rollback_err)) => {
+                        log::error!(
+                            "git worktree remove failed for {}: {rollback_err}",
+                            path.display()
+                        );
+                        git_remove_failed = true;
+                    }
+                    Err(canceled) => {
+                        log::error!(
+                            "git worktree remove failed for {}: {canceled}",
+                            path.display()
+                        );
+                        git_remove_failed = true;
+                    }
                 }
-                Err(rollback_err) => {
-                    log::error!(
-                        "failed to rollback worktree at {}: {rollback_err}",
-                        path.display()
-                    );
-                    rollback_failures.push(format!("{}: {rollback_err}", path.display()));
+            } else {
+                log::error!(
+                    "failed to dispatch git worktree remove for {}",
+                    path.display()
+                );
+                git_remove_failed = true;
+            }
+
+            // `git worktree remove` normally removes this directory, but since
+            // `git worktree remove` failed (or wasn't dispatched), manually rm the directory.
+            if git_remove_failed {
+                if let Err(fs_err) = fs
+                    .remove_dir(
+                        &path,
+                        fs::RemoveOptions {
+                            recursive: true,
+                            ignore_if_not_exists: true,
+                        },
+                    )
+                    .await
+                {
+                    let msg = format!("{}: failed to remove directory: {fs_err}", path.display());
+                    log::error!("{}", msg);
+                    rollback_failures.push(msg);
                 }
             }
         }
@@ -2870,7 +2904,9 @@ impl AgentPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.worktree_creation_status = Some(WorktreeCreationStatus::Error(message));
+        if let Some((_, status)) = &mut self.worktree_creation_status {
+            *status = WorktreeCreationStatus::Error(message);
+        }
         if matches!(self.active_view, ActiveView::Uninitialized) {
             let selected_agent = self.selected_agent.clone();
             self.new_agent_thread(selected_agent, window, cx);
@@ -2887,12 +2923,17 @@ impl AgentPanel {
     ) {
         if matches!(
             self.worktree_creation_status,
-            Some(WorktreeCreationStatus::Creating)
+            Some((_, WorktreeCreationStatus::Creating))
         ) {
             return;
         }
 
-        self.worktree_creation_status = Some(WorktreeCreationStatus::Creating);
+        let conversation_view_id = self
+            .active_conversation_view()
+            .map(|v| v.entity_id())
+            .unwrap_or_else(|| EntityId::from(0u64));
+        self.worktree_creation_status =
+            Some((conversation_view_id, WorktreeCreationStatus::Creating));
         cx.notify();
 
         let (git_repos, non_git_paths) = self.classify_worktrees(cx);
@@ -3058,8 +3099,10 @@ impl AgentPanel {
                             }
                         };
 
+                    let fs = cx.update(|_, cx| <dyn Fs>::global(cx))?;
+
                     let created_paths =
-                        match Self::await_and_rollback_on_failure(creation_infos, cx).await {
+                        match Self::await_and_rollback_on_failure(creation_infos, fs, cx).await {
                             Ok(paths) => paths,
                             Err(err) => {
                                 this.update_in(cx, |this, window, cx| {
@@ -3147,28 +3190,33 @@ impl AgentPanel {
         let window_handle = window_handle
             .ok_or_else(|| anyhow!("No window handle available for workspace creation"))?;
 
-        let workspace_task = window_handle.update(cx, |multi_workspace, window, cx| {
-            let path_list = PathList::new(&all_paths);
-            let active_workspace = multi_workspace.workspace().clone();
+        let (workspace_task, modal_workspace) =
+            window_handle.update(cx, |multi_workspace, window, cx| {
+                let path_list = PathList::new(&all_paths);
+                let active_workspace = multi_workspace.workspace().clone();
+                let modal_workspace = active_workspace.clone();
 
-            multi_workspace.find_or_create_workspace(
-                path_list,
-                remote_connection_options,
-                None,
-                move |connection_options, window, cx| {
-                    remote_connection::connect_with_modal(
-                        &active_workspace,
-                        connection_options,
-                        window,
-                        cx,
-                    )
-                },
-                window,
-                cx,
-            )
-        })?;
+                let task = multi_workspace.find_or_create_workspace(
+                    path_list,
+                    remote_connection_options,
+                    None,
+                    move |connection_options, window, cx| {
+                        remote_connection::connect_with_modal(
+                            &active_workspace,
+                            connection_options,
+                            window,
+                            cx,
+                        )
+                    },
+                    window,
+                    cx,
+                );
+                (task, modal_workspace)
+            })?;
 
-        let new_workspace = workspace_task.await?;
+        let result = workspace_task.await;
+        remote_connection::dismiss_connection_modal(&modal_workspace, cx);
+        let new_workspace = result?;
 
         let panels_task = new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task());
 
@@ -3406,7 +3454,7 @@ impl Panel for AgentPanel {
             && matches!(self.active_view, ActiveView::Uninitialized)
             && !matches!(
                 self.worktree_creation_status,
-                Some(WorktreeCreationStatus::Creating)
+                Some((_, WorktreeCreationStatus::Creating))
             )
         {
             let selected_agent = self.selected_agent.clone();
@@ -3646,13 +3694,19 @@ impl AgentPanel {
         !self.project.read(cx).repositories(cx).is_empty()
     }
 
+    fn is_active_view_creating_worktree(&self, _cx: &App) -> bool {
+        match &self.worktree_creation_status {
+            Some((view_id, WorktreeCreationStatus::Creating)) => {
+                self.active_conversation_view().map(|v| v.entity_id()) == Some(*view_id)
+            }
+            _ => false,
+        }
+    }
+
     fn render_start_thread_in_selector(&self, cx: &mut Context<Self>) -> impl IntoElement {
         let focus_handle = self.focus_handle(cx);
 
-        let is_creating = matches!(
-            self.worktree_creation_status,
-            Some(WorktreeCreationStatus::Creating)
-        );
+        let is_creating = self.is_active_view_creating_worktree(cx);
 
         let trigger_parts = self
             .start_thread_in
@@ -3705,10 +3759,7 @@ impl AgentPanel {
     }
 
     fn render_new_worktree_branch_selector(&self, cx: &mut Context<Self>) -> impl IntoElement {
-        let is_creating = matches!(
-            self.worktree_creation_status,
-            Some(WorktreeCreationStatus::Creating)
-        );
+        let is_creating = self.is_active_view_creating_worktree(cx);
 
         let project_ref = self.project.read(cx);
         let trigger_parts = self
@@ -4176,7 +4227,11 @@ impl AgentPanel {
     }
 
     fn render_worktree_creation_status(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
-        let status = self.worktree_creation_status.as_ref()?;
+        let (view_id, status) = self.worktree_creation_status.as_ref()?;
+        let active_view_id = self.active_conversation_view().map(|v| v.entity_id());
+        if active_view_id != Some(*view_id) {
+            return None;
+        }
         match status {
             WorktreeCreationStatus::Creating => Some(
                 h_flex()
@@ -4716,10 +4771,11 @@ impl AgentPanel {
     ///
     /// This is a test-only helper for visual tests.
     pub fn worktree_creation_status_for_tests(&self) -> Option<&WorktreeCreationStatus> {
-        self.worktree_creation_status.as_ref()
+        self.worktree_creation_status.as_ref().map(|(_, s)| s)
     }
 
-    /// Sets the worktree creation status directly.
+    /// Sets the worktree creation status directly, associating it with the
+    /// currently active conversation view.
     ///
     /// This is a test-only helper for visual tests that need to show the
     /// "Creating worktree…" spinner or error banners.
@@ -4728,7 +4784,13 @@ impl AgentPanel {
         status: Option<WorktreeCreationStatus>,
         cx: &mut Context<Self>,
     ) {
-        self.worktree_creation_status = status;
+        self.worktree_creation_status = status.map(|s| {
+            let view_id = self
+                .active_conversation_view()
+                .map(|v| v.entity_id())
+                .unwrap_or_else(|| EntityId::from(0u64));
+            (view_id, s)
+        });
         cx.notify();
     }
 
@@ -4769,6 +4831,7 @@ mod tests {
     };
     use acp_thread::{StubAgentConnection, ThreadStatus};
     use agent_servers::CODEX_ID;
+    use feature_flags::FeatureFlagAppExt;
     use fs::FakeFs;
     use gpui::{TestAppContext, VisualTestContext};
     use project::Project;
@@ -5975,7 +6038,8 @@ mod tests {
 
         // Simulate worktree creation in progress and reset to Uninitialized
         panel.update_in(cx, |panel, window, cx| {
-            panel.worktree_creation_status = Some(WorktreeCreationStatus::Creating);
+            panel.worktree_creation_status =
+                Some((EntityId::from(0u64), WorktreeCreationStatus::Creating));
             panel.active_view = ActiveView::Uninitialized;
             Panel::set_active(panel, true, window, cx);
             assert!(
@@ -6421,7 +6485,7 @@ mod tests {
                 let metadata = store
                     .entry(session_id)
                     .unwrap_or_else(|| panic!("{label} thread metadata should exist"));
-                metadata.folder_paths.clone()
+                metadata.folder_paths().clone()
             });
             let mut sorted = metadata_paths.ordered_paths().cloned().collect::<Vec<_>>();
             sorted.sort();
@@ -6671,6 +6735,287 @@ mod tests {
         });
     }
 
+    #[gpui::test]
+    async fn test_rollback_all_succeed_returns_ok(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.executor());
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+            <dyn fs::Fs>::set_global(fs.clone(), cx);
+        });
+
+        fs.insert_tree(
+            "/project",
+            json!({
+                ".git": {},
+                "src": { "main.rs": "fn main() {}" }
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+        cx.executor().run_until_parked();
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        let path_a = PathBuf::from("/worktrees/branch/project_a");
+        let path_b = PathBuf::from("/worktrees/branch/project_b");
+
+        let (sender_a, receiver_a) = futures::channel::oneshot::channel::<Result<()>>();
+        let (sender_b, receiver_b) = futures::channel::oneshot::channel::<Result<()>>();
+        sender_a.send(Ok(())).unwrap();
+        sender_b.send(Ok(())).unwrap();
+
+        let creation_infos = vec![
+            (repository.clone(), path_a.clone(), receiver_a),
+            (repository.clone(), path_b.clone(), receiver_b),
+        ];
+
+        let fs_clone = fs.clone();
+        let result = multi_workspace
+            .update(cx, |_, window, cx| {
+                window.spawn(cx, async move |cx| {
+                    AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await
+                })
+            })
+            .unwrap()
+            .await;
+
+        let paths = result.expect("all succeed should return Ok");
+        assert_eq!(paths, vec![path_a, path_b]);
+    }
+
+    #[gpui::test]
+    async fn test_rollback_on_failure_attempts_all_worktrees(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.executor());
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+            <dyn fs::Fs>::set_global(fs.clone(), cx);
+        });
+
+        fs.insert_tree(
+            "/project",
+            json!({
+                ".git": {},
+                "src": { "main.rs": "fn main() {}" }
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+        cx.executor().run_until_parked();
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        // Actually create a worktree so it exists in FakeFs for rollback to find.
+        let success_path = PathBuf::from("/worktrees/branch/project");
+        cx.update(|cx| {
+            repository.update(cx, |repo, _| {
+                repo.create_worktree(
+                    git::repository::CreateWorktreeTarget::NewBranch {
+                        branch_name: "branch".to_string(),
+                        base_sha: None,
+                    },
+                    success_path.clone(),
+                )
+            })
+        })
+        .await
+        .unwrap()
+        .unwrap();
+        cx.executor().run_until_parked();
+
+        // Verify the worktree directory exists before rollback.
+        assert!(
+            fs.is_dir(&success_path).await,
+            "worktree directory should exist before rollback"
+        );
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        // Build creation_infos: one success, one failure.
+        let failed_path = PathBuf::from("/worktrees/branch/failed_project");
+
+        let (sender_ok, receiver_ok) = futures::channel::oneshot::channel::<Result<()>>();
+        let (sender_err, receiver_err) = futures::channel::oneshot::channel::<Result<()>>();
+        sender_ok.send(Ok(())).unwrap();
+        sender_err
+            .send(Err(anyhow!("branch already exists")))
+            .unwrap();
+
+        let creation_infos = vec![
+            (repository.clone(), success_path.clone(), receiver_ok),
+            (repository.clone(), failed_path.clone(), receiver_err),
+        ];
+
+        let fs_clone = fs.clone();
+        let result = multi_workspace
+            .update(cx, |_, window, cx| {
+                window.spawn(cx, async move |cx| {
+                    AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await
+                })
+            })
+            .unwrap()
+            .await;
+
+        assert!(
+            result.is_err(),
+            "should return error when any creation fails"
+        );
+        let err_msg = result.unwrap_err().to_string();
+        assert!(
+            err_msg.contains("branch already exists"),
+            "error should mention the original failure: {err_msg}"
+        );
+
+        // The successful worktree should have been rolled back by git.
+        cx.executor().run_until_parked();
+        assert!(
+            !fs.is_dir(&success_path).await,
+            "successful worktree directory should be removed by rollback"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_rollback_on_canceled_receiver(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.executor());
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+            <dyn fs::Fs>::set_global(fs.clone(), cx);
+        });
+
+        fs.insert_tree(
+            "/project",
+            json!({
+                ".git": {},
+                "src": { "main.rs": "fn main() {}" }
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+        cx.executor().run_until_parked();
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        let path = PathBuf::from("/worktrees/branch/project");
+
+        // Drop the sender to simulate a canceled receiver.
+        let (_sender, receiver) = futures::channel::oneshot::channel::<Result<()>>();
+        drop(_sender);
+
+        let creation_infos = vec![(repository.clone(), path.clone(), receiver)];
+
+        let fs_clone = fs.clone();
+        let result = multi_workspace
+            .update(cx, |_, window, cx| {
+                window.spawn(cx, async move |cx| {
+                    AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await
+                })
+            })
+            .unwrap()
+            .await;
+
+        assert!(
+            result.is_err(),
+            "should return error when receiver is canceled"
+        );
+        let err_msg = result.unwrap_err().to_string();
+        assert!(
+            err_msg.contains("canceled"),
+            "error should mention cancellation: {err_msg}"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_rollback_cleans_up_orphan_directories(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.executor());
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+            <dyn fs::Fs>::set_global(fs.clone(), cx);
+        });
+
+        fs.insert_tree(
+            "/project",
+            json!({
+                ".git": {},
+                "src": { "main.rs": "fn main() {}" }
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+        cx.executor().run_until_parked();
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        // Simulate the orphan state: create_dir_all was called but git
+        // worktree add failed, leaving a directory with leftover files.
+        let orphan_path = PathBuf::from("/worktrees/branch/orphan_project");
+        fs.insert_tree(
+            "/worktrees/branch/orphan_project",
+            json!({ "leftover.txt": "junk" }),
+        )
+        .await;
+
+        assert!(
+            fs.is_dir(&orphan_path).await,
+            "orphan dir should exist before rollback"
+        );
+
+        let (sender, receiver) = futures::channel::oneshot::channel::<Result<()>>();
+        sender.send(Err(anyhow!("hook failed"))).unwrap();
+
+        let creation_infos = vec![(repository.clone(), orphan_path.clone(), receiver)];
+
+        let fs_clone = fs.clone();
+        let result = multi_workspace
+            .update(cx, |_, window, cx| {
+                window.spawn(cx, async move |cx| {
+                    AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await
+                })
+            })
+            .unwrap()
+            .await;
+
+        cx.executor().run_until_parked();
+
+        assert!(result.is_err());
+        assert!(
+            !fs.is_dir(&orphan_path).await,
+            "orphan worktree directory should be removed by filesystem cleanup"
+        );
+    }
+
     #[gpui::test]
     async fn test_worktree_creation_for_remote_project(
         cx: &mut TestAppContext,

crates/agent_ui/src/conversation_view/thread_view.rs 🔗

@@ -4872,9 +4872,20 @@ impl ThreadView {
                 },
             );
 
-        if AgentSettings::get_global(cx).enable_feedback
-            && self.thread.read(cx).connection().telemetry().is_some()
-        {
+        let enable_thread_feedback = util::maybe!({
+            let project = thread.read(cx).project().read(cx);
+            let user_store = project.user_store();
+            if let Some(configuration) = user_store.read(cx).current_organization_configuration() {
+                if !configuration.is_agent_thread_feedback_enabled {
+                    return false;
+                }
+            }
+
+            AgentSettings::get_global(cx).enable_feedback
+                && self.thread.read(cx).connection().telemetry().is_some()
+        });
+
+        if enable_thread_feedback {
             let feedback = self.thread_feedback.feedback;
 
             let tooltip_meta = || {
@@ -5159,6 +5170,7 @@ impl ThreadView {
                         let mut editor =
                             Editor::for_multibuffer(buffer, Some(project.clone()), window, cx);
                         editor.set_breadcrumb_header(thread_title);
+                        editor.disable_mouse_wheel_zoom();
                         editor
                     })),
                     None,

crates/agent_ui/src/inline_assistant.rs 🔗

@@ -1,10 +1,8 @@
 use language_models::provider::anthropic::telemetry::{
     AnthropicCompletionType, AnthropicEventData, AnthropicEventType, report_anthropic_event,
 };
-use std::cmp;
 use std::mem;
 use std::ops::Range;
-use std::rc::Rc;
 use std::sync::Arc;
 use uuid::Uuid;
 
@@ -27,8 +25,8 @@ use editor::RowExt;
 use editor::SelectionEffects;
 use editor::scroll::ScrollOffset;
 use editor::{
-    Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer,
-    MultiBufferSnapshot, ToOffset as _, ToPoint,
+    Anchor, AnchorRangeExt, Editor, EditorEvent, HighlightKey, MultiBuffer, MultiBufferSnapshot,
+    ToOffset as _, ToPoint,
     actions::SelectAll,
     display_map::{
         BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins,
@@ -45,15 +43,14 @@ use language::{Buffer, Point, Selection, TransactionId};
 use language_model::{ConfigurationError, ConfiguredModel, LanguageModelRegistry};
 use multi_buffer::MultiBufferRow;
 use parking_lot::Mutex;
-use project::{CodeAction, DisableAiSettings, LspAction, Project, ProjectTransaction};
+use project::{DisableAiSettings, Project};
 use prompt_store::{PromptBuilder, PromptStore};
 use settings::{Settings, SettingsStore};
 
 use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
-use text::{OffsetRangeExt, ToPoint as _};
 use ui::prelude::*;
 use util::{RangeExt, ResultExt, maybe};
-use workspace::{ItemHandle, Toast, Workspace, dock::Panel, notifications::NotificationId};
+use workspace::{Toast, Workspace, dock::Panel, notifications::NotificationId};
 use zed_actions::agent::OpenSettings;
 
 pub fn init(fs: Arc<dyn Fs>, prompt_builder: Arc<PromptBuilder>, cx: &mut App) {
@@ -184,7 +181,7 @@ impl InlineAssistant {
 
     fn handle_workspace_event(
         &mut self,
-        workspace: Entity<Workspace>,
+        _workspace: Entity<Workspace>,
         event: &workspace::Event,
         window: &mut Window,
         cx: &mut App,
@@ -203,51 +200,10 @@ impl InlineAssistant {
                     }
                 }
             }
-            workspace::Event::ItemAdded { item } => {
-                self.register_workspace_item(&workspace, item.as_ref(), window, cx);
-            }
             _ => (),
         }
     }
 
-    fn register_workspace_item(
-        &mut self,
-        workspace: &Entity<Workspace>,
-        item: &dyn ItemHandle,
-        window: &mut Window,
-        cx: &mut App,
-    ) {
-        let is_ai_enabled = !DisableAiSettings::get_global(cx).disable_ai;
-
-        if let Some(editor) = item.act_as::<Editor>(cx) {
-            editor.update(cx, |editor, cx| {
-                if is_ai_enabled {
-                    editor.add_code_action_provider(
-                        Rc::new(AssistantCodeActionProvider {
-                            editor: cx.entity().downgrade(),
-                            workspace: workspace.downgrade(),
-                        }),
-                        window,
-                        cx,
-                    );
-
-                    if DisableAiSettings::get_global(cx).disable_ai {
-                        // Cancel any active edit predictions
-                        if editor.has_active_edit_prediction() {
-                            editor.cancel(&Default::default(), window, cx);
-                        }
-                    }
-                } else {
-                    editor.remove_code_action_provider(
-                        ASSISTANT_CODE_ACTION_PROVIDER_ID.into(),
-                        window,
-                        cx,
-                    );
-                }
-            });
-        }
-    }
-
     pub fn inline_assist(
         workspace: &mut Workspace,
         action: &zed_actions::assistant::InlineAssist,
@@ -1527,6 +1483,7 @@ impl InlineAssistant {
                     editor.set_show_wrap_guides(false, cx);
                     editor.set_show_gutter(false, cx);
                     editor.set_offset_content(false, cx);
+                    editor.disable_mouse_wheel_zoom();
                     editor.scroll_manager.set_forbid_vertical_scroll(true);
                     editor.set_read_only(true);
                     editor.set_show_edit_predictions(Some(false), window, cx);
@@ -1875,130 +1832,6 @@ struct InlineAssistDecorations {
     end_block_id: CustomBlockId,
 }
 
-struct AssistantCodeActionProvider {
-    editor: WeakEntity<Editor>,
-    workspace: WeakEntity<Workspace>,
-}
-
-const ASSISTANT_CODE_ACTION_PROVIDER_ID: &str = "assistant";
-
-impl CodeActionProvider for AssistantCodeActionProvider {
-    fn id(&self) -> Arc<str> {
-        ASSISTANT_CODE_ACTION_PROVIDER_ID.into()
-    }
-
-    fn code_actions(
-        &self,
-        buffer: &Entity<Buffer>,
-        range: Range<text::Anchor>,
-        _: &mut Window,
-        cx: &mut App,
-    ) -> Task<Result<Vec<CodeAction>>> {
-        if !AgentSettings::get_global(cx).enabled(cx) {
-            return Task::ready(Ok(Vec::new()));
-        }
-
-        let snapshot = buffer.read(cx).snapshot();
-        let mut range = range.to_point(&snapshot);
-
-        // Expand the range to line boundaries.
-        range.start.column = 0;
-        range.end.column = snapshot.line_len(range.end.row);
-
-        let mut has_diagnostics = false;
-        for diagnostic in snapshot.diagnostics_in_range::<_, Point>(range.clone(), false) {
-            range.start = cmp::min(range.start, diagnostic.range.start);
-            range.end = cmp::max(range.end, diagnostic.range.end);
-            has_diagnostics = true;
-        }
-        if has_diagnostics {
-            let symbols_containing_start = snapshot.symbols_containing(range.start, None);
-            if let Some(symbol) = symbols_containing_start.last() {
-                range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot));
-                range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot));
-            }
-            let symbols_containing_end = snapshot.symbols_containing(range.end, None);
-            if let Some(symbol) = symbols_containing_end.last() {
-                range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot));
-                range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot));
-            }
-
-            Task::ready(Ok(vec![CodeAction {
-                server_id: language::LanguageServerId(0),
-                range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end),
-                lsp_action: LspAction::Action(Box::new(lsp::CodeAction {
-                    title: "Fix with Assistant".into(),
-                    ..Default::default()
-                })),
-                resolved: true,
-            }]))
-        } else {
-            Task::ready(Ok(Vec::new()))
-        }
-    }
-
-    fn apply_code_action(
-        &self,
-        _buffer: Entity<Buffer>,
-        action: CodeAction,
-        _push_to_history: bool,
-        window: &mut Window,
-        cx: &mut App,
-    ) -> Task<Result<ProjectTransaction>> {
-        let editor = self.editor.clone();
-        let workspace = self.workspace.clone();
-        let prompt_store = PromptStore::global(cx);
-        window.spawn(cx, async move |cx| {
-            let workspace = workspace.upgrade().context("workspace was released")?;
-            let (thread_store, history) = cx.update(|_window, cx| {
-                let panel = workspace
-                    .read(cx)
-                    .panel::<AgentPanel>(cx)
-                    .context("missing agent panel")?
-                    .read(cx);
-
-                let history = panel
-                    .connection_store()
-                    .read(cx)
-                    .entry(&crate::Agent::NativeAgent)
-                    .and_then(|e| e.read(cx).history())
-                    .map(|h| h.downgrade());
-
-                anyhow::Ok((panel.thread_store().clone(), history))
-            })??;
-            let editor = editor.upgrade().context("editor was released")?;
-            let range = editor
-                .update(cx, |editor, cx| {
-                    editor.buffer().update(cx, |multibuffer, cx| {
-                        let multibuffer_snapshot = multibuffer.read(cx);
-                        multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range)
-                    })
-                })
-                .context("invalid range")?;
-
-            let prompt_store = prompt_store.await.ok();
-            cx.update_global(|assistant: &mut InlineAssistant, window, cx| {
-                let assist_id = assistant.suggest_assist(
-                    &editor,
-                    range,
-                    "Fix Diagnostics".into(),
-                    None,
-                    true,
-                    workspace,
-                    thread_store,
-                    prompt_store,
-                    history,
-                    window,
-                    cx,
-                );
-                assistant.start_assist(assist_id, window, cx);
-            })?;
-
-            Ok(ProjectTransaction::default())
-        })
-    }
-}
-
 fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
     ranges.sort_unstable_by(|a, b| {
         a.start

crates/agent_ui/src/message_editor.rs 🔗

@@ -422,6 +422,7 @@ impl MessageEditor {
             editor.set_show_indent_guides(false, cx);
             editor.set_show_completions_on_input(Some(true));
             editor.set_soft_wrap();
+            editor.disable_mouse_wheel_zoom();
             editor.set_use_modal_editing(true);
             editor.set_context_menu_options(ContextMenuOptions {
                 min_entries_visible: 12,

crates/agent_ui/src/thread_import.rs 🔗

@@ -12,17 +12,18 @@ use gpui::{
 };
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::{AgentId, AgentRegistryStore, AgentServerStore};
+use remote::RemoteConnectionOptions;
 use ui::{
     Checkbox, KeyBinding, ListItem, ListItemSpacing, Modal, ModalFooter, ModalHeader, Section,
     prelude::*,
 };
 use util::ResultExt;
-use workspace::{ModalView, MultiWorkspace, PathList, Workspace};
+use workspace::{ModalView, MultiWorkspace, Workspace};
 
 use crate::{
     Agent, AgentPanel,
     agent_connection_store::AgentConnectionStore,
-    thread_metadata_store::{ThreadMetadata, ThreadMetadataStore},
+    thread_metadata_store::{ThreadMetadata, ThreadMetadataStore, ThreadWorktreePaths},
 };
 
 pub struct AcpThreadImportOnboarding;
@@ -436,19 +437,28 @@ fn find_threads_to_import(
     let mut wait_for_connection_tasks = Vec::new();
 
     for store in stores {
+        let remote_connection = store
+            .read(cx)
+            .project()
+            .read(cx)
+            .remote_connection_options(cx);
+
         for agent_id in agent_ids.clone() {
             let agent = Agent::from(agent_id.clone());
             let server = agent.server(<dyn Fs>::global(cx), ThreadStore::global(cx));
             let entry = store.update(cx, |store, cx| store.request_connection(agent, server, cx));
-            wait_for_connection_tasks
-                .push(entry.read(cx).wait_for_connection().map(|s| (agent_id, s)));
+
+            wait_for_connection_tasks.push(entry.read(cx).wait_for_connection().map({
+                let remote_connection = remote_connection.clone();
+                move |state| (agent_id, remote_connection, state)
+            }));
         }
     }
 
     let mut session_list_tasks = Vec::new();
     cx.spawn(async move |cx| {
         let results = futures::future::join_all(wait_for_connection_tasks).await;
-        for (agent, result) in results {
+        for (agent_id, remote_connection, result) in results {
             let Some(state) = result.log_err() else {
                 continue;
             };
@@ -457,18 +467,25 @@ fn find_threads_to_import(
             };
             let task = cx.update(|cx| {
                 list.list_sessions(AgentSessionListRequest::default(), cx)
-                    .map(|r| (agent, r))
+                    .map({
+                        let remote_connection = remote_connection.clone();
+                        move |response| (agent_id, remote_connection, response)
+                    })
             });
             session_list_tasks.push(task);
         }
 
         let mut sessions_by_agent = Vec::new();
         let results = futures::future::join_all(session_list_tasks).await;
-        for (agent_id, result) in results {
+        for (agent_id, remote_connection, result) in results {
             let Some(response) = result.log_err() else {
                 continue;
             };
-            sessions_by_agent.push((agent_id, response.sessions));
+            sessions_by_agent.push(SessionByAgent {
+                agent_id,
+                remote_connection,
+                sessions: response.sessions,
+            });
         }
 
         Ok(collect_importable_threads(
@@ -478,12 +495,23 @@ fn find_threads_to_import(
     })
 }
 
+struct SessionByAgent {
+    agent_id: AgentId,
+    remote_connection: Option<RemoteConnectionOptions>,
+    sessions: Vec<acp_thread::AgentSessionInfo>,
+}
+
 fn collect_importable_threads(
-    sessions_by_agent: Vec<(AgentId, Vec<acp_thread::AgentSessionInfo>)>,
+    sessions_by_agent: Vec<SessionByAgent>,
     mut existing_sessions: HashSet<acp::SessionId>,
 ) -> Vec<ThreadMetadata> {
     let mut to_insert = Vec::new();
-    for (agent_id, sessions) in sessions_by_agent {
+    for SessionByAgent {
+        agent_id,
+        remote_connection,
+        sessions,
+    } in sessions_by_agent
+    {
         for session in sessions {
             if !existing_sessions.insert(session.session_id.clone()) {
                 continue;
@@ -499,8 +527,8 @@ fn collect_importable_threads(
                     .unwrap_or_else(|| crate::DEFAULT_THREAD_TITLE.into()),
                 updated_at: session.updated_at.unwrap_or_else(|| Utc::now()),
                 created_at: session.created_at,
-                folder_paths,
-                main_worktree_paths: PathList::default(),
+                worktree_paths: ThreadWorktreePaths::from_folder_paths(&folder_paths),
+                remote_connection: remote_connection.clone(),
                 archived: true,
             });
         }
@@ -538,9 +566,10 @@ mod tests {
         let existing = HashSet::from_iter(vec![acp::SessionId::new("existing-1")]);
         let paths = PathList::new(&[Path::new("/project")]);
 
-        let sessions_by_agent = vec![(
-            AgentId::new("agent-a"),
-            vec![
+        let sessions_by_agent = vec![SessionByAgent {
+            agent_id: AgentId::new("agent-a"),
+            remote_connection: None,
+            sessions: vec![
                 make_session(
                     "existing-1",
                     Some("Already There"),
@@ -550,7 +579,7 @@ mod tests {
                 ),
                 make_session("new-1", Some("Brand New"), Some(paths), None, None),
             ],
-        )];
+        }];
 
         let result = collect_importable_threads(sessions_by_agent, existing);
 
@@ -564,13 +593,14 @@ mod tests {
         let existing = HashSet::default();
         let paths = PathList::new(&[Path::new("/project")]);
 
-        let sessions_by_agent = vec![(
-            AgentId::new("agent-a"),
-            vec![
+        let sessions_by_agent = vec![SessionByAgent {
+            agent_id: AgentId::new("agent-a"),
+            remote_connection: None,
+            sessions: vec![
                 make_session("has-dirs", Some("With Dirs"), Some(paths), None, None),
                 make_session("no-dirs", Some("No Dirs"), None, None, None),
             ],
-        )];
+        }];
 
         let result = collect_importable_threads(sessions_by_agent, existing);
 
@@ -583,13 +613,14 @@ mod tests {
         let existing = HashSet::default();
         let paths = PathList::new(&[Path::new("/project")]);
 
-        let sessions_by_agent = vec![(
-            AgentId::new("agent-a"),
-            vec![
+        let sessions_by_agent = vec![SessionByAgent {
+            agent_id: AgentId::new("agent-a"),
+            remote_connection: None,
+            sessions: vec![
                 make_session("s1", Some("Thread 1"), Some(paths.clone()), None, None),
                 make_session("s2", Some("Thread 2"), Some(paths), None, None),
             ],
-        )];
+        }];
 
         let result = collect_importable_threads(sessions_by_agent, existing);
 
@@ -603,20 +634,22 @@ mod tests {
         let paths = PathList::new(&[Path::new("/project")]);
 
         let sessions_by_agent = vec![
-            (
-                AgentId::new("agent-a"),
-                vec![make_session(
+            SessionByAgent {
+                agent_id: AgentId::new("agent-a"),
+                remote_connection: None,
+                sessions: vec![make_session(
                     "s1",
                     Some("From A"),
                     Some(paths.clone()),
                     None,
                     None,
                 )],
-            ),
-            (
-                AgentId::new("agent-b"),
-                vec![make_session("s2", Some("From B"), Some(paths), None, None)],
-            ),
+            },
+            SessionByAgent {
+                agent_id: AgentId::new("agent-b"),
+                remote_connection: None,
+                sessions: vec![make_session("s2", Some("From B"), Some(paths), None, None)],
+            },
         ];
 
         let result = collect_importable_threads(sessions_by_agent, existing);
@@ -640,26 +673,28 @@ mod tests {
         let paths = PathList::new(&[Path::new("/project")]);
 
         let sessions_by_agent = vec![
-            (
-                AgentId::new("agent-a"),
-                vec![make_session(
+            SessionByAgent {
+                agent_id: AgentId::new("agent-a"),
+                remote_connection: None,
+                sessions: vec![make_session(
                     "shared-session",
                     Some("From A"),
                     Some(paths.clone()),
                     None,
                     None,
                 )],
-            ),
-            (
-                AgentId::new("agent-b"),
-                vec![make_session(
+            },
+            SessionByAgent {
+                agent_id: AgentId::new("agent-b"),
+                remote_connection: None,
+                sessions: vec![make_session(
                     "shared-session",
                     Some("From B"),
                     Some(paths),
                     None,
                     None,
                 )],
-            ),
+            },
         ];
 
         let result = collect_importable_threads(sessions_by_agent, existing);
@@ -679,13 +714,14 @@ mod tests {
         let existing =
             HashSet::from_iter(vec![acp::SessionId::new("s1"), acp::SessionId::new("s2")]);
 
-        let sessions_by_agent = vec![(
-            AgentId::new("agent-a"),
-            vec![
+        let sessions_by_agent = vec![SessionByAgent {
+            agent_id: AgentId::new("agent-a"),
+            remote_connection: None,
+            sessions: vec![
                 make_session("s1", Some("T1"), Some(paths.clone()), None, None),
                 make_session("s2", Some("T2"), Some(paths), None, None),
             ],
-        )];
+        }];
 
         let result = collect_importable_threads(sessions_by_agent, existing);
         assert!(result.is_empty());

crates/agent_ui/src/thread_metadata_store.rs 🔗

@@ -10,31 +10,37 @@ use anyhow::Context as _;
 use chrono::{DateTime, Utc};
 use collections::{HashMap, HashSet};
 use db::{
+    kvp::KeyValueStore,
     sqlez::{
         bindable::Column, domain::Domain, statement::Statement,
         thread_safe_connection::ThreadSafeConnection,
     },
     sqlez_macros::sql,
 };
-use futures::{FutureExt as _, future::Shared};
+use fs::Fs;
+use futures::{FutureExt, future::Shared};
 use gpui::{AppContext as _, Entity, Global, Subscription, Task};
 use project::AgentId;
+use remote::RemoteConnectionOptions;
 use ui::{App, Context, SharedString};
 use util::ResultExt as _;
-use workspace::PathList;
+use workspace::{PathList, SerializedWorkspaceLocation, WorkspaceDb};
 
 use crate::DEFAULT_THREAD_TITLE;
 
+const THREAD_REMOTE_CONNECTION_MIGRATION_KEY: &str = "thread-metadata-remote-connection-backfill";
+
 pub fn init(cx: &mut App) {
     ThreadMetadataStore::init_global(cx);
-    migrate_thread_metadata(cx);
+    let migration_task = migrate_thread_metadata(cx);
+    migrate_thread_remote_connections(cx, migration_task);
 }
 
 /// Migrate existing thread metadata from native agent thread store to the new metadata storage.
 /// We skip migrating threads that do not have a project.
 ///
 /// TODO: Remove this after N weeks of shipping the sidebar
-fn migrate_thread_metadata(cx: &mut App) {
+fn migrate_thread_metadata(cx: &mut App) -> Task<anyhow::Result<()>> {
     let store = ThreadMetadataStore::global(cx);
     let db = store.read(cx).db.clone();
 
@@ -58,8 +64,8 @@ fn migrate_thread_metadata(cx: &mut App) {
                         title: entry.title,
                         updated_at: entry.updated_at,
                         created_at: entry.created_at,
-                        folder_paths: entry.folder_paths,
-                        main_worktree_paths: PathList::default(),
+                        worktree_paths: ThreadWorktreePaths::from_folder_paths(&entry.folder_paths),
+                        remote_connection: None,
                         archived: true,
                     })
                 })
@@ -75,11 +81,11 @@ fn migrate_thread_metadata(cx: &mut App) {
         if is_first_migration {
             let mut per_project: HashMap<PathList, Vec<&mut ThreadMetadata>> = HashMap::default();
             for entry in &mut to_migrate {
-                if entry.folder_paths.is_empty() {
+                if entry.worktree_paths.is_empty() {
                     continue;
                 }
                 per_project
-                    .entry(entry.folder_paths.clone())
+                    .entry(entry.worktree_paths.folder_path_list().clone())
                     .or_default()
                     .push(entry);
             }
@@ -104,12 +110,219 @@ fn migrate_thread_metadata(cx: &mut App) {
         let _ = store.update(cx, |store, cx| store.reload(cx));
         anyhow::Ok(())
     })
+}
+
+/// One-shot backfill: for threads saved before remote connections were
+/// tracked, recover the connection by matching the thread's paths against
+/// recently opened workspaces on disk. Guarded by a KVP marker so it runs
+/// at most once per install.
+fn migrate_thread_remote_connections(cx: &mut App, migration_task: Task<anyhow::Result<()>>) {
+    let store = ThreadMetadataStore::global(cx);
+    let db = store.read(cx).db.clone();
+    let kvp = KeyValueStore::global(cx);
+    let workspace_db = WorkspaceDb::global(cx);
+    let fs = <dyn Fs>::global(cx);
+
+    cx.spawn(async move |cx| -> anyhow::Result<()> {
+        // Wait for the base metadata migration so we read a fully-populated table.
+        migration_task.await?;
+
+        if kvp
+            .read_kvp(THREAD_REMOTE_CONNECTION_MIGRATION_KEY)?
+            .is_some()
+        {
+            return Ok(());
+        }
+
+        let recent_workspaces = workspace_db.recent_workspaces_on_disk(fs.as_ref()).await?;
+
+        // Partition recent workspaces: any path list that was ever opened
+        // locally wins over a remote workspace with the same paths.
+        let mut local_path_lists = HashSet::<PathList>::default();
+        let mut remote_path_lists = HashMap::<PathList, RemoteConnectionOptions>::default();
+
+        for (_, location, path_list, _) in &recent_workspaces {
+            if !path_list.is_empty() && matches!(location, SerializedWorkspaceLocation::Local) {
+                local_path_lists.insert(path_list.clone());
+            }
+        }
+
+        for (_, location, path_list, _) in recent_workspaces {
+            if let SerializedWorkspaceLocation::Remote(remote_connection) = location {
+                if !local_path_lists.contains(&path_list) {
+                    // `recent_workspaces` is most-recent-first, so the first
+                    // remote connection seen for a path list wins.
+                    remote_path_lists
+                        .entry(path_list)
+                        .or_insert(remote_connection);
+                }
+            }
+        }
+
+        let mut reloaded = false;
+        for metadata in db.list()? {
+            if metadata.remote_connection.is_some() {
+                continue;
+            }
+
+            if let Some(remote_connection) = remote_path_lists
+                .get(metadata.folder_paths())
+                .or_else(|| remote_path_lists.get(metadata.main_worktree_paths()))
+            {
+                db.save(ThreadMetadata {
+                    remote_connection: Some(remote_connection.clone()),
+                    ..metadata
+                })
+                .await?;
+                reloaded = true;
+            }
+        }
+
+        // NOTE: `bool::then_some` evaluates its argument eagerly, which would
+        // kick off `store.reload` even when nothing changed — branch explicitly
+        // so the reload only runs when rows were actually updated.
+        let reloaded_task = if reloaded {
+            store.update(cx, |store, cx| store.reload(cx))
+        } else {
+            Task::ready(()).shared()
+        };
+
+        kvp.write_kvp(
+            THREAD_REMOTE_CONNECTION_MIGRATION_KEY.to_string(),
+            "1".to_string(),
+        )
+        .await?;
+        reloaded_task.await;
+
+        Ok(())
+    })
     .detach_and_log_err(cx);
 }
 
 struct GlobalThreadMetadataStore(Entity<ThreadMetadataStore>);
 impl Global for GlobalThreadMetadataStore {}
 
+/// Paired worktree paths for a thread. Each folder path has a corresponding
+/// main worktree path at the same position. The two lists are always the
+/// same length and are modified together via `add_path` / `remove_main_path`.
+///
+/// For non-linked worktrees, the main path and folder path are identical.
+/// For linked worktrees, the main path is the original repo and the folder
+/// path is the linked worktree location.
+///
+/// Internally stores two `PathList`s with matching insertion order so that
+/// `ordered_paths()` on both yields positionally-paired results.
+// Equality is field-wise, so the derived `PartialEq` is exactly the previous
+// hand-written impl.
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct ThreadWorktreePaths {
+    folder_paths: PathList,
+    main_worktree_paths: PathList,
+}
+
+impl ThreadWorktreePaths {
+    /// Build from a project's current state. Each visible worktree is paired
+    /// with its main repo path (resolved via git), falling back to the
+    /// worktree's own path if no git repo is found.
+    pub fn from_project(project: &project::Project, cx: &App) -> Self {
+        let (mains, folders): (Vec<PathBuf>, Vec<PathBuf>) = project
+            .visible_worktrees(cx)
+            .map(|worktree| {
+                let snapshot = worktree.read(cx).snapshot();
+                let folder_path = snapshot.abs_path().to_path_buf();
+                // The repo root is the parent of the common git dir; absent a
+                // repo (or a git dir with no parent), the worktree is its own
+                // main path.
+                let main_path = snapshot
+                    .root_repo_common_dir()
+                    .and_then(|dir| dir.parent().map(|repo_root| repo_root.to_path_buf()))
+                    .unwrap_or_else(|| folder_path.clone());
+                (main_path, folder_path)
+            })
+            .unzip();
+        Self {
+            folder_paths: PathList::new(&folders),
+            main_worktree_paths: PathList::new(&mains),
+        }
+    }
+
+    /// Build from two parallel `PathList`s that already share the same
+    /// insertion order. Used for deserialization from DB.
+    ///
+    /// Returns an error if the two lists have different lengths, which
+    /// indicates corrupted data from a prior migration bug.
+    pub fn from_path_lists(
+        main_worktree_paths: PathList,
+        folder_paths: PathList,
+    ) -> anyhow::Result<Self> {
+        anyhow::ensure!(
+            main_worktree_paths.paths().len() == folder_paths.paths().len(),
+            "main_worktree_paths has {} entries but folder_paths has {}",
+            main_worktree_paths.paths().len(),
+            folder_paths.paths().len(),
+        );
+        Ok(Self {
+            folder_paths,
+            main_worktree_paths,
+        })
+    }
+
+    /// Build for non-linked worktrees where main == folder for every path.
+    pub fn from_folder_paths(folder_paths: &PathList) -> Self {
+        Self {
+            folder_paths: folder_paths.clone(),
+            main_worktree_paths: folder_paths.clone(),
+        }
+    }
+
+    /// True when no paths are stored (both lists are empty, by invariant).
+    pub fn is_empty(&self) -> bool {
+        self.folder_paths.is_empty()
+    }
+
+    /// The folder paths (for workspace matching / `threads_by_paths` index).
+    pub fn folder_path_list(&self) -> &PathList {
+        &self.folder_paths
+    }
+
+    /// The main worktree paths (for group key / `threads_by_main_paths` index).
+    pub fn main_worktree_path_list(&self) -> &PathList {
+        &self.main_worktree_paths
+    }
+
+    /// Iterate the (main_worktree_path, folder_path) pairs in insertion order.
+    pub fn ordered_pairs(&self) -> impl Iterator<Item = (&PathBuf, &PathBuf)> {
+        self.main_worktree_paths
+            .ordered_paths()
+            .zip(self.folder_paths.ordered_paths())
+    }
+
+    /// Add a new path pair. If the exact (main, folder) pair already exists,
+    /// this is a no-op. Rebuilds both internal `PathList`s to maintain
+    /// consistent ordering.
+    pub fn add_path(&mut self, main_path: &Path, folder_path: &Path) {
+        let already_exists = self
+            .ordered_pairs()
+            .any(|(m, f)| m.as_path() == main_path && f.as_path() == folder_path);
+        if already_exists {
+            return;
+        }
+        let (mut mains, mut folders): (Vec<PathBuf>, Vec<PathBuf>) = self
+            .ordered_pairs()
+            .map(|(m, f)| (m.clone(), f.clone()))
+            .unzip();
+        mains.push(main_path.to_path_buf());
+        folders.push(folder_path.to_path_buf());
+        self.main_worktree_paths = PathList::new(&mains);
+        self.folder_paths = PathList::new(&folders);
+    }
+
+    /// Remove all pairs whose main worktree path matches the given path.
+    /// This removes the corresponding entries from both lists.
+    pub fn remove_main_path(&mut self, main_path: &Path) {
+        let (mains, folders): (Vec<PathBuf>, Vec<PathBuf>) = self
+            .ordered_pairs()
+            .filter(|(m, _)| m.as_path() != main_path)
+            .map(|(m, f)| (m.clone(), f.clone()))
+            .unzip();
+        self.main_worktree_paths = PathList::new(&mains);
+        self.folder_paths = PathList::new(&folders);
+    }
+}
+
 /// Lightweight metadata for any thread (native or ACP), enough to populate
 /// the sidebar list and route to the correct load path when clicked.
 #[derive(Debug, Clone, PartialEq)]
@@ -119,16 +332,25 @@ pub struct ThreadMetadata {
     pub title: SharedString,
     pub updated_at: DateTime<Utc>,
     pub created_at: Option<DateTime<Utc>>,
-    pub folder_paths: PathList,
-    pub main_worktree_paths: PathList,
+    pub worktree_paths: ThreadWorktreePaths,
+    pub remote_connection: Option<RemoteConnectionOptions>,
     pub archived: bool,
 }
 
+impl ThreadMetadata {
+    /// The thread's folder paths (the `threads_by_paths` index key).
+    pub fn folder_paths(&self) -> &PathList {
+        self.worktree_paths.folder_path_list()
+    }
+    /// The thread's main worktree paths (the `threads_by_main_paths` index key).
+    pub fn main_worktree_paths(&self) -> &PathList {
+        self.worktree_paths.main_worktree_path_list()
+    }
+}
+
 impl From<&ThreadMetadata> for acp_thread::AgentSessionInfo {
     fn from(meta: &ThreadMetadata) -> Self {
         Self {
             session_id: meta.session_id.clone(),
-            work_dirs: Some(meta.folder_paths.clone()),
+            work_dirs: Some(meta.folder_paths().clone()),
             title: Some(meta.title.clone()),
             updated_at: Some(meta.updated_at),
             created_at: meta.created_at,
@@ -312,12 +534,12 @@ impl ThreadMetadataStore {
 
                     for row in rows {
                         this.threads_by_paths
-                            .entry(row.folder_paths.clone())
+                            .entry(row.folder_paths().clone())
                             .or_default()
                             .insert(row.session_id.clone());
-                        if !row.main_worktree_paths.is_empty() {
+                        if !row.main_worktree_paths().is_empty() {
                             this.threads_by_main_paths
-                                .entry(row.main_worktree_paths.clone())
+                                .entry(row.main_worktree_paths().clone())
                                 .or_default()
                                 .insert(row.session_id.clone());
                         }
@@ -352,17 +574,17 @@ impl ThreadMetadataStore {
 
     fn save_internal(&mut self, metadata: ThreadMetadata) {
         if let Some(thread) = self.threads.get(&metadata.session_id) {
-            if thread.folder_paths != metadata.folder_paths {
-                if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
+            if thread.folder_paths() != metadata.folder_paths() {
+                if let Some(session_ids) = self.threads_by_paths.get_mut(thread.folder_paths()) {
                     session_ids.remove(&metadata.session_id);
                 }
             }
-            if thread.main_worktree_paths != metadata.main_worktree_paths
-                && !thread.main_worktree_paths.is_empty()
+            if thread.main_worktree_paths() != metadata.main_worktree_paths()
+                && !thread.main_worktree_paths().is_empty()
             {
                 if let Some(session_ids) = self
                     .threads_by_main_paths
-                    .get_mut(&thread.main_worktree_paths)
+                    .get_mut(thread.main_worktree_paths())
                 {
                     session_ids.remove(&metadata.session_id);
                 }
@@ -373,13 +595,13 @@ impl ThreadMetadataStore {
             .insert(metadata.session_id.clone(), metadata.clone());
 
         self.threads_by_paths
-            .entry(metadata.folder_paths.clone())
+            .entry(metadata.folder_paths().clone())
             .or_default()
             .insert(metadata.session_id.clone());
 
-        if !metadata.main_worktree_paths.is_empty() {
+        if !metadata.main_worktree_paths().is_empty() {
             self.threads_by_main_paths
-                .entry(metadata.main_worktree_paths.clone())
+                .entry(metadata.main_worktree_paths().clone())
                 .or_default()
                 .insert(metadata.session_id.clone());
         }
@@ -397,7 +619,11 @@ impl ThreadMetadataStore {
     ) {
         if let Some(thread) = self.threads.get(session_id) {
             self.save_internal(ThreadMetadata {
-                folder_paths: work_dirs,
+                worktree_paths: ThreadWorktreePaths::from_path_lists(
+                    thread.main_worktree_paths().clone(),
+                    work_dirs.clone(),
+                )
+                .unwrap_or_else(|_| ThreadWorktreePaths::from_folder_paths(&work_dirs)),
                 ..thread.clone()
             });
             cx.notify();
@@ -438,7 +664,7 @@ impl ThreadMetadataStore {
         cx: &mut Context<Self>,
     ) {
         if let Some(thread) = self.threads.get(session_id).cloned() {
-            let mut paths: Vec<PathBuf> = thread.folder_paths.paths().to_vec();
+            let mut paths: Vec<PathBuf> = thread.folder_paths().paths().to_vec();
             for (old_path, new_path) in path_replacements {
                 if let Some(pos) = paths.iter().position(|p| p == old_path) {
                     paths[pos] = new_path.clone();
@@ -446,7 +672,11 @@ impl ThreadMetadataStore {
             }
             let new_folder_paths = PathList::new(&paths);
             self.save_internal(ThreadMetadata {
-                folder_paths: new_folder_paths,
+                worktree_paths: ThreadWorktreePaths::from_path_lists(
+                    thread.main_worktree_paths().clone(),
+                    new_folder_paths.clone(),
+                )
+                .unwrap_or_else(|_| ThreadWorktreePaths::from_folder_paths(&new_folder_paths)),
                 ..thread
             });
             cx.notify();
@@ -460,7 +690,7 @@ impl ThreadMetadataStore {
         cx: &mut Context<Self>,
     ) {
         if let Some(thread) = self.threads.get(session_id).cloned() {
-            let mut paths: Vec<PathBuf> = thread.folder_paths.paths().to_vec();
+            let mut paths: Vec<PathBuf> = thread.folder_paths().paths().to_vec();
             for (old_path, new_path) in path_replacements {
                 for path in &mut paths {
                     if path == old_path {
@@ -470,13 +700,69 @@ impl ThreadMetadataStore {
             }
             let new_folder_paths = PathList::new(&paths);
             self.save_internal(ThreadMetadata {
-                folder_paths: new_folder_paths,
+                worktree_paths: ThreadWorktreePaths::from_path_lists(
+                    thread.main_worktree_paths().clone(),
+                    new_folder_paths.clone(),
+                )
+                .unwrap_or_else(|_| ThreadWorktreePaths::from_folder_paths(&new_folder_paths)),
                 ..thread
             });
             cx.notify();
         }
     }
 
+    /// Apply a mutation to the worktree paths of all threads whose current
+    /// `main_worktree_paths` matches `current_main_paths`, then re-index.
+    pub fn change_worktree_paths(
+        &mut self,
+        current_main_paths: &PathList,
+        mutate: impl Fn(&mut ThreadWorktreePaths),
+        cx: &mut Context<Self>,
+    ) {
+        // Snapshot the affected session ids first: the mutation can change
+        // the very index keys we are looking threads up by.
+        let session_ids: Vec<_> = self
+            .threads_by_main_paths
+            .get(current_main_paths)
+            .into_iter()
+            .flatten()
+            .cloned()
+            .collect();
+
+        if session_ids.is_empty() {
+            return;
+        }
+
+        for session_id in &session_ids {
+            if let Some(thread) = self.threads.get_mut(session_id) {
+                // De-index under the thread's current keys before mutating so
+                // no stale entries linger in either map.
+                if let Some(ids) = self
+                    .threads_by_main_paths
+                    .get_mut(thread.main_worktree_paths())
+                {
+                    ids.remove(session_id);
+                }
+                if let Some(ids) = self.threads_by_paths.get_mut(thread.folder_paths()) {
+                    ids.remove(session_id);
+                }
+
+                mutate(&mut thread.worktree_paths);
+
+                // Re-index under the (possibly changed) keys.
+                self.threads_by_main_paths
+                    .entry(thread.main_worktree_paths().clone())
+                    .or_default()
+                    .insert(session_id.clone());
+                self.threads_by_paths
+                    .entry(thread.folder_paths().clone())
+                    .or_default()
+                    .insert(session_id.clone());
+
+                // Persist asynchronously; a send failure is logged, not fatal.
+                self.pending_thread_ops_tx
+                    .try_send(DbOperation::Upsert(thread.clone()))
+                    .log_err();
+            }
+        }
+
+        cx.notify();
+    }
+
     pub fn create_archived_worktree(
         &self,
         worktree_path: String,
@@ -569,13 +855,13 @@ impl ThreadMetadataStore {
 
     pub fn delete(&mut self, session_id: acp::SessionId, cx: &mut Context<Self>) {
         if let Some(thread) = self.threads.get(&session_id) {
-            if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
+            if let Some(session_ids) = self.threads_by_paths.get_mut(thread.folder_paths()) {
                 session_ids.remove(&session_id);
             }
-            if !thread.main_worktree_paths.is_empty() {
+            if !thread.main_worktree_paths().is_empty() {
                 if let Some(session_ids) = self
                     .threads_by_main_paths
-                    .get_mut(&thread.main_worktree_paths)
+                    .get_mut(thread.main_worktree_paths())
                 {
                     session_ids.remove(&session_id);
                 }
@@ -715,21 +1001,11 @@ impl ThreadMetadataStore {
 
                 let agent_id = thread_ref.connection().agent_id();
 
-                let folder_paths = {
-                    let project = thread_ref.project().read(cx);
-                    let paths: Vec<Arc<Path>> = project
-                        .visible_worktrees(cx)
-                        .map(|worktree| worktree.read(cx).abs_path())
-                        .collect();
-                    PathList::new(&paths)
-                };
+                let project = thread_ref.project().read(cx);
+                let worktree_paths = ThreadWorktreePaths::from_project(project, cx);
 
-                let main_worktree_paths = thread_ref
-                    .project()
-                    .read(cx)
-                    .project_group_key(cx)
-                    .path_list()
-                    .clone();
+                let project_group_key = project.project_group_key(cx);
+                let remote_connection = project_group_key.host();
 
                 // Threads without a folder path (e.g. started in an empty
                 // window) are archived by default so they don't get lost,
@@ -737,7 +1013,7 @@ impl ThreadMetadataStore {
                 // them from the archive.
                 let archived = existing_thread
                     .map(|t| t.archived)
-                    .unwrap_or(folder_paths.is_empty());
+                    .unwrap_or(worktree_paths.is_empty());
 
                 let metadata = ThreadMetadata {
                     session_id,
@@ -745,8 +1021,8 @@ impl ThreadMetadataStore {
                     title,
                     created_at: Some(created_at),
                     updated_at,
-                    folder_paths,
-                    main_worktree_paths,
+                    worktree_paths,
+                    remote_connection,
                     archived,
                 };
 
@@ -801,6 +1077,7 @@ impl Domain for ThreadMetadataDb {
                 PRIMARY KEY (session_id, archived_worktree_id)
             ) STRICT;
         ),
+        sql!(ALTER TABLE sidebar_threads ADD COLUMN remote_connection TEXT),
     ];
 }
 
@@ -817,7 +1094,7 @@ impl ThreadMetadataDb {
     /// List all sidebar thread metadata, ordered by updated_at descending.
     pub fn list(&self) -> anyhow::Result<Vec<ThreadMetadata>> {
         self.select::<ThreadMetadata>(
-            "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order \
+            "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order, remote_connection \
              FROM sidebar_threads \
              ORDER BY updated_at DESC"
         )?()
@@ -834,24 +1111,30 @@ impl ThreadMetadataDb {
         let title = row.title.to_string();
         let updated_at = row.updated_at.to_rfc3339();
         let created_at = row.created_at.map(|dt| dt.to_rfc3339());
-        let serialized = row.folder_paths.serialize();
-        let (folder_paths, folder_paths_order) = if row.folder_paths.is_empty() {
+        let serialized = row.folder_paths().serialize();
+        let (folder_paths, folder_paths_order) = if row.folder_paths().is_empty() {
             (None, None)
         } else {
             (Some(serialized.paths), Some(serialized.order))
         };
-        let main_serialized = row.main_worktree_paths.serialize();
-        let (main_worktree_paths, main_worktree_paths_order) = if row.main_worktree_paths.is_empty()
-        {
-            (None, None)
-        } else {
-            (Some(main_serialized.paths), Some(main_serialized.order))
-        };
+        let main_serialized = row.main_worktree_paths().serialize();
+        let (main_worktree_paths, main_worktree_paths_order) =
+            if row.main_worktree_paths().is_empty() {
+                (None, None)
+            } else {
+                (Some(main_serialized.paths), Some(main_serialized.order))
+            };
+        let remote_connection = row
+            .remote_connection
+            .as_ref()
+            .map(serde_json::to_string)
+            .transpose()
+            .context("serialize thread metadata remote connection")?;
         let archived = row.archived;
 
         self.write(move |conn| {
-            let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order) \
-                       VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10) \
+            let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order, remote_connection) \
+                       VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11) \
                        ON CONFLICT(session_id) DO UPDATE SET \
                            agent_id = excluded.agent_id, \
                            title = excluded.title, \
@@ -861,7 +1144,8 @@ impl ThreadMetadataDb {
                            folder_paths_order = excluded.folder_paths_order, \
                            archived = excluded.archived, \
                            main_worktree_paths = excluded.main_worktree_paths, \
-                           main_worktree_paths_order = excluded.main_worktree_paths_order";
+                           main_worktree_paths_order = excluded.main_worktree_paths_order, \
+                           remote_connection = excluded.remote_connection";
             let mut stmt = Statement::prepare(conn, sql)?;
             let mut i = stmt.bind(&id, 1)?;
             i = stmt.bind(&agent_id, i)?;
@@ -872,7 +1156,8 @@ impl ThreadMetadataDb {
             i = stmt.bind(&folder_paths_order, i)?;
             i = stmt.bind(&archived, i)?;
             i = stmt.bind(&main_worktree_paths, i)?;
-            stmt.bind(&main_worktree_paths_order, i)?;
+            i = stmt.bind(&main_worktree_paths_order, i)?;
+            stmt.bind(&remote_connection, i)?;
             stmt.exec()
         })
         .await
@@ -1005,6 +1290,8 @@ impl Column for ThreadMetadata {
             Column::column(statement, next)?;
         let (main_worktree_paths_order_str, next): (Option<String>, i32) =
             Column::column(statement, next)?;
+        let (remote_connection_json, next): (Option<String>, i32) =
+            Column::column(statement, next)?;
 
         let agent_id = agent_id
             .map(|id| AgentId::new(id))
@@ -1035,6 +1322,16 @@ impl Column for ThreadMetadata {
             })
             .unwrap_or_default();
 
+        let remote_connection = remote_connection_json
+            .as_deref()
+            .map(serde_json::from_str::<RemoteConnectionOptions>)
+            .transpose()
+            .context("deserialize thread metadata remote connection")?;
+
+        let worktree_paths =
+            ThreadWorktreePaths::from_path_lists(main_worktree_paths, folder_paths)
+                .unwrap_or_else(|_| ThreadWorktreePaths::default());
+
         Ok((
             ThreadMetadata {
                 session_id: acp::SessionId::new(id),
@@ -1042,8 +1339,8 @@ impl Column for ThreadMetadata {
                 title: title.into(),
                 updated_at,
                 created_at,
-                folder_paths,
-                main_worktree_paths,
+                worktree_paths,
+                remote_connection,
                 archived,
             },
             next,
@@ -1087,6 +1384,7 @@ mod tests {
     use gpui::TestAppContext;
     use project::FakeFs;
     use project::Project;
+    use remote::WslConnectionOptions;
     use std::path::Path;
     use std::rc::Rc;
 
@@ -1124,21 +1422,38 @@ mod tests {
             title: title.to_string().into(),
             updated_at,
             created_at: Some(updated_at),
-            folder_paths,
-            main_worktree_paths: PathList::default(),
+            worktree_paths: ThreadWorktreePaths::from_folder_paths(&folder_paths),
+            remote_connection: None,
         }
     }
 
     fn init_test(cx: &mut TestAppContext) {
+        let fs = FakeFs::new(cx.executor());
         cx.update(|cx| {
             let settings_store = settings::SettingsStore::test(cx);
             cx.set_global(settings_store);
+            <dyn Fs>::set_global(fs, cx);
             ThreadMetadataStore::init_global(cx);
             ThreadStore::init_global(cx);
         });
         cx.run_until_parked();
     }
 
+    /// Reset the one-shot backfill marker so tests can re-run the migration.
+    fn clear_thread_metadata_remote_connection_backfill(cx: &mut TestAppContext) {
+        let kvp = cx.update(|cx| KeyValueStore::global(cx));
+        // Use the shared constant rather than re-typing the key so the test
+        // cannot silently drift from the production migration key.
+        smol::block_on(kvp.delete_kvp(THREAD_REMOTE_CONNECTION_MIGRATION_KEY.to_string()))
+            .unwrap();
+    }
+
+    /// Run both metadata migrations back to back, mirroring what `init` does.
+    fn run_thread_metadata_migrations(cx: &mut TestAppContext) {
+        clear_thread_metadata_remote_connection_backfill(cx);
+        cx.update(|cx| {
+            let migration_task = migrate_thread_metadata(cx);
+            migrate_thread_remote_connections(cx, migration_task);
+        });
+        cx.run_until_parked();
+    }
+
     #[gpui::test]
     async fn test_store_initializes_cache_from_database(cx: &mut TestAppContext) {
         let first_paths = PathList::new(&[Path::new("/project-a")]);
@@ -1338,8 +1653,8 @@ mod tests {
             title: "Existing Metadata".into(),
             updated_at: now - chrono::Duration::seconds(10),
             created_at: Some(now - chrono::Duration::seconds(10)),
-            folder_paths: project_a_paths.clone(),
-            main_worktree_paths: PathList::default(),
+            worktree_paths: ThreadWorktreePaths::from_folder_paths(&project_a_paths),
+            remote_connection: None,
             archived: false,
         };
 
@@ -1397,8 +1712,7 @@ mod tests {
             cx.run_until_parked();
         }
 
-        cx.update(|cx| migrate_thread_metadata(cx));
-        cx.run_until_parked();
+        run_thread_metadata_migrations(cx);
 
         let list = cx.update(|cx| {
             let store = ThreadMetadataStore::global(cx);
@@ -1448,8 +1762,8 @@ mod tests {
             title: "Existing Metadata".into(),
             updated_at: existing_updated_at,
             created_at: Some(existing_updated_at),
-            folder_paths: project_paths.clone(),
-            main_worktree_paths: PathList::default(),
+            worktree_paths: ThreadWorktreePaths::from_folder_paths(&project_paths),
+            remote_connection: None,
             archived: false,
         };
 
@@ -1478,8 +1792,7 @@ mod tests {
         save_task.await.unwrap();
         cx.run_until_parked();
 
-        cx.update(|cx| migrate_thread_metadata(cx));
-        cx.run_until_parked();
+        run_thread_metadata_migrations(cx);
 
         let list = cx.update(|cx| {
             let store = ThreadMetadataStore::global(cx);
@@ -1490,6 +1803,82 @@ mod tests {
         assert_eq!(list[0].session_id.0.as_ref(), "existing-session");
     }
 
+    #[gpui::test]
+    async fn test_migrate_thread_remote_connections_backfills_from_workspace_db(
+        cx: &mut TestAppContext,
+    ) {
+        init_test(cx);
+
+        // Arrange: seed a thread whose metadata has no remote connection yet.
+        let folder_paths = PathList::new(&[Path::new("/remote-project")]);
+        let updated_at = Utc::now();
+        let metadata = make_metadata(
+            "remote-session",
+            "Remote Thread",
+            updated_at,
+            folder_paths.clone(),
+        );
+
+        cx.update(|cx| {
+            let store = ThreadMetadataStore::global(cx);
+            store.update(cx, |store, cx| {
+                store.save(metadata, cx);
+            });
+        });
+        cx.run_until_parked();
+
+        // Arrange: insert a WSL remote connection and point a workspace row
+        // with the same paths at it, via raw SQL against the workspace DB.
+        let workspace_db = cx.update(|cx| WorkspaceDb::global(cx));
+        let workspace_id = workspace_db.next_id().await.unwrap();
+        let serialized_paths = folder_paths.serialize();
+        let remote_connection_id = 1_i64;
+        workspace_db
+            .write(move |conn| {
+                let mut stmt = Statement::prepare(
+                    conn,
+                    "INSERT INTO remote_connections(id, kind, user, distro) VALUES (?1, ?2, ?3, ?4)",
+                )?;
+                let mut next_index = stmt.bind(&remote_connection_id, 1)?;
+                next_index = stmt.bind(&"wsl", next_index)?;
+                next_index = stmt.bind(&Some("anth".to_string()), next_index)?;
+                stmt.bind(&Some("Ubuntu".to_string()), next_index)?;
+                stmt.exec()?;
+
+                let mut stmt = Statement::prepare(
+                    conn,
+                    "UPDATE workspaces SET paths = ?2, paths_order = ?3, remote_connection_id = ?4, timestamp = CURRENT_TIMESTAMP WHERE workspace_id = ?1",
+                )?;
+                let mut next_index = stmt.bind(&workspace_id, 1)?;
+                next_index = stmt.bind(&serialized_paths.paths, next_index)?;
+                next_index = stmt.bind(&serialized_paths.order, next_index)?;
+                stmt.bind(&Some(remote_connection_id as i32), next_index)?;
+                stmt.exec()
+            })
+            .await
+            .unwrap();
+
+        // Act: run only the backfill step, with a no-op base-migration task.
+        clear_thread_metadata_remote_connection_backfill(cx);
+        cx.update(|cx| {
+            migrate_thread_remote_connections(cx, Task::ready(Ok(())));
+        });
+        cx.run_until_parked();
+
+        let metadata = cx.update(|cx| {
+            let store = ThreadMetadataStore::global(cx);
+            store
+                .read(cx)
+                .entry(&acp::SessionId::new("remote-session"))
+                .cloned()
+                .expect("expected migrated metadata row")
+        });
+
+        // Assert: the thread picked up the remote connection from the
+        // matching workspace row.
+        assert_eq!(
+            metadata.remote_connection,
+            Some(RemoteConnectionOptions::Wsl(WslConnectionOptions {
+                distro_name: "Ubuntu".to_string(),
+                user: Some("anth".to_string()),
+            }))
+        );
+    }
+
     #[gpui::test]
     async fn test_migrate_thread_metadata_archives_beyond_five_most_recent_per_project(
         cx: &mut TestAppContext,
@@ -1538,8 +1927,7 @@ mod tests {
             cx.run_until_parked();
         }
 
-        cx.update(|cx| migrate_thread_metadata(cx));
-        cx.run_until_parked();
+        run_thread_metadata_migrations(cx);
 
         let list = cx.update(|cx| {
             let store = ThreadMetadataStore::global(cx);
@@ -1551,7 +1939,7 @@ mod tests {
         // Project A: 5 most recent should be unarchived, 2 oldest should be archived
         let mut project_a_entries: Vec<_> = list
             .iter()
-            .filter(|m| m.folder_paths == project_a_paths)
+            .filter(|m| *m.folder_paths() == project_a_paths)
             .collect();
         assert_eq!(project_a_entries.len(), 7);
         project_a_entries.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
@@ -1574,7 +1962,7 @@ mod tests {
         // Project B: all 3 should be unarchived (under the limit)
         let project_b_entries: Vec<_> = list
             .iter()
-            .filter(|m| m.folder_paths == project_b_paths)
+            .filter(|m| *m.folder_paths() == project_b_paths)
             .collect();
         assert_eq!(project_b_entries.len(), 3);
         assert!(project_b_entries.iter().all(|m| !m.archived));
@@ -1738,7 +2126,7 @@ mod tests {
             let without_worktree = store
                 .entry(&session_without_worktree)
                 .expect("missing metadata for thread without project association");
-            assert!(without_worktree.folder_paths.is_empty());
+            assert!(without_worktree.folder_paths().is_empty());
             assert!(
                 without_worktree.archived,
                 "expected thread without project association to be archived"
@@ -1748,7 +2136,7 @@ mod tests {
                 .entry(&session_with_worktree)
                 .expect("missing metadata for thread with project association");
             assert_eq!(
-                with_worktree.folder_paths,
+                *with_worktree.folder_paths(),
                 PathList::new(&[Path::new("/project-a")])
             );
             assert!(
@@ -2382,7 +2770,7 @@ mod tests {
             store.entry(&acp::SessionId::new("session-multi")).cloned()
         });
         let entry = entry.unwrap();
-        let paths = entry.folder_paths.paths();
+        let paths = entry.folder_paths().paths();
         assert_eq!(paths.len(), 3);
         assert!(paths.contains(&PathBuf::from("/restored/worktree-a")));
         assert!(paths.contains(&PathBuf::from("/restored/worktree-b")));
@@ -2427,7 +2815,7 @@ mod tests {
                 .cloned()
         });
         let entry = entry.unwrap();
-        let paths = entry.folder_paths.paths();
+        let paths = entry.folder_paths().paths();
         assert_eq!(paths.len(), 2);
         assert!(paths.contains(&PathBuf::from("/new/worktree-a")));
         assert!(paths.contains(&PathBuf::from("/other/path")));
@@ -2473,7 +2861,7 @@ mod tests {
             store.entry(&acp::SessionId::new("session-multi")).cloned()
         });
         let entry = entry.unwrap();
-        let paths = entry.folder_paths.paths();
+        let paths = entry.folder_paths().paths();
         assert_eq!(paths.len(), 3);
         assert!(paths.contains(&PathBuf::from("/restored/worktree-a")));
         assert!(paths.contains(&PathBuf::from("/restored/worktree-b")));
@@ -2518,7 +2906,7 @@ mod tests {
                 .cloned()
         });
         let entry = entry.unwrap();
-        let paths = entry.folder_paths.paths();
+        let paths = entry.folder_paths().paths();
         assert_eq!(paths.len(), 2);
         assert!(paths.contains(&PathBuf::from("/new/worktree-a")));
         assert!(paths.contains(&PathBuf::from("/other/path")));
@@ -2590,4 +2978,136 @@ mod tests {
         assert!(paths.contains(&Path::new("/projects/worktree-a")));
         assert!(paths.contains(&Path::new("/projects/worktree-b")));
     }
+
+    // ── ThreadWorktreePaths tests ──────────────────────────────────────
+
+    /// Helper to build a `ThreadWorktreePaths` from (main, folder) pairs.
+    fn make_worktree_paths(pairs: &[(&str, &str)]) -> ThreadWorktreePaths {
+        let (mains, folders): (Vec<&Path>, Vec<&Path>) = pairs
+            .iter()
+            .map(|(m, f)| (Path::new(*m), Path::new(*f)))
+            .unzip();
+        ThreadWorktreePaths::from_path_lists(PathList::new(&mains), PathList::new(&folders))
+            .unwrap()
+    }
+
+    #[test]
+    fn test_thread_worktree_paths_full_add_then_remove_cycle() {
+        // Full scenario from the issue:
+        //   1. Start with linked worktree selectric → zed
+        //   2. Add cloud
+        //   3. Remove zed
+
+        let mut paths = make_worktree_paths(&[("/projects/zed", "/worktrees/selectric/zed")]);
+
+        // Step 2: add cloud
+        paths.add_path(Path::new("/projects/cloud"), Path::new("/projects/cloud"));
+
+        assert_eq!(paths.ordered_pairs().count(), 2);
+        assert_eq!(
+            paths.folder_path_list(),
+            &PathList::new(&[
+                Path::new("/worktrees/selectric/zed"),
+                Path::new("/projects/cloud"),
+            ])
+        );
+        assert_eq!(
+            paths.main_worktree_path_list(),
+            &PathList::new(&[Path::new("/projects/zed"), Path::new("/projects/cloud"),])
+        );
+
+        // Step 3: remove zed
+        paths.remove_main_path(Path::new("/projects/zed"));
+
+        assert_eq!(paths.ordered_pairs().count(), 1);
+        assert_eq!(
+            paths.folder_path_list(),
+            &PathList::new(&[Path::new("/projects/cloud")])
+        );
+        assert_eq!(
+            paths.main_worktree_path_list(),
+            &PathList::new(&[Path::new("/projects/cloud")])
+        );
+    }
+
+    #[test]
+    fn test_thread_worktree_paths_add_is_idempotent() {
+        let mut paths = make_worktree_paths(&[("/projects/zed", "/projects/zed")]);
+
+        paths.add_path(Path::new("/projects/zed"), Path::new("/projects/zed"));
+
+        assert_eq!(paths.ordered_pairs().count(), 1);
+    }
+
+    #[test]
+    fn test_thread_worktree_paths_remove_nonexistent_is_noop() {
+        let mut paths = make_worktree_paths(&[("/projects/zed", "/worktrees/selectric/zed")]);
+
+        paths.remove_main_path(Path::new("/projects/nonexistent"));
+
+        assert_eq!(paths.ordered_pairs().count(), 1);
+    }
+
+    #[test]
+    fn test_thread_worktree_paths_from_path_lists_preserves_association() {
+        let folder = PathList::new(&[
+            Path::new("/worktrees/selectric/zed"),
+            Path::new("/projects/cloud"),
+        ]);
+        let main = PathList::new(&[Path::new("/projects/zed"), Path::new("/projects/cloud")]);
+
+        let paths = ThreadWorktreePaths::from_path_lists(main, folder).unwrap();
+
+        let pairs: Vec<_> = paths
+            .ordered_pairs()
+            .map(|(m, f)| (m.clone(), f.clone()))
+            .collect();
+        assert_eq!(pairs.len(), 2);
+        assert!(pairs.contains(&(
+            PathBuf::from("/projects/zed"),
+            PathBuf::from("/worktrees/selectric/zed")
+        )));
+        assert!(pairs.contains(&(
+            PathBuf::from("/projects/cloud"),
+            PathBuf::from("/projects/cloud")
+        )));
+    }
+
+    #[test]
+    fn test_thread_worktree_paths_main_deduplicates_linked_worktrees() {
+        // Two linked worktrees of the same main repo: the main_worktree_path_list
+        // keeps the duplicate main path (PathList preserves all entries from its
+        // input), and ordered_pairs likewise has both entries.
+        let paths = make_worktree_paths(&[
+            ("/projects/zed", "/worktrees/selectric/zed"),
+            ("/projects/zed", "/worktrees/feature/zed"),
+        ]);
+
+        // main_worktree_path_list has the duplicate main path twice
+        // (PathList keeps all entries from its input)
+        assert_eq!(paths.ordered_pairs().count(), 2);
+        assert_eq!(
+            paths.folder_path_list(),
+            &PathList::new(&[
+                Path::new("/worktrees/selectric/zed"),
+                Path::new("/worktrees/feature/zed"),
+            ])
+        );
+        assert_eq!(
+            paths.main_worktree_path_list(),
+            &PathList::new(&[Path::new("/projects/zed"), Path::new("/projects/zed"),])
+        );
+    }
+
+    #[test]
+    fn test_thread_worktree_paths_mismatched_lengths_returns_error() {
+        let folder = PathList::new(&[
+            Path::new("/worktrees/selectric/zed"),
+            Path::new("/projects/cloud"),
+        ]);
+        let main = PathList::new(&[Path::new("/projects/zed")]);
+
+        let result = ThreadWorktreePaths::from_path_lists(main, folder);
+        assert!(result.is_err());
+    }
 }

crates/agent_ui/src/thread_worktree_archive.rs 🔗

@@ -192,7 +192,7 @@ pub fn path_is_referenced_by_other_unarchived_threads(
         .filter(|thread| !thread.archived)
         .any(|thread| {
             thread
-                .folder_paths
+                .folder_paths()
                 .paths()
                 .iter()
                 .any(|other_path| other_path.as_path() == path)
@@ -428,7 +428,7 @@ pub async fn persist_worktree_state(root: &RootPlan, cx: &mut AsyncApp) -> Resul
             .entries()
             .filter(|thread| {
                 thread
-                    .folder_paths
+                    .folder_paths()
                     .paths()
                     .iter()
                     .any(|p| p.as_path() == root.root_path)

crates/agent_ui/src/threads_archive_view.rs 🔗

@@ -26,7 +26,7 @@ use picker::{
 use project::{AgentId, AgentServerStore};
 use settings::Settings as _;
 use theme::ActiveTheme;
-use ui::ThreadItem;
+use ui::{AgentThreadStatus, ThreadItem};
 use ui::{
     Divider, KeyBinding, ListItem, ListItemSpacing, ListSubHeader, Tooltip, WithScrollbar,
     prelude::*, utils::platform_title_bar_height,
@@ -113,6 +113,7 @@ fn fuzzy_match_positions(query: &str, text: &str) -> Option<Vec<usize>> {
 pub enum ThreadsArchiveViewEvent {
     Close,
     Unarchive { thread: ThreadMetadata },
+    CancelRestore { session_id: acp::SessionId },
 }
 
 impl EventEmitter<ThreadsArchiveViewEvent> for ThreadsArchiveView {}
@@ -131,6 +132,7 @@ pub struct ThreadsArchiveView {
     workspace: WeakEntity<Workspace>,
     agent_connection_store: WeakEntity<AgentConnectionStore>,
     agent_server_store: WeakEntity<AgentServerStore>,
+    restoring: HashSet<acp::SessionId>,
 }
 
 impl ThreadsArchiveView {
@@ -199,6 +201,7 @@ impl ThreadsArchiveView {
             workspace,
             agent_connection_store,
             agent_server_store,
+            restoring: HashSet::default(),
         };
 
         this.update_items(cx);
@@ -213,6 +216,16 @@ impl ThreadsArchiveView {
         self.selection = None;
     }
 
+    pub fn mark_restoring(&mut self, session_id: &acp::SessionId, cx: &mut Context<Self>) {
+        self.restoring.insert(session_id.clone());
+        cx.notify();
+    }
+
+    pub fn clear_restoring(&mut self, session_id: &acp::SessionId, cx: &mut Context<Self>) {
+        self.restoring.remove(session_id);
+        cx.notify();
+    }
+
     pub fn focus_filter_editor(&self, window: &mut Window, cx: &mut App) {
         let handle = self.filter_editor.read(cx).focus_handle(cx);
         handle.focus(window, cx);
@@ -323,11 +336,16 @@ impl ThreadsArchiveView {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        if thread.folder_paths.is_empty() {
+        if self.restoring.contains(&thread.session_id) {
+            return;
+        }
+
+        if thread.folder_paths().is_empty() {
             self.show_project_picker_for_thread(thread, window, cx);
             return;
         }
 
+        self.mark_restoring(&thread.session_id, cx);
         self.selection = None;
         self.reset_filter_editor_text(window, cx);
         cx.emit(ThreadsArchiveViewEvent::Unarchive { thread });
@@ -510,14 +528,16 @@ impl ThreadsArchiveView {
                     IconName::Sparkle
                 };
 
-                ThreadItem::new(id, thread.title.clone())
+                let is_restoring = self.restoring.contains(&thread.session_id);
+
+                let base = ThreadItem::new(id, thread.title.clone())
                     .icon(icon)
                     .when_some(icon_from_external_svg, |this, svg| {
                         this.custom_icon_from_external_svg(svg)
                     })
                     .timestamp(timestamp)
                     .highlight_positions(highlight_positions.clone())
-                    .project_paths(thread.folder_paths.paths_owned())
+                    .project_paths(thread.folder_paths().paths_owned())
                     .focused(is_focused)
                     .hovered(is_hovered)
                     .on_hover(cx.listener(move |this, is_hovered, _window, cx| {
@@ -527,8 +547,31 @@ impl ThreadsArchiveView {
                             this.hovered_index = None;
                         }
                         cx.notify();
-                    }))
-                    .action_slot(
+                    }));
+
+                if is_restoring {
+                    base.status(AgentThreadStatus::Running)
+                        .action_slot(
+                            IconButton::new("cancel-restore", IconName::Close)
+                                .style(ButtonStyle::Filled)
+                                .icon_size(IconSize::Small)
+                                .icon_color(Color::Muted)
+                                .tooltip(Tooltip::text("Cancel Restore"))
+                                .on_click({
+                                    let session_id = thread.session_id.clone();
+                                    cx.listener(move |this, _, _, cx| {
+                                        this.clear_restoring(&session_id, cx);
+                                        cx.emit(ThreadsArchiveViewEvent::CancelRestore {
+                                            session_id: session_id.clone(),
+                                        });
+                                        cx.stop_propagation();
+                                    })
+                                }),
+                        )
+                        .tooltip(Tooltip::text("Restoring\u{2026}"))
+                        .into_any_element()
+                } else {
+                    base.action_slot(
                         IconButton::new("delete-thread", IconName::Trash)
                             .style(ButtonStyle::Filled)
                             .icon_size(IconSize::Small)
@@ -561,6 +604,7 @@ impl ThreadsArchiveView {
                         })
                     })
                     .into_any_element()
+                }
             }
         }
     }
@@ -886,7 +930,8 @@ impl ProjectPickerDelegate {
         window: &mut Window,
         cx: &mut Context<Picker<Self>>,
     ) {
-        self.thread.folder_paths = paths.clone();
+        self.thread.worktree_paths =
+            super::thread_metadata_store::ThreadWorktreePaths::from_folder_paths(&paths);
         ThreadMetadataStore::global(cx).update(cx, |store, cx| {
             store.update_working_directories(&self.thread.session_id, paths, cx);
         });

crates/anthropic/src/anthropic.rs 🔗

@@ -109,7 +109,7 @@ pub enum Model {
     Custom {
         name: String,
         max_tokens: u64,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         /// Override this model with a different Anthropic model for tool calls.
         tool_override: Option<String>,

crates/client/src/test.rs 🔗

@@ -271,6 +271,7 @@ pub fn make_get_authenticated_user_response(
         organizations: vec![],
         default_organization_id: None,
         plans_by_organization: BTreeMap::new(),
+        configuration_by_organization: BTreeMap::new(),
         plan: PlanInfo {
             plan: KnownOrUnknown::Known(Plan::ZedPro),
             subscription_period: None,

crates/client/src/user.rs 🔗

@@ -5,6 +5,7 @@ use cloud_api_client::websocket_protocol::MessageToClient;
 use cloud_api_client::{
     GetAuthenticatedUserResponse, KnownOrUnknown, Organization, OrganizationId, Plan, PlanInfo,
 };
+use cloud_api_types::OrganizationConfiguration;
 use cloud_llm_client::{
     EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME, UsageLimit,
 };
@@ -117,6 +118,7 @@ pub struct UserStore {
     current_organization: Option<Arc<Organization>>,
     organizations: Vec<Arc<Organization>>,
     plans_by_organization: HashMap<OrganizationId, Plan>,
+    configuration_by_organization: HashMap<OrganizationId, OrganizationConfiguration>,
     contacts: Vec<Arc<Contact>>,
     incoming_contact_requests: Vec<Arc<User>>,
     outgoing_contact_requests: Vec<Arc<User>>,
@@ -193,6 +195,7 @@ impl UserStore {
             current_organization: None,
             organizations: Vec::new(),
             plans_by_organization: HashMap::default(),
+            configuration_by_organization: HashMap::default(),
             plan_info: None,
             edit_prediction_usage: None,
             contacts: Default::default(),
@@ -730,6 +733,13 @@ impl UserStore {
         self.plans_by_organization.get(organization_id).copied()
     }
 
+    pub fn current_organization_configuration(&self) -> Option<&OrganizationConfiguration> {
+        let current_organization = self.current_organization.as_ref()?;
+
+        self.configuration_by_organization
+            .get(&current_organization.id)
+    }
+
     pub fn plan(&self) -> Option<Plan> {
         #[cfg(debug_assertions)]
         if let Ok(plan) = std::env::var("ZED_SIMULATE_PLAN").as_ref() {
@@ -865,6 +875,8 @@ impl UserStore {
                 (organization_id, plan)
             })
             .collect();
+        self.configuration_by_organization =
+            response.configuration_by_organization.into_iter().collect();
 
         self.edit_prediction_usage = Some(EditPredictionUsage(RequestUsage {
             limit: response.plan.usage.edit_predictions.limit,

crates/cloud_api_types/src/cloud_api_types.rs 🔗

@@ -26,6 +26,8 @@ pub struct GetAuthenticatedUserResponse {
     pub default_organization_id: Option<OrganizationId>,
     #[serde(default)]
     pub plans_by_organization: BTreeMap<OrganizationId, KnownOrUnknown<Plan, String>>,
+    #[serde(default)]
+    pub configuration_by_organization: BTreeMap<OrganizationId, OrganizationConfiguration>,
     pub plan: PlanInfo,
 }
 
@@ -50,6 +52,20 @@ pub struct Organization {
     pub is_personal: bool,
 }
 
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct OrganizationConfiguration {
+    pub is_zed_model_provider_enabled: bool,
+    pub is_agent_thread_feedback_enabled: bool,
+    pub is_collaboration_enabled: bool,
+    pub edit_prediction: OrganizationEditPredictionConfiguration,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct OrganizationEditPredictionConfiguration {
+    pub is_enabled: bool,
+    pub is_feedback_enabled: bool,
+}
+
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 pub struct AcceptTermsOfServiceResponse {
     pub user: AuthenticatedUser,

crates/collab_ui/src/collab_panel.rs 🔗

@@ -2620,6 +2620,18 @@ impl CollabPanel {
         cx.write_to_clipboard(item)
     }
 
+    fn render_disabled_by_organization(&mut self, _cx: &mut Context<Self>) -> Div {
+        v_flex()
+            .p_4()
+            .gap_4()
+            .size_full()
+            .text_center()
+            .justify_center()
+            .child(Label::new(
+                "Collaboration is disabled for this organization.",
+            ))
+    }
+
     fn render_signed_out(&mut self, cx: &mut Context<Self>) -> Div {
         let collab_blurb = "Work with your team in realtime with collaborative editing, voice, shared notes and more.";
 
@@ -3645,6 +3657,12 @@ impl Render for CollabPanel {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let status = *self.client.status().borrow();
 
+        let is_collaboration_disabled = self
+            .user_store
+            .read(cx)
+            .current_organization_configuration()
+            .is_some_and(|config| !config.is_collaboration_enabled);
+
         v_flex()
             .key_context(self.dispatch_context(window, cx))
             .on_action(cx.listener(CollabPanel::cancel))
@@ -3664,7 +3682,9 @@ impl Render for CollabPanel {
             .on_action(cx.listener(CollabPanel::move_channel_down))
             .track_focus(&self.focus_handle)
             .size_full()
-            .child(if !status.is_or_was_connected() || status.is_signing_in() {
+            .child(if is_collaboration_disabled {
+                self.render_disabled_by_organization(cx)
+            } else if !status.is_or_was_connected() || status.is_signing_in() {
                 self.render_signed_out(cx)
             } else {
                 self.render_signed_in(window, cx)

crates/debugger_ui/src/session/running/console.rs 🔗

@@ -84,6 +84,7 @@ impl Console {
             editor.set_show_indent_guides(false, cx);
             editor.set_show_edit_predictions(Some(false), window, cx);
             editor.set_use_modal_editing(false);
+            editor.disable_mouse_wheel_zoom();
             editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
             editor
         });

crates/deepseek/src/deepseek.rs 🔗

@@ -56,7 +56,7 @@ pub enum Model {
     #[serde(rename = "custom")]
     Custom {
         name: String,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         max_tokens: u64,
         max_output_tokens: Option<u64>,

crates/dev_container/Cargo.toml 🔗

@@ -19,6 +19,7 @@ futures.workspace = true
 log.workspace = true
 menu.workspace = true
 paths.workspace = true
+regex.workspace = true
 picker.workspace = true
 project.workspace = true
 settings.workspace = true

crates/dev_container/src/devcontainer_json.rs 🔗

@@ -138,7 +138,7 @@ pub(crate) struct ContainerBuild {
     context: Option<String>,
     pub(crate) args: Option<HashMap<String, String>>,
     options: Option<Vec<String>>,
-    target: Option<String>,
+    pub(crate) target: Option<String>,
     #[serde(default, deserialize_with = "deserialize_string_or_array")]
     cache_from: Option<Vec<String>>,
 }
@@ -185,8 +185,8 @@ pub(crate) enum LifecycleCommand {
 
 #[derive(Debug, PartialEq, Eq)]
 pub(crate) enum DevContainerBuildType {
-    Image,
-    Dockerfile,
+    Image(String),
+    Dockerfile(ContainerBuild),
     DockerCompose,
     None,
 }
@@ -249,14 +249,15 @@ pub(crate) fn deserialize_devcontainer_json(json: &str) -> Result<DevContainer,
 
 impl DevContainer {
     pub(crate) fn build_type(&self) -> DevContainerBuildType {
-        if self.image.is_some() {
-            return DevContainerBuildType::Image;
+        if let Some(image) = &self.image {
+            DevContainerBuildType::Image(image.clone())
         } else if self.docker_compose_file.is_some() {
-            return DevContainerBuildType::DockerCompose;
-        } else if self.build.is_some() {
-            return DevContainerBuildType::Dockerfile;
+            DevContainerBuildType::DockerCompose
+        } else if let Some(build) = &self.build {
+            DevContainerBuildType::Dockerfile(build.clone())
+        } else {
+            DevContainerBuildType::None
         }
-        return DevContainerBuildType::None;
     }
 }
 
@@ -911,7 +912,12 @@ mod test {
             }
         );
 
-        assert_eq!(devcontainer.build_type(), DevContainerBuildType::Image);
+        assert_eq!(
+            devcontainer.build_type(),
+            DevContainerBuildType::Image(String::from(
+                "mcr.microsoft.com/devcontainers/base:ubuntu"
+            ))
+        );
     }
 
     #[test]
@@ -1366,7 +1372,20 @@ mod test {
             }
         );
 
-        assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile);
+        assert_eq!(
+            devcontainer.build_type(),
+            DevContainerBuildType::Dockerfile(ContainerBuild {
+                dockerfile: "DockerFile".to_string(),
+                context: Some("..".to_string()),
+                args: Some(HashMap::from([(
+                    "MYARG".to_string(),
+                    "MYVALUE".to_string()
+                )])),
+                options: Some(vec!["--some-option".to_string(), "--mount".to_string()]),
+                target: Some("development".to_string()),
+                cache_from: Some(vec!["some_image".to_string()]),
+            })
+        );
     }
 
     #[test]

crates/dev_container/src/devcontainer_manifest.rs 🔗

@@ -6,6 +6,8 @@ use std::{
     sync::Arc,
 };
 
+use regex::Regex;
+
 use fs::Fs;
 use http_client::HttpClient;
 use util::{ResultExt, command::Command};
@@ -217,11 +219,10 @@ impl DevContainerManifest {
     async fn dockerfile_location(&self) -> Option<PathBuf> {
         let dev_container = self.dev_container();
         match dev_container.build_type() {
-            DevContainerBuildType::Image => None,
-            DevContainerBuildType::Dockerfile => dev_container
-                .build
-                .as_ref()
-                .map(|build| self.config_directory.join(&build.dockerfile)),
+            DevContainerBuildType::Image(_) => None,
+            DevContainerBuildType::Dockerfile(build) => {
+                Some(self.config_directory.join(&build.dockerfile))
+            }
             DevContainerBuildType::DockerCompose => {
                 let Ok(docker_compose_manifest) = self.docker_compose_manifest().await else {
                     return None;
@@ -260,48 +261,50 @@ impl DevContainerManifest {
     /// - The image sourced in the docker-compose main service dockerfile, if one is specified
     /// If no such image is available, return an error
     async fn get_base_image_from_config(&self) -> Result<String, DevContainerError> {
-        if let Some(image) = &self.dev_container().image {
-            return Ok(image.to_string());
-        }
-        if let Some(dockerfile) = self.dev_container().build.as_ref().map(|b| &b.dockerfile) {
-            let dockerfile_contents = self
-                .fs
-                .load(&self.config_directory.join(dockerfile))
-                .await
-                .map_err(|e| {
-                    log::error!("Error reading dockerfile: {e}");
-                    DevContainerError::DevContainerParseFailed
-                })?;
-            return image_from_dockerfile(self, dockerfile_contents);
-        }
-        if self.dev_container().docker_compose_file.is_some() {
-            let docker_compose_manifest = self.docker_compose_manifest().await?;
-            let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?;
+        match self.dev_container().build_type() {
+            DevContainerBuildType::Image(image) => {
+                return Ok(image);
+            }
+            DevContainerBuildType::Dockerfile(build) => {
+                let dockerfile_contents = self.expanded_dockerfile_content().await?;
+                return image_from_dockerfile(dockerfile_contents, &build.target).ok_or_else(
+                    || {
+                        log::error!("Unable to find base image in Dockerfile");
+                        DevContainerError::DevContainerParseFailed
+                    },
+                );
+            }
+            DevContainerBuildType::DockerCompose => {
+                let docker_compose_manifest = self.docker_compose_manifest().await?;
+                let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?;
 
-            if let Some(dockerfile) = main_service
-                .build
-                .as_ref()
-                .and_then(|b| b.dockerfile.as_ref())
-            {
-                let dockerfile_contents = self
-                    .fs
-                    .load(&self.config_directory.join(dockerfile))
-                    .await
-                    .map_err(|e| {
-                        log::error!("Error reading dockerfile: {e}");
+                if let Some(_) = main_service
+                    .build
+                    .as_ref()
+                    .and_then(|b| b.dockerfile.as_ref())
+                {
+                    let dockerfile_contents = self.expanded_dockerfile_content().await?;
+                    return image_from_dockerfile(
+                        dockerfile_contents,
+                        &main_service.build.as_ref().and_then(|b| b.target.clone()),
+                    )
+                    .ok_or_else(|| {
+                        log::error!("Unable to find base image in Dockerfile");
                         DevContainerError::DevContainerParseFailed
-                    })?;
-                return image_from_dockerfile(self, dockerfile_contents);
+                    });
+                }
+                if let Some(image) = &main_service.image {
+                    return Ok(image.to_string());
+                }
+
+                log::error!("No valid base image found in docker-compose configuration");
+                return Err(DevContainerError::DevContainerParseFailed);
             }
-            if let Some(image) = &main_service.image {
-                return Ok(image.to_string());
+            DevContainerBuildType::None => {
+                log::error!("Not a valid devcontainer config for build");
+                return Err(DevContainerError::NotInValidProject);
             }
-
-            log::error!("No valid base image found in docker-compose configuration");
-            return Err(DevContainerError::DevContainerParseFailed);
         }
-        log::error!("No valid base image found in dev container configuration");
-        Err(DevContainerError::DevContainerParseFailed)
     }
 
     async fn download_feature_and_dockerfile_resources(&mut self) -> Result<(), DevContainerError> {
@@ -505,7 +508,10 @@ impl DevContainerManifest {
 
         // --- Phase 3: Generate extended Dockerfile from the inflated manifests ---
 
-        let is_compose = dev_container.build_type() == DevContainerBuildType::DockerCompose;
+        let is_compose = match dev_container.build_type() {
+            DevContainerBuildType::DockerCompose => true,
+            _ => false,
+        };
         let use_buildkit = self.docker_client.supports_compose_buildkit() || !is_compose;
 
         let dockerfile_base_content = if let Some(location) = &self.dockerfile_location().await {
@@ -514,10 +520,29 @@ impl DevContainerManifest {
             None
         };
 
+        let build_target = if is_compose {
+            find_primary_service(&self.docker_compose_manifest().await?, self)?
+                .1
+                .build
+                .and_then(|b| b.target)
+        } else {
+            dev_container.build.as_ref().and_then(|b| b.target.clone())
+        };
+
+        let dockerfile_content = dockerfile_base_content
+            .map(|content| {
+                dockerfile_inject_alias(
+                    &content,
+                    "dev_container_auto_added_stage_label",
+                    build_target,
+                )
+            })
+            .unwrap_or_default();
+
         let dockerfile_content = self.generate_dockerfile_extended(
             &container_user,
             &remote_user,
-            dockerfile_base_content,
+            dockerfile_content,
             use_buildkit,
         );
 
@@ -544,7 +569,7 @@ impl DevContainerManifest {
         &self,
         container_user: &str,
         remote_user: &str,
-        dockerfile_content: Option<String>,
+        dockerfile_content: String,
         use_buildkit: bool,
     ) -> String {
         #[cfg(not(target_os = "windows"))]
@@ -565,16 +590,6 @@ impl DevContainerManifest {
         let container_home_cmd = get_ent_passwd_shell_command(container_user);
         let remote_home_cmd = get_ent_passwd_shell_command(remote_user);
 
-        let dockerfile_content = dockerfile_content
-            .map(|content| {
-                if dockerfile_alias(&content).is_some() {
-                    content
-                } else {
-                    dockerfile_inject_alias(&content, "dev_container_auto_added_stage_label")
-                }
-            })
-            .unwrap_or("".to_string());
-
         let dest = FEATURES_CONTAINER_TEMP_DEST_FOLDER;
 
         let feature_content_source_stage = if use_buildkit {
@@ -694,20 +709,17 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
         }
         let dev_container = self.dev_container();
         match dev_container.build_type() {
-            DevContainerBuildType::Image => {
+            DevContainerBuildType::Image(base_image) => {
                 let built_docker_image = self.build_docker_image().await?;
-                let Some(base_image) = dev_container.image.as_ref() else {
-                    log::error!("Dev container is using and image which can't be referenced");
-                    return Err(DevContainerError::DevContainerParseFailed);
-                };
+
                 let built_docker_image = self
-                    .update_remote_user_uid(built_docker_image, base_image)
+                    .update_remote_user_uid(built_docker_image, &base_image)
                     .await?;
 
                 let resources = self.build_merged_resources(built_docker_image)?;
                 Ok(DevContainerBuildResources::Docker(resources))
             }
-            DevContainerBuildType::Dockerfile => {
+            DevContainerBuildType::Dockerfile(_) => {
                 let built_docker_image = self.build_docker_image().await?;
                 let Some(features_build_info) = &self.features_build_info else {
                     log::error!(
@@ -892,6 +904,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
                                     }),
                             ),
                             dockerfile: Some(dockerfile_path.display().to_string()),
+                            target: Some("dev_containers_target_stage".to_string()),
                             args: Some(build_args),
                             additional_contexts,
                         }),
@@ -983,6 +996,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
                                     features_build_info.empty_context_dir.display().to_string(),
                                 ),
                                 dockerfile: Some(dockerfile_path.display().to_string()),
+                                target: Some("dev_containers_target_stage".to_string()),
                                 args: Some(build_args),
                                 additional_contexts,
                             }),
@@ -1252,11 +1266,8 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
         };
 
         match dev_container.build_type() {
-            DevContainerBuildType::Image => {
-                let Some(image_tag) = &dev_container.image else {
-                    return Err(DevContainerError::DevContainerParseFailed);
-                };
-                let base_image = self.docker_client.inspect(image_tag).await?;
+            DevContainerBuildType::Image(image_tag) => {
+                let base_image = self.docker_client.inspect(&image_tag).await?;
                 if dev_container
                     .features
                     .as_ref()
@@ -1266,7 +1277,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
                     return Ok(base_image);
                 }
             }
-            DevContainerBuildType::Dockerfile => {}
+            DevContainerBuildType::Dockerfile(_) => {}
             DevContainerBuildType::DockerCompose | DevContainerBuildType::None => {
                 return Err(DevContainerError::DevContainerParseFailed);
             }
@@ -1390,7 +1401,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
                 DevContainerError::FilesystemError
             })?;
 
-        let updated_image_tag = format!("{}-uid", features_build_info.image_tag);
+        let updated_image_tag = features_build_info.image_tag.clone();
 
         let mut command = Command::new(self.docker_client.docker_cli());
         command.args(["build"]);
@@ -1603,7 +1614,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
 
         command.args(["-t", &features_build_info.image_tag]);
 
-        if dev_container.build_type() == DevContainerBuildType::Dockerfile {
+        if let DevContainerBuildType::Dockerfile(_) = dev_container.build_type() {
             command.arg(self.config_directory.display().to_string());
         } else {
             // Use an empty folder as the build context to avoid pulling in unneeded files.
@@ -1784,7 +1795,6 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
         }
         for app_port in &self.dev_container().app_port {
             command.arg("-p");
-            // Should just implement display for an AppPort struct which takes care of this; it might be a custom map like (literally) "8081:8080"
             command.arg(app_port);
         }
 
@@ -1987,6 +1997,65 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
             safe_id_lower(alternate_name)
         }
     }
+
+    async fn expanded_dockerfile_content(&self) -> Result<String, DevContainerError> {
+        let Some(dockerfile_path) = self.dockerfile_location().await else {
+            log::error!("Tried to expand dockerfile for an image-type config");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+
+        let devcontainer_args = self
+            .dev_container()
+            .build
+            .as_ref()
+            .and_then(|b| b.args.clone())
+            .unwrap_or_default();
+        let contents = self.fs.load(&dockerfile_path).await.map_err(|e| {
+            log::error!("Failed to load Dockerfile: {e}");
+            DevContainerError::FilesystemError
+        })?;
+        let mut parsed_lines: Vec<String> = Vec::new();
+        let mut inline_args: Vec<(String, String)> = Vec::new();
+        let key_regex = Regex::new(r"(?:^|\s)(\w+)=").expect("valid regex");
+
+        for line in contents.lines() {
+            let mut parsed_line = line.to_string();
+            // Replace from devcontainer args first, since they take precedence
+            for (key, value) in &devcontainer_args {
+                parsed_line = parsed_line.replace(&format!("${{{key}}}"), value)
+            }
+            for (key, value) in &inline_args {
+                parsed_line = parsed_line.replace(&format!("${{{key}}}"), value);
+            }
+            if let Some(arg_directives) = parsed_line.strip_prefix("ARG ") {
+                let trimmed = arg_directives.trim();
+                let key_matches: Vec<_> = key_regex.captures_iter(trimmed).collect();
+                for (i, captures) in key_matches.iter().enumerate() {
+                    let key = captures[1].to_string();
+                    // NOTE(review): devcontainer-supplied build args were already substituted above and take precedence over these ARG defaults
+                    let value_start = captures.get(0).expect("full match").end();
+                    let value_end = if i + 1 < key_matches.len() {
+                        key_matches[i + 1].get(0).expect("full match").start()
+                    } else {
+                        trimmed.len()
+                    };
+                    let raw_value = trimmed[value_start..value_end].trim();
+                    let value = if raw_value.starts_with('"')
+                        && raw_value.ends_with('"')
+                        && raw_value.len() > 1
+                    {
+                        &raw_value[1..raw_value.len() - 1]
+                    } else {
+                        raw_value
+                    };
+                    inline_args.push((key, value.to_string()));
+                }
+            }
+            parsed_lines.push(parsed_line);
+        }
+
+        Ok(parsed_lines.join("\n"))
+    }
 }
 
 /// Holds all the information needed to construct a `docker buildx build` command
@@ -2237,46 +2306,37 @@ chmod +x ./install.sh
     Ok(script)
 }
 
-// Dockerfile actions need to be moved to their own file
-fn dockerfile_alias(dockerfile_content: &str) -> Option<String> {
-    dockerfile_content
-        .lines()
-        .find(|line| line.starts_with("FROM"))
-        .and_then(|line| {
-            let words: Vec<&str> = line.split(" ").collect();
-            if words.len() > 2 && words[words.len() - 2].to_lowercase() == "as" {
-                return Some(words[words.len() - 1].to_string());
-            } else {
-                return None;
-            }
-        })
-}
-
-fn dockerfile_inject_alias(dockerfile_content: &str, alias: &str) -> String {
-    if dockerfile_alias(dockerfile_content).is_some() {
-        dockerfile_content.to_string()
-    } else {
-        dockerfile_content
-            .lines()
-            .map(|line| {
-                if line.starts_with("FROM") {
-                    format!("{} AS {}", line, alias)
-                } else {
-                    line.to_string()
-                }
-            })
-            .collect::<Vec<String>>()
-            .join("\n")
+fn dockerfile_inject_alias(
+    dockerfile_content: &str,
+    alias: &str,
+    build_target: Option<String>,
+) -> String {
+    match image_from_dockerfile(dockerfile_content.to_string(), &build_target) {
+        Some(target) => format!(
+            r#"{dockerfile_content}
+FROM {target} AS {alias}"#
+        ),
+        None => dockerfile_content.to_string(),
     }
 }
 
-fn image_from_dockerfile(
-    devcontainer: &DevContainerManifest,
-    dockerfile_contents: String,
-) -> Result<String, DevContainerError> {
-    let mut raw_contents = dockerfile_contents
+fn image_from_dockerfile(dockerfile_contents: String, target: &Option<String>) -> Option<String> {
+    dockerfile_contents
         .lines()
-        .find(|line| line.starts_with("FROM"))
+        .filter(|line| line.starts_with("FROM"))
+        .rfind(|from_line| match &target {
+            Some(target) => {
+                let parts = from_line.split(' ').collect::<Vec<&str>>();
+                if parts.len() >= 3
+                    && parts.get(parts.len() - 2).unwrap_or(&"").to_lowercase() == "as"
+                {
+                    parts.last().unwrap_or(&"").to_lowercase() == target.to_lowercase()
+                } else {
+                    false
+                }
+            }
+            None => true,
+        })
         .and_then(|from_line| {
             from_line
                 .split(' ')
@@ -2284,21 +2344,6 @@ fn image_from_dockerfile(
                 .get(1)
                 .map(|s| s.to_string())
         })
-        .ok_or_else(|| {
-            log::error!("Could not find an image definition in dockerfile");
-            DevContainerError::DevContainerParseFailed
-        })?;
-
-    for (k, v) in devcontainer
-        .dev_container()
-        .build
-        .as_ref()
-        .and_then(|b| b.args.as_ref())
-        .unwrap_or(&HashMap::new())
-    {
-        raw_contents = raw_contents.replace(&format!("${{{}}}", k), v);
-    }
-    Ok(raw_contents)
 }
 
 // Container user things
@@ -2386,6 +2431,7 @@ mod test {
         devcontainer_manifest::{
             ConfigStatus, DevContainerManifest, DockerBuildResources, DockerComposeResources,
             DockerInspect, extract_feature_id, find_primary_service, get_remote_user_from_config,
+            image_from_dockerfile,
         },
         docker::{
             DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild,
@@ -3087,7 +3133,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
 #  Copyright (c) Microsoft Corporation. All rights reserved.
 #  Licensed under the MIT License. See License.txt in the project root for license information.
 ARG VARIANT="16-bullseye"
-FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT}
 
 RUN mkdir -p /workspaces && chown node:node /workspaces
 
@@ -3100,6 +3146,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
 && mkdir -p /home/$USERNAME/commandhistory \
 && touch /home/$USERNAME/commandhistory/.bash_history \
 && chown -R $USERNAME /home/$USERNAME/commandhistory
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
 
 FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
 USER root
@@ -3426,13 +3473,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
             &feature_dockerfile,
             r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
 
-FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
 
 # Include lld linker to improve build times either by using environment variable
 # RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
     && apt-get -y install clang lld \
     && apt-get autoremove -y && apt-get clean -y
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
 
 FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
 USER root
@@ -3748,13 +3796,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
             &feature_dockerfile,
             r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
 
-FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
 
 # Include lld linker to improve build times either by using environment variable
 # RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
 && apt-get -y install clang lld \
 && apt-get autoremove -y && apt-get clean -y
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
 
 FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
 USER root
@@ -3927,13 +3976,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
             &feature_dockerfile,
             r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
 
-FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
 
 # Include lld linker to improve build times either by using environment variable
 # RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
 && apt-get -y install clang lld \
 && apt-get autoremove -y && apt-get clean -y
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
 
 FROM dev_container_feature_content_temp as dev_containers_feature_content_source
 
@@ -4046,6 +4096,7 @@ ENV DOCKER_BUILDKIT=1
                   "VARIANT": "18-bookworm",
                   "FOO": "bar",
                 },
+                "target": "development",
               },
               "workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached",
               "workspaceFolder": "/workspace2",
@@ -4131,7 +4182,8 @@ ENV DOCKER_BUILDKIT=1
 #  Copyright (c) Microsoft Corporation. All rights reserved.
 #  Licensed under the MIT License. See License.txt in the project root for license information.
 ARG VARIANT="16-bullseye"
-FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT}
+FROM mcr.microsoft.com/devcontainers/typescript-node:latest as predev
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} as development
 
 RUN mkdir -p /workspaces && chown node:node /workspaces
 
@@ -4174,7 +4226,8 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
 #  Copyright (c) Microsoft Corporation. All rights reserved.
 #  Licensed under the MIT License. See License.txt in the project root for license information.
 ARG VARIANT="16-bullseye"
-FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/typescript-node:latest as predev
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} as development
 
 RUN mkdir -p /workspaces && chown node:node /workspaces
 
@@ -4187,6 +4240,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
 && mkdir -p /home/$USERNAME/commandhistory \
 && touch /home/$USERNAME/commandhistory/.bash_history \
 && chown -R $USERNAME /home/$USERNAME/commandhistory
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
 
 FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
 USER root
@@ -4477,6 +4531,211 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
         );
     }
 
+    #[gpui::test]
+    async fn test_gets_base_image_from_dockerfile(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        let given_devcontainer_contents = r#"
+            {
+              "name": "cli-${devcontainerId}",
+              "build": {
+                "dockerfile": "Dockerfile",
+                "args": {
+                    "VERSION": "1.22",
+                }
+              },
+            }
+            "#;
+
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        test_dependencies
+            .fs
+            .atomic_write(
+                PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+                r#"
+FROM dontgrabme as build_context
+ARG VERSION=1.21
+ARG REPOSITORY=mybuild
+ARG REGISTRY=docker.io/stuff
+
+ARG IMAGE=${REGISTRY}/${REPOSITORY}:${VERSION}
+
+FROM ${IMAGE} AS devcontainer
+                    "#
+                .trim()
+                .to_string(),
+            )
+            .await
+            .unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        let dockerfile_contents = devcontainer_manifest
+            .expanded_dockerfile_content()
+            .await
+            .unwrap();
+        let base_image = image_from_dockerfile(
+            dockerfile_contents,
+            &devcontainer_manifest
+                .dev_container()
+                .build
+                .as_ref()
+                .and_then(|b| b.target.clone()),
+        )
+        .unwrap();
+
+        assert_eq!(base_image, "docker.io/stuff/mybuild:1.22".to_string());
+    }
+
+    #[gpui::test]
+    async fn test_gets_base_image_from_dockerfile_with_target_specified(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        let given_devcontainer_contents = r#"
+            {
+              "name": "cli-${devcontainerId}",
+              "build": {
+                "dockerfile": "Dockerfile",
+                "args": {
+                    "VERSION": "1.22",
+                },
+                "target": "development"
+              },
+            }
+            "#;
+
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        test_dependencies
+            .fs
+            .atomic_write(
+                PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+                r#"
+FROM dontgrabme as build_context
+ARG VERSION=1.21
+ARG REPOSITORY=mybuild
+ARG REGISTRY=docker.io/stuff
+
+ARG IMAGE=${REGISTRY}/${REPOSITORY}:${VERSION}
+ARG DEV_IMAGE=${REGISTRY}/${REPOSITORY}:latest
+
+FROM ${DEV_IMAGE} AS development
+FROM ${IMAGE} AS production
+                    "#
+                .trim()
+                .to_string(),
+            )
+            .await
+            .unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        let dockerfile_contents = devcontainer_manifest
+            .expanded_dockerfile_content()
+            .await
+            .unwrap();
+        let base_image = image_from_dockerfile(
+            dockerfile_contents,
+            &devcontainer_manifest
+                .dev_container()
+                .build
+                .as_ref()
+                .and_then(|b| b.target.clone()),
+        )
+        .unwrap();
+
+        assert_eq!(base_image, "docker.io/stuff/mybuild:latest".to_string());
+    }
+
+    #[gpui::test]
+    async fn test_expands_args_in_dockerfile(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        let given_devcontainer_contents = r#"
+            {
+              "name": "cli-${devcontainerId}",
+              "build": {
+                "dockerfile": "Dockerfile",
+                "args": {
+                    "JSON_ARG": "some-value",
+                    "ELIXIR_VERSION": "1.21",
+                }
+              },
+            }
+            "#;
+
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        test_dependencies
+            .fs
+            .atomic_write(
+                PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+                r#"
+ARG INVALID_FORWARD_REFERENCE=${OTP_VERSION}
+ARG ELIXIR_VERSION=1.20.0-rc.4
+ARG FOO=foo BAR=bar
+ARG FOOBAR=${FOO}${BAR}
+ARG OTP_VERSION=28.4.1
+ARG DEBIAN_VERSION=trixie-20260316-slim
+ARG IMAGE="docker.io/hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}"
+ARG NESTED_MAP="{"key1": "val1", "key2": "val2"}"
+ARG WRAPPING_MAP={"nested_map": ${NESTED_MAP}}
+ARG FROM_JSON=${JSON_ARG}
+
+FROM ${IMAGE} AS devcontainer
+                    "#
+                .trim()
+                .to_string(),
+            )
+            .await
+            .unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        let expanded_dockerfile = devcontainer_manifest
+            .expanded_dockerfile_content()
+            .await
+            .unwrap();
+
+        assert_eq!(
+            &expanded_dockerfile,
+            r#"
+ARG INVALID_FORWARD_REFERENCE=${OTP_VERSION}
+ARG ELIXIR_VERSION=1.20.0-rc.4
+ARG FOO=foo BAR=bar
+ARG FOOBAR=foobar
+ARG OTP_VERSION=28.4.1
+ARG DEBIAN_VERSION=trixie-20260316-slim
+ARG IMAGE="docker.io/hexpm/elixir:1.21-erlang-28.4.1-debian-trixie-20260316-slim"
+ARG NESTED_MAP="{"key1": "val1", "key2": "val2"}"
+ARG WRAPPING_MAP={"nested_map": {"key1": "val1", "key2": "val2"}}
+ARG FROM_JSON=some-value
+
+FROM docker.io/hexpm/elixir:1.21-erlang-28.4.1-debian-trixie-20260316-slim AS devcontainer
+            "#
+            .trim()
+        )
+    }
+
+    #[test]
+    fn test_aliases_dockerfile_with_pre_existing_aliases_for_build() {}
+
+    #[test]
+    fn test_aliases_dockerfile_with_no_aliases_for_build() {}
+
+    #[test]
+    fn test_aliases_dockerfile_with_build_target_specified() {}
+
     pub(crate) struct RecordedExecCommand {
         pub(crate) _container_id: String,
         pub(crate) _remote_folder: String,
@@ -4641,6 +4900,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
                                     dockerfile: Some("Dockerfile".to_string()),
                                     args: None,
                                     additional_contexts: None,
+                                    target: None,
                                 }),
                                 volumes: vec![MountDefinition {
                                     source: Some("../..".to_string()),

crates/dev_container/src/docker.rs 🔗

@@ -80,6 +80,8 @@ pub(crate) struct DockerComposeServiceBuild {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) dockerfile: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) target: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) args: Option<HashMap<String, String>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) additional_contexts: Option<HashMap<String, String>>,
@@ -194,7 +196,7 @@ impl Docker {
 
     async fn pull_image(&self, image: &String) -> Result<(), DevContainerError> {
         let mut command = Command::new(&self.docker_cli);
-        command.args(&["pull", image]);
+        command.args(&["pull", "--", image]);
 
         let output = command.output().await.map_err(|e| {
             log::error!("Error pulling image: {e}");
@@ -485,10 +487,18 @@ where
     let s: Option<String> = Option::deserialize(deserializer)?;
     match s {
         Some(json_string) => {
+            // The devcontainer metadata label can be either a JSON array (e.g. from
+            // image-based devcontainers) or a single JSON object (e.g. from
+            // docker-compose-based devcontainers created by the devcontainer CLI).
+            // Handle both formats.
             let parsed: Vec<HashMap<String, serde_json_lenient::Value>> =
-                serde_json_lenient::from_str(&json_string).map_err(|e| {
-                    log::error!("Error deserializing metadata: {e}");
-                    serde::de::Error::custom(e)
+                serde_json_lenient::from_str(&json_string).or_else(|_| {
+                    let single: HashMap<String, serde_json_lenient::Value> =
+                        serde_json_lenient::from_str(&json_string).map_err(|e| {
+                            log::error!("Error deserializing metadata: {e}");
+                            serde::de::Error::custom(e)
+                        })?;
+                    Ok(vec![single])
                 })?;
             Ok(Some(parsed))
         }
@@ -934,6 +944,30 @@ mod test {
         assert_eq!(target_dir.unwrap(), "/workspaces/cli/".to_string());
     }
 
+    #[test]
+    fn should_deserialize_object_metadata_from_docker_compose_container() {
+        // The devcontainer CLI writes metadata as a bare JSON object (not an array)
+        // when there is only one metadata entry (e.g. docker-compose with no features).
+        // See https://github.com/devcontainers/cli/issues/1054
+        let given_config = r#"
+    {
+      "Id": "dc4e7b8ff4bf",
+      "Config": {
+        "Labels": {
+          "devcontainer.metadata": "{\"remoteUser\":\"ubuntu\"}"
+        }
+      }
+    }
+                "#;
+        let config = serde_json_lenient::from_str::<DockerInspect>(given_config).unwrap();
+
+        assert!(config.config.labels.metadata.is_some());
+        let metadata = config.config.labels.metadata.unwrap();
+        assert_eq!(metadata.len(), 1);
+        assert!(metadata[0].contains_key("remoteUser"));
+        assert_eq!(metadata[0]["remoteUser"], "ubuntu");
+    }
+
     #[test]
     fn should_deserialize_docker_compose_config() {
         let given_config = r#"

crates/edit_prediction/src/edit_prediction.rs 🔗

@@ -1690,12 +1690,16 @@ impl EditPredictionStore {
                     settled_editable_region,
                     ts_error_count_before_prediction,
                     ts_error_count_after_prediction,
-                    edit_bytes_predicted_new = kept_rate_result.predicted_new_chars,
-                    edit_bytes_final_new = kept_rate_result.final_new_chars,
+                    edit_bytes_candidate_new = kept_rate_result.candidate_new_chars,
+                    edit_bytes_reference_new = kept_rate_result.reference_new_chars,
+                    edit_bytes_candidate_deleted = kept_rate_result.candidate_deleted_chars,
+                    edit_bytes_reference_deleted = kept_rate_result.reference_deleted_chars,
                     edit_bytes_kept = kept_rate_result.kept_chars,
+                    edit_bytes_correctly_deleted = kept_rate_result.correctly_deleted_chars,
                     edit_bytes_discarded = kept_rate_result.discarded_chars,
                     edit_bytes_context = kept_rate_result.context_chars,
                     edit_bytes_kept_rate = kept_rate_result.kept_rate,
+                    edit_bytes_recall_rate = kept_rate_result.recall_rate,
                     example,
                     e2e_latency = e2e_latency.as_millis(),
                 );

crates/edit_prediction/src/metrics/kept_rate.rs 🔗

@@ -13,12 +13,33 @@ pub enum TokenAnnotation {
 #[allow(dead_code)]
 #[derive(Debug, Clone)]
 pub struct KeptRateResult {
-    pub predicted_new_chars: usize,
-    pub final_new_chars: usize,
+    /// Characters newly introduced by the candidate.
+    pub candidate_new_chars: usize,
+    /// Characters newly introduced by the reference.
+    pub reference_new_chars: usize,
+    /// Characters from `base` that are deleted by the candidate.
+    pub candidate_deleted_chars: usize,
+    /// Characters from `base` that are deleted by the reference.
+    pub reference_deleted_chars: usize,
+    /// Candidate new characters that are also present in the reference.
     pub kept_chars: usize,
+    /// Base characters deleted by both the candidate and the reference.
+    pub correctly_deleted_chars: usize,
+    /// Candidate new characters that are not kept in the reference.
     pub discarded_chars: usize,
+    /// Candidate characters treated as unchanged context.
     pub context_chars: usize,
+    /// Fraction of candidate edit characters that match the reference edit.
+    ///
+    /// This includes both kept newly introduced characters and correctly
+    /// deleted base characters.
     pub kept_rate: f64,
+    /// Fraction of reference edit characters covered by the candidate edit.
+    ///
+    /// This includes both kept newly introduced characters and correctly
+    /// deleted base characters.
+    pub recall_rate: f64,
+    /// Per-token classification of the candidate's tokens (populated in test builds only).
     #[cfg(test)]
     pub token_annotations: Vec<TokenAnnotation>,
 }
@@ -188,89 +209,127 @@ fn analyze_masked_tokens<'a>(tokens: &[&'a str], mask: &[bool]) -> (Vec<&'a str>
     (unmasked_tokens, unmasked_chars, masked_chars)
 }
 
-fn should_bail_for_dirty_final(base: &str, predicted: &str, final_text: &str) -> bool {
-    let predicted_delta_chars = predicted.len().abs_diff(base.len());
-    let final_delta_chars = final_text.len().abs_diff(base.len());
-    predicted_delta_chars.abs_diff(final_delta_chars) > MAX_DIRTY_LENGTH_DELTA_CHARS
+fn count_unmasked_chars(tokens: &[&str], mask: &[bool]) -> usize {
+    tokens
+        .iter()
+        .zip(mask.iter())
+        .filter_map(|(&token, &is_masked)| (!is_masked).then_some(token.len()))
+        .sum()
+}
+
+fn should_bail_for_dirty_final(base: &str, candidate: &str, reference: &str) -> bool {
+    let candidate_delta_chars = candidate.len().abs_diff(base.len());
+    let reference_delta_chars = reference.len().abs_diff(base.len());
+    candidate_delta_chars.abs_diff(reference_delta_chars) > MAX_DIRTY_LENGTH_DELTA_CHARS
 }
 
-pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptRateResult {
-    if base == predicted && predicted == final_text {
-        let predicted_tokens = tokenize(predicted);
-        let context_chars = predicted_tokens.iter().map(|token| token.len()).sum();
+pub fn compute_kept_rate(base: &str, candidate: &str, reference: &str) -> KeptRateResult {
+    if base == candidate && candidate == reference {
+        let candidate_tokens = tokenize(candidate);
+        let context_chars = candidate_tokens.iter().map(|token| token.len()).sum();
         return KeptRateResult {
-            predicted_new_chars: 0,
-            final_new_chars: 0,
+            candidate_new_chars: 0,
+            reference_new_chars: 0,
+            candidate_deleted_chars: 0,
+            reference_deleted_chars: 0,
             kept_chars: 0,
+            correctly_deleted_chars: 0,
             discarded_chars: 0,
             context_chars,
             kept_rate: 1.0,
+            recall_rate: 1.0,
             #[cfg(test)]
-            token_annotations: vec![TokenAnnotation::Context; predicted_tokens.len()],
+            token_annotations: vec![TokenAnnotation::Context; candidate_tokens.len()],
         };
     }
 
-    if should_bail_for_dirty_final(base, predicted, final_text) {
-        let predicted_new_chars = predicted.len().abs_diff(base.len());
-        let final_new_chars = final_text.len().abs_diff(base.len());
+    if should_bail_for_dirty_final(base, candidate, reference) {
+        let candidate_new_chars = candidate.len().abs_diff(base.len());
+        let reference_new_chars = reference.len().abs_diff(base.len());
         return KeptRateResult {
-            predicted_new_chars,
-            final_new_chars,
+            candidate_new_chars,
+            reference_new_chars,
+            candidate_deleted_chars: 0,
+            reference_deleted_chars: 0,
             kept_chars: 0,
-            discarded_chars: predicted_new_chars,
+            correctly_deleted_chars: 0,
+            discarded_chars: candidate_new_chars,
             context_chars: 0,
             kept_rate: 0.0,
+            recall_rate: 0.0,
             #[cfg(test)]
-            token_annotations: vec![TokenAnnotation::Discarded; tokenize(predicted).len()],
+            token_annotations: vec![TokenAnnotation::Discarded; tokenize(candidate).len()],
         };
     }
 
     let base_tokens = tokenize(base);
-    let predicted_tokens = tokenize(predicted);
-    let final_tokens = tokenize(final_text);
-
-    let pred_base_mask = lcs_keep_mask(&predicted_tokens, &base_tokens);
-    let (pred_final_mask, final_pred_mask) = lcs_keep_masks(&predicted_tokens, &final_tokens);
-    let context_mask: Vec<bool> = pred_base_mask
+    let candidate_tokens = tokenize(candidate);
+    let reference_tokens = tokenize(reference);
+
+    let (candidate_base_mask, base_candidate_mask) =
+        lcs_keep_masks(&candidate_tokens, &base_tokens);
+    let (candidate_reference_mask, reference_candidate_mask) =
+        lcs_keep_masks(&candidate_tokens, &reference_tokens);
+    let context_mask: Vec<bool> = candidate_base_mask
         .iter()
-        .zip(pred_final_mask.iter())
-        .map(|(&in_base, &in_final)| in_base && in_final)
+        .zip(candidate_reference_mask.iter())
+        .map(|(&in_base, &in_reference)| in_base && in_reference)
         .collect();
 
-    let (stripped_predicted, predicted_new_chars, context_chars) =
-        analyze_masked_tokens(&predicted_tokens, &context_mask);
+    let (stripped_candidate, candidate_new_chars, context_chars) =
+        analyze_masked_tokens(&candidate_tokens, &context_mask);
 
-    let final_base_mask = lcs_keep_mask(&final_tokens, &base_tokens);
-    let final_context_mask: Vec<bool> = final_base_mask
+    let (reference_base_mask, base_reference_mask) =
+        lcs_keep_masks(&reference_tokens, &base_tokens);
+    let reference_context_mask: Vec<bool> = reference_base_mask
         .iter()
-        .zip(final_pred_mask.iter())
-        .map(|(&in_base, &in_predicted)| in_base && in_predicted)
+        .zip(reference_candidate_mask.iter())
+        .map(|(&in_base, &in_candidate)| in_base && in_candidate)
         .collect();
 
-    let (stripped_final, final_new_chars, _) =
-        analyze_masked_tokens(&final_tokens, &final_context_mask);
+    let (stripped_reference, reference_new_chars, _) =
+        analyze_masked_tokens(&reference_tokens, &reference_context_mask);
 
-    let keep_mask = lcs_keep_mask(&stripped_predicted, &stripped_final);
+    let keep_mask = lcs_keep_mask(&stripped_candidate, &stripped_reference);
 
-    let kept_chars: usize = stripped_predicted
+    let kept_chars: usize = stripped_candidate
         .iter()
         .zip(keep_mask.iter())
         .filter_map(|(&token, &is_kept)| is_kept.then_some(token.len()))
         .sum();
 
-    let discarded_chars = predicted_new_chars - kept_chars;
+    let candidate_deleted_chars = count_unmasked_chars(&base_tokens, &base_candidate_mask);
+    let reference_deleted_chars = count_unmasked_chars(&base_tokens, &base_reference_mask);
+    let correctly_deleted_chars: usize = base_tokens
+        .iter()
+        .zip(base_candidate_mask.iter().zip(base_reference_mask.iter()))
+        .filter_map(|(&token, (&in_candidate, &in_reference))| {
+            (!in_candidate && !in_reference).then_some(token.len())
+        })
+        .sum();
+
+    let discarded_chars = candidate_new_chars - kept_chars;
+    let matched_edit_chars = kept_chars + correctly_deleted_chars;
+    let candidate_edit_chars = candidate_new_chars + candidate_deleted_chars;
+    let reference_edit_chars = reference_new_chars + reference_deleted_chars;
 
-    let kept_rate = if predicted_new_chars == 0 {
-        if final_new_chars == 0 { 1.0 } else { 0.0 }
+    let kept_rate = if candidate_edit_chars == 0 {
+        if reference_edit_chars == 0 { 1.0 } else { 0.0 }
     } else {
-        kept_chars as f64 / predicted_new_chars as f64
+        matched_edit_chars as f64 / candidate_edit_chars as f64
+    };
+
+    let recall_rate = if reference_edit_chars == 0 {
+        if candidate_edit_chars == 0 { 1.0 } else { 0.0 }
+    } else {
+        matched_edit_chars as f64 / reference_edit_chars as f64
     };
 
     #[cfg(test)]
     let token_annotations = {
-        let mut token_annotations = Vec::with_capacity(predicted_tokens.len());
+        let mut token_annotations = Vec::with_capacity(candidate_tokens.len());
         let mut new_index = 0;
-        for (token_index, _token) in predicted_tokens.iter().enumerate() {
+        for (token_index, _token) in candidate_tokens.iter().enumerate() {
             if context_mask[token_index] {
                 token_annotations.push(TokenAnnotation::Context);
             } else {
@@ -288,12 +347,16 @@ pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptR
     };
 
     KeptRateResult {
-        predicted_new_chars,
-        final_new_chars,
+        candidate_new_chars,
+        reference_new_chars,
+        candidate_deleted_chars,
+        reference_deleted_chars,
         kept_chars,
+        correctly_deleted_chars,
         discarded_chars,
         context_chars,
         kept_rate,
+        recall_rate,
         #[cfg(test)]
         token_annotations,
     }
@@ -327,7 +390,8 @@ mod test_kept_rate {
     fn test_rate_extremes() {
         let no_change = compute_kept_rate("foo bar", "foo bar", "foo bar");
         assert!((no_change.kept_rate - 1.0).abs() < 1e-6);
-        assert_eq!(no_change.predicted_new_chars, 0);
+        assert!((no_change.recall_rate - 1.0).abs() < 1e-6);
+        assert_eq!(no_change.candidate_new_chars, 0);
         assert!(
             no_change
                 .token_annotations
@@ -337,15 +401,17 @@ mod test_kept_rate {
 
         let accepted = compute_kept_rate("old", "new", "new");
         assert!((accepted.kept_rate - 1.0).abs() < 1e-6);
+        assert!((accepted.recall_rate - 1.0).abs() < 1e-6);
 
         let discarded = compute_kept_rate("old", "old", "new");
         assert!((discarded.kept_rate - 0.0).abs() < 1e-6);
+        assert!((discarded.recall_rate - 0.0).abs() < 1e-6);
     }
 
     #[test]
     fn test_pure_addition() {
         let kept = compute_kept_rate("", "brand new line\n", "brand new line\n");
-        assert_eq!(kept.kept_chars, kept.predicted_new_chars);
+        assert_eq!(kept.kept_chars, kept.candidate_new_chars);
         assert!(
             kept.token_annotations
                 .iter()
@@ -354,26 +420,28 @@ mod test_kept_rate {
 
         let discarded =
             compute_kept_rate("", "brand new line\n", "something completely different\n");
-        assert!(discarded.kept_chars < discarded.predicted_new_chars);
+        assert!(discarded.kept_chars < discarded.candidate_new_chars);
     }
 
     #[test]
     fn test_decoy_when_base_excluded() {
         let base = "    decoy.when(mock_sync_hardware_api.sp()).then_return(SpeedStatus.IDLE)\n";
-        let predicted = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
-        let final_text = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
-        let result = compute_kept_rate(base, predicted, final_text);
+        let candidate = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
+        let reference = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
+        let result = compute_kept_rate(base, candidate, reference);
         let expected_new = "mock_sync_module_hardware".len() + "speed_status".len();
-        assert_eq!(result.predicted_new_chars, expected_new);
+        assert_eq!(result.candidate_new_chars, expected_new);
+        assert!(result.correctly_deleted_chars > 0);
         assert!((result.kept_rate - 1.0).abs() < 1e-6);
+        assert!((result.recall_rate - 1.0).abs() < 1e-6);
     }
 
     #[test]
     fn test_missing_deletion() {
         let base = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\n";
-        let predicted = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\neprintln!(\"\");\n";
-        let final_text = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
-        let result = compute_kept_rate(base, predicted, final_text);
+        let candidate = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\neprintln!(\"\");\n";
+        let reference = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
+        let result = compute_kept_rate(base, candidate, reference);
         assert!(
             result.kept_rate < 0.85,
             "expected kept_rate < 0.85, got {}",
@@ -385,7 +453,12 @@ mod test_kept_rate {
     #[test]
     fn test_empty_prediction() {
         let result = compute_kept_rate("old line\n", "", "new line\n");
-        assert!((result.kept_rate - 0.0).abs() < 1e-6);
+        assert_eq!(result.candidate_new_chars, 0);
+        assert!(result.candidate_deleted_chars > 0);
+        assert!(result.correctly_deleted_chars > 0);
+        assert!(result.correctly_deleted_chars < result.candidate_deleted_chars);
+        assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0);
+        assert!(result.recall_rate > 0.0 && result.recall_rate < 1.0);
     }
 
     #[test]
@@ -399,24 +472,25 @@ mod test_kept_rate {
     #[test]
     fn test_bails_for_dirty_final() {
         let base = "fn example() {\n    work();\n}\n";
-        let predicted = "fn example() {\n    work();\n    predicted();\n}\n";
-        let final_text = format!(
+        let candidate = "fn example() {\n    work();\n    predicted();\n}\n";
+        let reference = format!(
             "fn example() {{\n    work();\n    {}\n}}\n",
             "settled();\n    ".repeat(MAX_DIRTY_LENGTH_DELTA_CHARS / 8 + 64)
         );
 
-        let result = compute_kept_rate(base, predicted, &final_text);
+        let result = compute_kept_rate(base, candidate, &reference);
         assert_eq!(result.kept_rate, 0.0);
+        assert_eq!(result.recall_rate, 0.0);
         assert_eq!(result.kept_chars, 0);
-        assert_eq!(result.discarded_chars, result.predicted_new_chars);
+        assert_eq!(result.discarded_chars, result.candidate_new_chars);
     }
 
     #[test]
     fn test_eprintln_token_alignment() {
         let base = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\n";
-        let predicted = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
-        let final_text = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
-        let result = compute_kept_rate(base, predicted, final_text);
+        let candidate = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
+        let reference = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
+        let result = compute_kept_rate(base, candidate, reference);
         assert!(result.discarded_chars > 0);
         assert!(result.kept_chars > 0);
         assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0);
@@ -427,14 +501,18 @@ mod test_kept_rate {
     #[test]
     fn test_annotations_rename() {
         let base = "    foo(old_name)\n";
-        let predicted = "    foo(new_name)\n";
-        let final_text = "    foo(new_name)\n";
-        let result = compute_kept_rate(base, predicted, final_text);
-
-        assert_eq!(result.predicted_new_chars, "new_name".len());
-        assert_eq!(result.token_annotations.len(), tokenize(predicted).len());
-
-        for (&token, &annotation) in tokenize(predicted).iter().zip(&result.token_annotations) {
+        let candidate = "    foo(new_name)\n";
+        let reference = "    foo(new_name)\n";
+        let result = compute_kept_rate(base, candidate, reference);
+
+        assert_eq!(result.candidate_new_chars, "new_name".len());
+        assert_eq!(result.candidate_deleted_chars, "old_name".len());
+        assert_eq!(result.reference_deleted_chars, "old_name".len());
+        assert_eq!(result.correctly_deleted_chars, "old_name".len());
+        assert!((result.recall_rate - 1.0).abs() < 1e-6);
+        assert_eq!(result.token_annotations.len(), tokenize(candidate).len());
+
+        for (&token, &annotation) in tokenize(candidate).iter().zip(&result.token_annotations) {
             if token == "new_name" {
                 assert_eq!(annotation, TokenAnnotation::Kept);
             } else {
@@ -446,12 +524,12 @@ mod test_kept_rate {
     #[test]
     fn test_annotations_eprintln_coloring() {
         let base = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\n";
-        let predicted = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
-        let final_text = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
-        let result = compute_kept_rate(base, predicted, final_text);
-        let predicted_tokens = tokenize(predicted);
+        let candidate = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
+        let reference = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
+        let result = compute_kept_rate(base, candidate, reference);
+        let candidate_tokens = tokenize(candidate);
 
-        let eprintln_index = predicted_tokens
+        let eprintln_index = candidate_tokens
             .iter()
             .position(|&token| token == "eprintln")
             .expect("eprintln token not found");
@@ -485,12 +563,15 @@ mod test_kept_rate {
     #[test]
     fn test_repetitive_tokens_remain_discarded() {
         let base = "foo + foo + foo + foo + foo\n".repeat(16);
-        let predicted = "foo + foo + prediction_token + foo + foo\n".repeat(16);
-        let final_text = "foo + foo + kept_token + foo + foo\n".repeat(16);
-        let result = compute_kept_rate(&base, &predicted, &final_text);
+        let candidate = "foo + foo + prediction_token + foo + foo\n".repeat(16);
+        let reference = "foo + foo + kept_token + foo + foo\n".repeat(16);
+        let result = compute_kept_rate(&base, &candidate, &reference);
 
         assert_eq!(result.kept_chars, 0);
-        assert_eq!(result.discarded_chars, result.predicted_new_chars);
-        assert_eq!(result.predicted_new_chars, "prediction_token".len() * 16);
+        assert_eq!(result.correctly_deleted_chars, "foo".len() * 16);
+        assert_eq!(result.discarded_chars, result.candidate_new_chars);
+        assert_eq!(result.candidate_new_chars, "prediction_token".len() * 16);
+        assert!(result.kept_rate > 0.0);
+        assert!(result.recall_rate > 0.0);
     }
 }

crates/edit_prediction/src/zed_edit_prediction_delegate.rs 🔗

@@ -6,6 +6,7 @@ use edit_prediction_types::{
     DataCollectionState, EditPredictionDelegate, EditPredictionDiscardReason,
     EditPredictionIconSet, SuggestionDisplayType,
 };
+use feature_flags::FeatureFlagAppExt;
 use gpui::{App, Entity, prelude::*};
 use language::{Buffer, ToPoint as _};
 use project::Project;
@@ -73,6 +74,24 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
                 self.store
                     .read(cx)
                     .is_file_open_source(&self.project, file, cx);
+
+            if let Some(organization_configuration) = self
+                .store
+                .read(cx)
+                .user_store
+                .read(cx)
+                .current_organization_configuration()
+            {
+                if !organization_configuration
+                    .edit_prediction
+                    .is_feedback_enabled
+                {
+                    return DataCollectionState::Disabled {
+                        is_project_open_source,
+                    };
+                }
+            }
+
             if self.store.read(cx).data_collection_choice.is_enabled(cx) {
                 DataCollectionState::Enabled {
                     is_project_open_source,
@@ -89,6 +108,29 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
         }
     }
 
+    fn can_toggle_data_collection(&self, cx: &App) -> bool {
+        if cx.is_staff() {
+            return false;
+        }
+
+        if let Some(organization_configuration) = self
+            .store
+            .read(cx)
+            .user_store
+            .read(cx)
+            .current_organization_configuration()
+        {
+            if !organization_configuration
+                .edit_prediction
+                .is_feedback_enabled
+            {
+                return false;
+            }
+        }
+
+        true
+    }
+
     fn toggle_data_collection(&mut self, cx: &mut App) {
         self.store.update(cx, |store, cx| {
             store.toggle_data_collection_choice(cx);

crates/edit_prediction_cli/src/example.rs 🔗

@@ -187,6 +187,14 @@ pub struct ExampleScore {
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub kept_rate: Option<f64>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub recall_rate: Option<f64>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub kept_chars: Option<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub correctly_deleted_chars: Option<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub discarded_chars: Option<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
     pub cumulative_logprob: Option<f64>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub avg_logprob: Option<f64>,

crates/edit_prediction_cli/src/score.rs 🔗

@@ -85,6 +85,10 @@ pub async fn run_scoring(
         inserted_tokens: 0,
         deleted_tokens: 0,
         kept_rate: None,
+        recall_rate: None,
+        kept_chars: None,
+        correctly_deleted_chars: None,
+        discarded_chars: None,
         cumulative_logprob: None,
         avg_logprob: None,
     };
@@ -187,9 +191,20 @@ pub async fn run_scoring(
             prediction.actual_cursor.as_ref(),
         );
 
-        let kept_rate = best_expected_text.map(|final_text| {
-            metrics::compute_kept_rate(original_text, &actual_text, final_text).kept_rate
-        });
+        let (kept_rate, recall_rate, kept_chars, correctly_deleted_chars, discarded_chars) =
+            best_expected_text
+                .map(|reference_text| {
+                    let result =
+                        metrics::compute_kept_rate(original_text, &actual_text, reference_text);
+                    (
+                        Some(result.kept_rate),
+                        Some(result.recall_rate),
+                        Some(result.kept_chars),
+                        Some(result.correctly_deleted_chars),
+                        Some(result.discarded_chars),
+                    )
+                })
+                .unwrap_or((None, None, None, None, None));
 
         scores.push(ExampleScore {
             delta_chr_f: best_delta_chr_f_metrics.score as f32,
@@ -211,6 +226,10 @@ pub async fn run_scoring(
             inserted_tokens: token_changes.inserted_tokens,
             deleted_tokens: token_changes.deleted_tokens,
             kept_rate,
+            recall_rate,
+            kept_chars,
+            correctly_deleted_chars,
+            discarded_chars,
             cumulative_logprob: prediction.cumulative_logprob,
             avg_logprob: prediction.avg_logprob,
         });
@@ -277,6 +296,11 @@ pub fn print_report(examples: &[Example], verbose: bool) {
     let mut isolated_whitespace_count: usize = 0;
     let mut kept_rate_sum: f64 = 0.0;
     let mut kept_rate_count: usize = 0;
+    let mut kept_chars_total: usize = 0;
+    let mut correctly_deleted_chars_total: usize = 0;
+    let mut discarded_chars_total: usize = 0;
+    let mut recall_rate_sum: f64 = 0.0;
+    let mut recall_rate_count: usize = 0;
     let mut patch_inserted_tokens: Vec<usize> = Vec::new();
     let mut patch_deleted_tokens: Vec<usize> = Vec::new();
     let mut predictions_with_patch: usize = 0;
@@ -369,11 +393,24 @@ pub fn print_report(examples: &[Example], verbose: bool) {
                 isolated_whitespace_count += 1;
             }
 
-            // Accumulate kept rate metrics
+            // Accumulate kept and recall rate metrics
             if let Some(kr) = score.kept_rate {
                 kept_rate_sum += kr;
                 kept_rate_count += 1;
             }
+            if let Some(kept_chars) = score.kept_chars {
+                kept_chars_total += kept_chars;
+            }
+            if let Some(correctly_deleted_chars) = score.correctly_deleted_chars {
+                correctly_deleted_chars_total += correctly_deleted_chars;
+            }
+            if let Some(discarded_chars) = score.discarded_chars {
+                discarded_chars_total += discarded_chars;
+            }
+            if let Some(rr) = score.recall_rate {
+                recall_rate_sum += rr;
+                recall_rate_count += 1;
+            }
 
             // Accumulate token change metrics (only for predictions that produced a patch)
             let has_patch = example
@@ -504,13 +541,24 @@ pub fn print_report(examples: &[Example], verbose: bool) {
             println!("Isolated whitespace changes: {}", isolated_ws_str);
         }
 
-        // Print kept rate metrics
+        // Print kept and recall rate metrics
         if kept_rate_count > 0 {
             let avg_kept_rate = kept_rate_sum / kept_rate_count as f64;
             println!(
-                "Kept rate: {:.1}% avg ({} evaluated)",
+                "Kept rate: {:.1}% avg ({} evaluated, kept chars: {}, correctly deleted chars: {}, discarded chars: {})",
                 avg_kept_rate * 100.0,
-                kept_rate_count
+                kept_rate_count,
+                kept_chars_total,
+                correctly_deleted_chars_total,
+                discarded_chars_total
+            );
+        }
+        if recall_rate_count > 0 {
+            let avg_recall_rate = recall_rate_sum / recall_rate_count as f64;
+            println!(
+                "Recall rate: {:.1}% avg ({} evaluated)",
+                avg_recall_rate * 100.0,
+                recall_rate_count
             );
         }
 
@@ -618,6 +666,14 @@ pub struct SummaryJson {
     pub isolated_whitespace_rate: Option<f32>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub avg_kept_rate: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub avg_recall_rate: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub total_kept_chars: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub total_correctly_deleted_chars: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub total_discarded_chars: Option<usize>,
 }
 
 pub fn compute_summary(examples: &[Example]) -> SummaryJson {
@@ -645,6 +701,14 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
     let mut isolated_whitespace_count: usize = 0;
     let mut kept_rate_sum: f64 = 0.0;
     let mut kept_rate_count: usize = 0;
+    let mut kept_chars_total: usize = 0;
+    let mut kept_chars_count: usize = 0;
+    let mut correctly_deleted_chars_total: usize = 0;
+    let mut correctly_deleted_chars_count: usize = 0;
+    let mut discarded_chars_total: usize = 0;
+    let mut discarded_chars_count: usize = 0;
+    let mut recall_rate_sum: f64 = 0.0;
+    let mut recall_rate_count: usize = 0;
 
     for example in examples {
         for (score_idx, score) in example.score.iter().enumerate() {
@@ -685,11 +749,27 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
                 isolated_whitespace_count += 1;
             }
 
-            // Accumulate kept rate metrics
+            // Accumulate kept and recall rate metrics
             if let Some(kr) = score.kept_rate {
                 kept_rate_sum += kr;
                 kept_rate_count += 1;
             }
+            if let Some(kept_chars) = score.kept_chars {
+                kept_chars_total += kept_chars;
+                kept_chars_count += 1;
+            }
+            if let Some(correctly_deleted_chars) = score.correctly_deleted_chars {
+                correctly_deleted_chars_total += correctly_deleted_chars;
+                correctly_deleted_chars_count += 1;
+            }
+            if let Some(discarded_chars) = score.discarded_chars {
+                discarded_chars_total += discarded_chars;
+                discarded_chars_count += 1;
+            }
+            if let Some(rr) = score.recall_rate {
+                recall_rate_sum += rr;
+                recall_rate_count += 1;
+            }
 
             // Accumulate cursor metrics
             if let Some(exact_match) = score.cursor_exact_match {
@@ -771,6 +851,30 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
         None
     };
 
+    let avg_recall_rate = if recall_rate_count > 0 {
+        Some(recall_rate_sum / recall_rate_count as f64)
+    } else {
+        None
+    };
+
+    let total_kept_chars = if kept_chars_count > 0 {
+        Some(kept_chars_total)
+    } else {
+        None
+    };
+
+    let total_correctly_deleted_chars = if correctly_deleted_chars_count > 0 {
+        Some(correctly_deleted_chars_total)
+    } else {
+        None
+    };
+
+    let total_discarded_chars = if discarded_chars_count > 0 {
+        Some(discarded_chars_total)
+    } else {
+        None
+    };
+
     SummaryJson {
         total_examples: total_scores,
         avg_delta_chr_f,
@@ -804,6 +908,10 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
         wrong_editable_region_rate,
         isolated_whitespace_rate,
         avg_kept_rate,
+        avg_recall_rate,
+        total_kept_chars,
+        total_correctly_deleted_chars,
+        total_discarded_chars,
     }
 }
 

crates/edit_prediction_types/src/edit_prediction_types.rs 🔗

@@ -168,6 +168,10 @@ pub trait EditPredictionDelegate: 'static + Sized {
         None
     }
 
+    fn can_toggle_data_collection(&self, _cx: &App) -> bool {
+        true
+    }
+
     fn toggle_data_collection(&mut self, _cx: &mut App) {}
     fn is_enabled(
         &self,
@@ -209,6 +213,7 @@ pub trait EditPredictionDelegateHandle {
     fn icons(&self, cx: &App) -> EditPredictionIconSet;
     fn data_collection_state(&self, cx: &App) -> DataCollectionState;
     fn usage(&self, cx: &App) -> Option<EditPredictionUsage>;
+    fn can_toggle_data_collection(&self, cx: &App) -> bool;
     fn toggle_data_collection(&self, cx: &mut App);
     fn is_refreshing(&self, cx: &App) -> bool;
     fn refresh(
@@ -265,6 +270,10 @@ where
         self.read(cx).usage(cx)
     }
 
+    fn can_toggle_data_collection(&self, cx: &App) -> bool {
+        self.read(cx).can_toggle_data_collection(cx)
+    }
+
     fn toggle_data_collection(&self, cx: &mut App) {
         self.update(cx, |this, cx| this.toggle_data_collection(cx))
     }

crates/edit_prediction_ui/src/edit_prediction_button.rs 🔗

@@ -790,7 +790,7 @@ impl EditPredictionButton {
                             .toggleable(IconPosition::Start, data_collection.is_enabled())
                             .icon(icon_name)
                             .icon_color(icon_color)
-                            .disabled(cx.is_staff())
+                            .disabled(!provider.can_toggle_data_collection(cx))
                             .documentation_aside(DocumentationSide::Left, move |cx| {
                                 let (msg, label_color, icon_name, icon_color) = match (is_open_source, is_collecting) {
                                     (true, true) => (

crates/editor/src/editor.rs 🔗

@@ -1183,6 +1183,7 @@ pub struct Editor {
     delegate_open_excerpts: bool,
     enable_lsp_data: bool,
     enable_runnables: bool,
+    enable_mouse_wheel_zoom: bool,
     show_line_numbers: Option<bool>,
     use_relative_line_numbers: Option<bool>,
     show_git_diff_gutter: Option<bool>,
@@ -1972,6 +1973,9 @@ impl Editor {
         clone.read_only = self.read_only;
         clone.buffers_with_disabled_indent_guides =
             self.buffers_with_disabled_indent_guides.clone();
+        clone.enable_mouse_wheel_zoom = self.enable_mouse_wheel_zoom;
+        clone.enable_lsp_data = self.enable_lsp_data;
+        clone.enable_runnables = self.enable_runnables;
         clone
     }
 
@@ -2419,8 +2423,9 @@ impl Editor {
             delegate_expand_excerpts: false,
             delegate_stage_and_restore: false,
             delegate_open_excerpts: false,
-            enable_lsp_data: true,
-            enable_runnables: true,
+            enable_lsp_data: full_mode,
+            enable_runnables: full_mode,
+            enable_mouse_wheel_zoom: full_mode,
             show_git_diff_gutter: None,
             show_code_actions: None,
             show_runnables: None,
@@ -26082,6 +26087,10 @@ impl Editor {
         self.enable_runnables = false;
     }
 
+    pub fn disable_mouse_wheel_zoom(&mut self) {
+        self.enable_mouse_wheel_zoom = false;
+    }
+
     fn update_data_on_scroll(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) {
         self.register_visible_buffers(cx);
         self.colorize_brackets(false, cx);

crates/editor/src/editor_settings.rs 🔗

@@ -33,6 +33,7 @@ pub struct EditorSettings {
     pub autoscroll_on_clicks: bool,
     pub horizontal_scroll_margin: f32,
     pub scroll_sensitivity: f32,
+    pub mouse_wheel_zoom: bool,
     pub fast_scroll_sensitivity: f32,
     pub sticky_scroll: StickyScroll,
     pub relative_line_numbers: RelativeLineNumbers,
@@ -251,6 +252,7 @@ impl Settings for EditorSettings {
             autoscroll_on_clicks: editor.autoscroll_on_clicks.unwrap(),
             horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(),
             scroll_sensitivity: editor.scroll_sensitivity.unwrap(),
+            mouse_wheel_zoom: editor.mouse_wheel_zoom.unwrap(),
             fast_scroll_sensitivity: editor.fast_scroll_sensitivity.unwrap(),
             sticky_scroll: StickyScroll {
                 enabled: sticky_scroll.enabled.unwrap(),

crates/editor/src/element.rs 🔗

@@ -7673,22 +7673,21 @@ impl EditorElement {
 
             move |event: &ScrollWheelEvent, phase, window, cx| {
                 if phase == DispatchPhase::Bubble && hitbox.should_handle_scroll(window) {
-                    if event.modifiers.secondary() {
+                    delta = delta.coalesce(event.delta);
+
+                    if event.modifiers.secondary()
+                        && editor.read(cx).enable_mouse_wheel_zoom
+                        && EditorSettings::get_global(cx).mouse_wheel_zoom
+                    {
                         let delta_y = match event.delta {
                             ScrollDelta::Pixels(pixels) => pixels.y.into(),
                             ScrollDelta::Lines(lines) => lines.y,
                         };
 
                         if delta_y > 0.0 {
-                            window.dispatch_action(
-                                Box::new(zed_actions::IncreaseBufferFontSize { persist: false }),
-                                cx,
-                            );
+                            theme_settings::increase_buffer_font_size(cx);
                         } else if delta_y < 0.0 {
-                            window.dispatch_action(
-                                Box::new(zed_actions::DecreaseBufferFontSize { persist: false }),
-                                cx,
-                            );
+                            theme_settings::decrease_buffer_font_size(cx);
                         }
 
                         cx.stop_propagation();
@@ -7701,10 +7700,7 @@ impl EditorElement {
                             }
                         };
 
-                        delta = delta.coalesce(event.delta);
                         editor.update(cx, |editor, cx| {
-                            let position_map: &PositionMap = &position_map;
-
                             let line_height = position_map.line_height;
                             let glyph_width = position_map.em_layout_width;
                             let (delta, axis) = match delta {

crates/feature_flags/src/flags.rs 🔗

@@ -63,6 +63,6 @@ impl FeatureFlag for ProjectPanelUndoRedoFeatureFlag {
     const NAME: &'static str = "project-panel-undo-redo";
 
     fn enabled_for_staff() -> bool {
-        false
+        true
     }
 }

crates/fs/Cargo.toml 🔗

@@ -32,6 +32,7 @@ parking_lot.workspace = true
 paths.workspace = true
 rope.workspace = true
 proto.workspace = true
+thiserror.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 smol.workspace = true
@@ -41,10 +42,7 @@ time.workspace = true
 util.workspace = true
 is_executable = "1.0.5"
 notify = "8.2.0"
-
-[target.'cfg(target_os = "macos")'.dependencies]
-objc.workspace = true
-cocoa = "0.26"
+trash = { git = "https://github.com/zed-industries/trash-rs", rev = "3bf27effd4eb8699f2e484d3326b852fe3e53af7" }
 
 [target.'cfg(target_os = "windows")'.dependencies]
 windows.workspace = true

crates/fs/src/fs.rs 🔗

@@ -1,13 +1,12 @@
 pub mod fs_watcher;
 
 use parking_lot::Mutex;
+use std::ffi::OsString;
 use std::sync::atomic::{AtomicU8, AtomicUsize, Ordering};
 use std::time::Instant;
 use util::maybe;
 
 use anyhow::{Context as _, Result, anyhow};
-#[cfg(any(target_os = "linux", target_os = "freebsd"))]
-use ashpd::desktop::trash;
 use futures::stream::iter;
 use gpui::App;
 use gpui::BackgroundExecutor;
@@ -110,14 +109,22 @@ pub trait Fs: Send + Sync {
     ) -> Result<()>;
     async fn copy_file(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()>;
     async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()>;
+
+    /// Removes a directory from the filesystem.
+    /// There is no expectation that the directory will be preserved in the
+    /// system trash.
     async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>;
-    async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
-        self.remove_dir(path, options).await
-    }
+
+    /// Moves a file or directory to the system trash.
+    /// Returns a [`TrashedEntry`] that can be used to keep track of the
+    /// location of the trashed item in the system's trash.
+    async fn trash(&self, path: &Path, options: RemoveOptions) -> Result<TrashedEntry>;
+
+    /// Removes a file from the filesystem.
+    /// There is no expectation that the file will be preserved in the system
+    /// trash.
     async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>;
-    async fn trash_file(&self, path: &Path, options: RemoveOptions) -> Result<()> {
-        self.remove_file(path, options).await
-    }
+
     async fn open_handle(&self, path: &Path) -> Result<Arc<dyn FileHandle>>;
     async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read + Send + Sync>>;
     async fn load(&self, path: &Path) -> Result<String> {
@@ -158,12 +165,83 @@ pub trait Fs: Send + Sync {
     async fn is_case_sensitive(&self) -> bool;
     fn subscribe_to_jobs(&self) -> JobEventReceiver;
 
+    /// Restores a given `TrashedEntry`, moving it from the system's trash back
+    /// to the original path.
+    async fn restore(
+        &self,
+        trashed_entry: TrashedEntry,
+    ) -> std::result::Result<PathBuf, TrashRestoreError>;
+
     #[cfg(feature = "test-support")]
     fn as_fake(&self) -> Arc<FakeFs> {
         panic!("called as_fake on a real fs");
     }
 }
 
+// We use our own type rather than `trash::TrashItem` directly to avoid carrying
+// over fields we don't need (e.g. `time_deleted`) and to insulate callers and
+// tests from changes to that crate's API surface.
+/// Represents a file or directory that has been moved to the system trash,
+/// retaining enough information to restore it to its original location.
+#[derive(Clone, PartialEq, Debug)]
+pub struct TrashedEntry {
+    /// Platform-specific identifier for the file/directory in the trash.
+    ///
+    /// * Freedesktop – Path to the `.trashinfo` file.
+    /// * macOS & Windows – Full path to the file/directory in the system's
+    ///   trash.
+    pub id: OsString,
+    /// Name of the file/directory at the time of trashing, including extension.
+    pub name: OsString,
+    /// Absolute path to the parent directory at the time of trashing.
+    pub original_parent: PathBuf,
+}
+
+impl From<trash::TrashItem> for TrashedEntry {
+    fn from(item: trash::TrashItem) -> Self {
+        Self {
+            id: item.id,
+            name: item.name,
+            original_parent: item.original_parent,
+        }
+    }
+}
+
+impl TrashedEntry {
+    fn into_trash_item(self) -> trash::TrashItem {
+        trash::TrashItem {
+            id: self.id,
+            name: self.name,
+            original_parent: self.original_parent,
+            // `TrashedEntry` doesn't preserve `time_deleted` as we don't
+            // currently need it for restore, so we default it to 0 here.
+            time_deleted: 0,
+        }
+    }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum TrashRestoreError {
+    #[error("The specified `path` ({}) was not found in the system's trash.", path.display())]
+    NotFound { path: PathBuf },
+    #[error("File or directory ({}) already exists at the restore destination.", path.display())]
+    Collision { path: PathBuf },
+    #[error("Unknown error ({description})")]
+    Unknown { description: String },
+}
+
+impl From<trash::Error> for TrashRestoreError {
+    fn from(err: trash::Error) -> Self {
+        match err {
+            trash::Error::RestoreCollision { path, .. } => Self::Collision { path },
+            trash::Error::Unknown { description } => Self::Unknown { description },
+            other => Self::Unknown {
+                description: other.to_string(),
+            },
+        }
+    }
+}
+
 struct GlobalFs(Arc<dyn Fs>);
 
 impl Global for GlobalFs {}
@@ -718,93 +796,26 @@ impl Fs for RealFs {
         }
     }
 
-    #[cfg(target_os = "macos")]
-    async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> {
-        use cocoa::{
-            base::{id, nil},
-            foundation::{NSAutoreleasePool, NSString},
-        };
-        use objc::{class, msg_send, sel, sel_impl};
-
-        unsafe {
-            /// Allow NSString::alloc use here because it sets autorelease
-            #[allow(clippy::disallowed_methods)]
-            unsafe fn ns_string(string: &str) -> id {
-                unsafe { NSString::alloc(nil).init_str(string).autorelease() }
-            }
-
-            let url: id = msg_send![class!(NSURL), fileURLWithPath: ns_string(path.to_string_lossy().as_ref())];
-            let array: id = msg_send![class!(NSArray), arrayWithObject: url];
-            let workspace: id = msg_send![class!(NSWorkspace), sharedWorkspace];
-
-            let _: id = msg_send![workspace, recycleURLs: array completionHandler: nil];
-        }
-        Ok(())
-    }
-
-    #[cfg(any(target_os = "linux", target_os = "freebsd"))]
-    async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> {
-        if let Ok(Some(metadata)) = self.metadata(path).await
-            && metadata.is_symlink
-        {
-            // TODO: trash_file does not support trashing symlinks yet - https://github.com/bilelmoussaoui/ashpd/issues/255
-            return self.remove_file(path, RemoveOptions::default()).await;
-        }
-        let file = smol::fs::File::open(path).await?;
-        match trash::trash_file(&file.as_fd()).await {
-            Ok(_) => Ok(()),
-            Err(err) => {
-                log::error!("Failed to trash file: {}", err);
-                // Trashing files can fail if you don't have a trashing dbus service configured.
-                // In that case, delete the file directly instead.
-                return self.remove_file(path, RemoveOptions::default()).await;
-            }
-        }
-    }
-
-    #[cfg(target_os = "windows")]
-    async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> {
-        use util::paths::SanitizedPath;
-        use windows::{
-            Storage::{StorageDeleteOption, StorageFile},
-            core::HSTRING,
-        };
-        // todo(windows)
-        // When new version of `windows-rs` release, make this operation `async`
-        let path = path.canonicalize()?;
-        let path = SanitizedPath::new(&path);
-        let path_string = path.to_string();
-        let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_string))?.get()?;
-        file.DeleteAsync(StorageDeleteOption::Default)?.get()?;
-        Ok(())
-    }
-
-    #[cfg(target_os = "macos")]
-    async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
-        self.trash_file(path, options).await
-    }
+    async fn trash(&self, path: &Path, _options: RemoveOptions) -> Result<TrashedEntry> {
+        // Canonicalize the path first: `trash` resolves relative paths against
+        // Zed's working directory (not usually the worktree), so a relative
+        // `path` would be joined onto the wrong base and trash the wrong entry.
+        let path = self
+            .canonicalize(path)
+            .await
+            .context("Could not canonicalize the path of the file")?;
 
-    #[cfg(any(target_os = "linux", target_os = "freebsd"))]
-    async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
-        self.trash_file(path, options).await
-    }
+        let (tx, rx) = futures::channel::oneshot::channel();
+        std::thread::Builder::new()
+            .name("trash file or dir".to_string())
+            .spawn(|| tx.send(trash::delete_with_info(path)))
+            .expect("The os can spawn threads");
 
-    #[cfg(target_os = "windows")]
-    async fn trash_dir(&self, path: &Path, _options: RemoveOptions) -> Result<()> {
-        use util::paths::SanitizedPath;
-        use windows::{
-            Storage::{StorageDeleteOption, StorageFolder},
-            core::HSTRING,
-        };
-
-        // todo(windows)
-        // When new version of `windows-rs` release, make this operation `async`
-        let path = path.canonicalize()?;
-        let path = SanitizedPath::new(&path);
-        let path_string = path.to_string();
-        let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_string))?.get()?;
-        folder.DeleteAsync(StorageDeleteOption::Default)?.get()?;
-        Ok(())
+        Ok(rx
+            .await
+            .context("Tx dropped or fs.restore panicked")?
+            .context("Could not trash file or dir")?
+            .into())
     }
 
     async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read + Send + Sync>> {
@@ -1252,6 +1263,24 @@ impl Fs for RealFs {
         );
         res
     }
+
+    async fn restore(
+        &self,
+        trashed_entry: TrashedEntry,
+    ) -> std::result::Result<PathBuf, TrashRestoreError> {
+        let restored_item_path = trashed_entry.original_parent.join(&trashed_entry.name);
+
+        let (tx, rx) = futures::channel::oneshot::channel();
+        std::thread::Builder::new()
+            .name("restore trashed item".to_string())
+            .spawn(move || {
+                let res = trash::restore_all([trashed_entry.into_trash_item()]);
+                tx.send(res)
+            })
+            .expect("The OS can spawn a threads");
+        rx.await.expect("Restore all never panics")?;
+        Ok(restored_item_path)
+    }
 }
 
 #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
@@ -1287,6 +1316,7 @@ struct FakeFsState {
     path_write_counts: std::collections::HashMap<PathBuf, usize>,
     moves: std::collections::HashMap<u64, PathBuf>,
     job_event_subscribers: Arc<Mutex<Vec<JobEventSender>>>,
+    trash: Vec<(TrashedEntry, FakeFsEntry)>,
 }
 
 #[cfg(feature = "test-support")]
@@ -1572,6 +1602,7 @@ impl FakeFs {
                 path_write_counts: Default::default(),
                 moves: Default::default(),
                 job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
+                trash: Vec::new(),
             })),
         });
 
@@ -2397,6 +2428,90 @@ impl FakeFs {
     fn simulate_random_delay(&self) -> impl futures::Future<Output = ()> {
         self.executor.simulate_random_delay()
     }
+
+    /// Returns list of all tracked trash entries.
+    pub fn trash_entries(&self) -> Vec<TrashedEntry> {
+        self.state
+            .lock()
+            .trash
+            .iter()
+            .map(|(entry, _)| entry.clone())
+            .collect()
+    }
+
+    async fn remove_dir_inner(
+        &self,
+        path: &Path,
+        options: RemoveOptions,
+    ) -> Result<Option<FakeFsEntry>> {
+        self.simulate_random_delay().await;
+
+        let path = normalize_path(path);
+        let parent_path = path.parent().context("cannot remove the root")?;
+        let base_name = path.file_name().context("cannot remove the root")?;
+
+        let mut state = self.state.lock();
+        let parent_entry = state.entry(parent_path)?;
+        let entry = parent_entry
+            .dir_entries(parent_path)?
+            .entry(base_name.to_str().unwrap().into());
+
+        let removed = match entry {
+            btree_map::Entry::Vacant(_) => {
+                if !options.ignore_if_not_exists {
+                    anyhow::bail!("{path:?} does not exist");
+                }
+
+                None
+            }
+            btree_map::Entry::Occupied(mut entry) => {
+                {
+                    let children = entry.get_mut().dir_entries(&path)?;
+                    if !options.recursive && !children.is_empty() {
+                        anyhow::bail!("{path:?} is not empty");
+                    }
+                }
+
+                Some(entry.remove())
+            }
+        };
+
+        state.emit_event([(path, Some(PathEventKind::Removed))]);
+        Ok(removed)
+    }
+
+    async fn remove_file_inner(
+        &self,
+        path: &Path,
+        options: RemoveOptions,
+    ) -> Result<Option<FakeFsEntry>> {
+        self.simulate_random_delay().await;
+
+        let path = normalize_path(path);
+        let parent_path = path.parent().context("cannot remove the root")?;
+        let base_name = path.file_name().unwrap();
+        let mut state = self.state.lock();
+        let parent_entry = state.entry(parent_path)?;
+        let entry = parent_entry
+            .dir_entries(parent_path)?
+            .entry(base_name.to_str().unwrap().into());
+        let removed = match entry {
+            btree_map::Entry::Vacant(_) => {
+                if !options.ignore_if_not_exists {
+                    anyhow::bail!("{path:?} does not exist");
+                }
+
+                None
+            }
+            btree_map::Entry::Occupied(mut entry) => {
+                entry.get_mut().file_content(&path)?;
+                Some(entry.remove())
+            }
+        };
+
+        state.emit_event([(path, Some(PathEventKind::Removed))]);
+        Ok(removed)
+    }
 }
 
 #[cfg(feature = "test-support")]
@@ -2696,62 +2811,37 @@ impl Fs for FakeFs {
     }
 
     async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
-        self.simulate_random_delay().await;
+        self.remove_dir_inner(path, options).await.map(|_| ())
+    }
 
-        let path = normalize_path(path);
-        let parent_path = path.parent().context("cannot remove the root")?;
-        let base_name = path.file_name().context("cannot remove the root")?;
+    async fn trash(&self, path: &Path, options: RemoveOptions) -> Result<TrashedEntry> {
+        let normalized_path = normalize_path(path);
+        let parent_path = normalized_path.parent().context("cannot remove the root")?;
+        let base_name = normalized_path.file_name().unwrap();
+        let result = if self.is_dir(path).await {
+            self.remove_dir_inner(path, options).await?
+        } else {
+            self.remove_file_inner(path, options).await?
+        };
 
-        let mut state = self.state.lock();
-        let parent_entry = state.entry(parent_path)?;
-        let entry = parent_entry
-            .dir_entries(parent_path)?
-            .entry(base_name.to_str().unwrap().into());
+        match result {
+            Some(fake_entry) => {
+                let trashed_entry = TrashedEntry {
+                    id: base_name.to_str().unwrap().into(),
+                    name: base_name.to_str().unwrap().into(),
+                    original_parent: parent_path.to_path_buf(),
+                };
 
-        match entry {
-            btree_map::Entry::Vacant(_) => {
-                if !options.ignore_if_not_exists {
-                    anyhow::bail!("{path:?} does not exist");
-                }
-            }
-            btree_map::Entry::Occupied(mut entry) => {
-                {
-                    let children = entry.get_mut().dir_entries(&path)?;
-                    if !options.recursive && !children.is_empty() {
-                        anyhow::bail!("{path:?} is not empty");
-                    }
-                }
-                entry.remove();
+                let mut state = self.state.lock();
+                state.trash.push((trashed_entry.clone(), fake_entry));
+                Ok(trashed_entry)
             }
+            None => anyhow::bail!("{normalized_path:?} does not exist"),
         }
-        state.emit_event([(path, Some(PathEventKind::Removed))]);
-        Ok(())
     }
 
     async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> {
-        self.simulate_random_delay().await;
-
-        let path = normalize_path(path);
-        let parent_path = path.parent().context("cannot remove the root")?;
-        let base_name = path.file_name().unwrap();
-        let mut state = self.state.lock();
-        let parent_entry = state.entry(parent_path)?;
-        let entry = parent_entry
-            .dir_entries(parent_path)?
-            .entry(base_name.to_str().unwrap().into());
-        match entry {
-            btree_map::Entry::Vacant(_) => {
-                if !options.ignore_if_not_exists {
-                    anyhow::bail!("{path:?} does not exist");
-                }
-            }
-            btree_map::Entry::Occupied(mut entry) => {
-                entry.get_mut().file_content(&path)?;
-                entry.remove();
-            }
-        }
-        state.emit_event([(path, Some(PathEventKind::Removed))]);
-        Ok(())
+        self.remove_file_inner(path, options).await.map(|_| ())
     }
 
     async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read + Send + Sync>> {
@@ -3002,6 +3092,49 @@ impl Fs for FakeFs {
         receiver
     }
 
+    async fn restore(&self, trashed_entry: TrashedEntry) -> Result<PathBuf, TrashRestoreError> {
+        let mut state = self.state.lock();
+
+        let Some((trashed_entry, fake_entry)) = state
+            .trash
+            .iter()
+            .find(|(entry, _)| *entry == trashed_entry)
+            .cloned()
+        else {
+            return Err(TrashRestoreError::NotFound {
+                path: PathBuf::from(trashed_entry.id),
+            });
+        };
+
+        let path = trashed_entry
+            .original_parent
+            .join(trashed_entry.name.clone());
+
+        let result = state.write_path(&path, |entry| match entry {
+            btree_map::Entry::Vacant(entry) => {
+                entry.insert(fake_entry);
+                Ok(())
+            }
+            btree_map::Entry::Occupied(_) => {
+                anyhow::bail!("Failed to restore {:?}", path);
+            }
+        });
+
+        match result {
+            Ok(_) => {
+                state.trash.retain(|(entry, _)| *entry != trashed_entry);
+                state.emit_event([(path.clone(), Some(PathEventKind::Created))]);
+                Ok(path)
+            }
+            Err(_) => {
+                // For now, assume the failure was a collision (an entry already
+                // exists at the restore destination). At present that is the
+                // only way `write_path` can fail here.
+                Err(TrashRestoreError::Collision { path })
+            }
+        }
+    }
+
     #[cfg(feature = "test-support")]
     fn as_fake(&self) -> Arc<FakeFs> {
         self.this.upgrade().unwrap()

crates/fs/tests/integration/fs.rs 🔗

@@ -1,5 +1,6 @@
 use std::{
     collections::BTreeSet,
+    ffi::OsString,
     io::Write,
     path::{Path, PathBuf},
     time::Duration,
@@ -626,6 +627,205 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) {
     // don't care about len or mtime on symlinks?
 }
 
+#[gpui::test]
+async fn test_fake_fs_trash(executor: BackgroundExecutor) {
+    let fs = FakeFs::new(executor.clone());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "src": {
+                "file_c.txt": "File C",
+                "file_d.txt": "File D"
+            },
+            "file_a.txt": "File A",
+            "file_b.txt": "File B",
+        }),
+    )
+    .await;
+
+    // Trashing a file.
+    let root_path = PathBuf::from(path!("/root"));
+    let path = path!("/root/file_a.txt").as_ref();
+    let trashed_entry = fs
+        .trash(path, Default::default())
+        .await
+        .expect("should be able to trash {path:?}");
+
+    assert_eq!(trashed_entry.name, "file_a.txt");
+    assert_eq!(trashed_entry.original_parent, root_path);
+    assert_eq!(
+        fs.files(),
+        vec![
+            PathBuf::from(path!("/root/file_b.txt")),
+            PathBuf::from(path!("/root/src/file_c.txt")),
+            PathBuf::from(path!("/root/src/file_d.txt"))
+        ]
+    );
+
+    let trash_entries = fs.trash_entries();
+    assert_eq!(trash_entries.len(), 1);
+    assert_eq!(trash_entries[0].name, "file_a.txt");
+    assert_eq!(trash_entries[0].original_parent, root_path);
+
+    // Trashing a directory.
+    let path = path!("/root/src").as_ref();
+    let trashed_entry = fs
+        .trash(
+            path,
+            RemoveOptions {
+                recursive: true,
+                ..Default::default()
+            },
+        )
+        .await
+        .expect("should be able to trash {path:?}");
+
+    assert_eq!(trashed_entry.name, "src");
+    assert_eq!(trashed_entry.original_parent, root_path);
+    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_b.txt"))]);
+
+    let trash_entries = fs.trash_entries();
+    assert_eq!(trash_entries.len(), 2);
+    assert_eq!(trash_entries[1].name, "src");
+    assert_eq!(trash_entries[1].original_parent, root_path);
+}
+
+#[gpui::test]
+async fn test_fake_fs_restore(executor: BackgroundExecutor) {
+    let fs = FakeFs::new(executor.clone());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "src": {
+                "file_a.txt": "File A",
+                "file_b.txt": "File B",
+            },
+            "file_c.txt": "File C",
+        }),
+    )
+    .await;
+
+    // Providing a non-existent `TrashedEntry` should result in an error.
+    let id = OsString::from("/trash/file_c.txt");
+    let name = OsString::from("file_c.txt");
+    let original_parent = PathBuf::from(path!("/root"));
+    let trashed_entry = TrashedEntry {
+        id,
+        name,
+        original_parent,
+    };
+    let result = fs.restore(trashed_entry).await;
+    assert!(matches!(result, Err(TrashRestoreError::NotFound { .. })));
+
+    // Attempt deleting a file, asserting that the filesystem no longer reports
+    // it as part of its list of files, restore it and verify that the list of
+    // files and trash has been updated accordingly.
+    let path = path!("/root/src/file_a.txt").as_ref();
+    let trashed_entry = fs.trash(path, Default::default()).await.unwrap();
+
+    assert_eq!(fs.trash_entries().len(), 1);
+    assert_eq!(
+        fs.files(),
+        vec![
+            PathBuf::from(path!("/root/file_c.txt")),
+            PathBuf::from(path!("/root/src/file_b.txt"))
+        ]
+    );
+
+    fs.restore(trashed_entry).await.unwrap();
+
+    assert_eq!(fs.trash_entries().len(), 0);
+    assert_eq!(
+        fs.files(),
+        vec![
+            PathBuf::from(path!("/root/file_c.txt")),
+            PathBuf::from(path!("/root/src/file_a.txt")),
+            PathBuf::from(path!("/root/src/file_b.txt"))
+        ]
+    );
+
+    // Deleting and restoring a directory should also remove all of its files
+    // but create a single trashed entry, which should be removed after
+    // restoration.
+    let options = RemoveOptions {
+        recursive: true,
+        ..Default::default()
+    };
+    let path = path!("/root/src/").as_ref();
+    let trashed_entry = fs.trash(path, options).await.unwrap();
+
+    assert_eq!(fs.trash_entries().len(), 1);
+    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
+
+    fs.restore(trashed_entry).await.unwrap();
+
+    assert_eq!(
+        fs.files(),
+        vec![
+            PathBuf::from(path!("/root/file_c.txt")),
+            PathBuf::from(path!("/root/src/file_a.txt")),
+            PathBuf::from(path!("/root/src/file_b.txt"))
+        ]
+    );
+    assert_eq!(fs.trash_entries().len(), 0);
+
+    // A collision error should be returned when a file is restored to a path
+    // where a file already exists.
+    let path = path!("/root/src/file_a.txt").as_ref();
+    let trashed_entry = fs.trash(path, Default::default()).await.unwrap();
+
+    assert_eq!(fs.trash_entries().len(), 1);
+    assert_eq!(
+        fs.files(),
+        vec![
+            PathBuf::from(path!("/root/file_c.txt")),
+            PathBuf::from(path!("/root/src/file_b.txt"))
+        ]
+    );
+
+    fs.write(path, "New File A".as_bytes()).await.unwrap();
+
+    assert_eq!(fs.trash_entries().len(), 1);
+    assert_eq!(
+        fs.files(),
+        vec![
+            PathBuf::from(path!("/root/file_c.txt")),
+            PathBuf::from(path!("/root/src/file_a.txt")),
+            PathBuf::from(path!("/root/src/file_b.txt"))
+        ]
+    );
+
+    let file_contents = fs.files_with_contents(path);
+    assert!(fs.restore(trashed_entry).await.is_err());
+    assert_eq!(
+        file_contents,
+        vec![(PathBuf::from(path), b"New File A".to_vec())]
+    );
+
+    // A collision error should be returned when a directory is restored to a
+    // path where a directory already exists.
+    let options = RemoveOptions {
+        recursive: true,
+        ..Default::default()
+    };
+    let path = path!("/root/src/").as_ref();
+    let trashed_entry = fs.trash(path, options).await.unwrap();
+
+    assert_eq!(fs.trash_entries().len(), 2);
+    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
+
+    fs.create_dir(path).await.unwrap();
+
+    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
+    assert_eq!(fs.trash_entries().len(), 2);
+
+    let result = fs.restore(trashed_entry).await;
+    assert!(result.is_err());
+
+    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
+    assert_eq!(fs.trash_entries().len(), 2);
+}
+
 #[gpui::test]
 #[ignore = "stress test; run explicitly when needed"]
 async fn test_realfs_watch_stress_reports_missed_paths(

crates/git/src/blame.rs 🔗

@@ -58,7 +58,7 @@ async fn run_git_blame(
     let mut child = {
         let span = ztracing::debug_span!("spawning git-blame command", path = path.as_unix_str());
         let _enter = span.enter();
-        git.build_command(&["blame", "--incremental", "--contents", "-"])
+        git.build_command(&["blame", "--incremental", "--contents", "-", "--"])
             .arg(path.as_unix_str())
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())

crates/git/src/repository.rs 🔗

@@ -1476,7 +1476,7 @@ impl GitRepository for RealGitRepository {
                 } else {
                     log::debug!("removing path {path:?} from the index");
                     let output = git
-                        .build_command(&["update-index", "--force-remove"])
+                        .build_command(&["update-index", "--force-remove", "--"])
                         .envs(env.iter())
                         .arg(path.as_unix_str())
                         .output()
@@ -2114,7 +2114,7 @@ impl GitRepository for RealGitRepository {
             .spawn(async move {
                 let git = git_binary?;
                 let output = git
-                    .build_command(&["stash", "push", "--quiet", "--include-untracked"])
+                    .build_command(&["stash", "push", "--quiet", "--include-untracked", "--"])
                     .envs(env.iter())
                     .args(paths.iter().map(|p| p.as_unix_str()))
                     .output()
@@ -3146,6 +3146,7 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec<OsString> {
         OsString::from("--untracked-files=all"),
         OsString::from("--no-renames"),
         OsString::from("-z"),
+        OsString::from("--"),
     ];
     args.extend(path_prefixes.iter().map(|path_prefix| {
         if path_prefix.is_empty() {

crates/google_ai/src/google_ai.rs 🔗

@@ -518,7 +518,7 @@ pub enum Model {
     #[serde(rename = "custom")]
     Custom {
         name: String,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         max_tokens: u64,
         #[serde(default)]

crates/gpui/src/window.rs 🔗

@@ -1211,18 +1211,38 @@ impl Window {
                     .update(&mut cx, |_, _, cx| cx.thermal_state())
                     .log_err();
 
-                if thermal_state == Some(ThermalState::Serious)
-                    || thermal_state == Some(ThermalState::Critical)
+                // Throttle the frame rate based on conditions (checked in order):
+                // - Inactive window (not focused): cap to ~30fps to save energy
+                // - Thermal pressure (Serious/Critical): cap to ~60fps
+                let min_frame_interval = if !request_frame_options.force_render
+                    && !request_frame_options.require_presentation
+                    && next_frame_callbacks.borrow().is_empty()
                 {
-                    let now = Instant::now();
-                    let last_frame_time = last_frame_time.replace(Some(now));
+                    None
+                } else if !active.get() {
+                    Some(Duration::from_micros(33333))
+                } else if let Some(ThermalState::Critical | ThermalState::Serious) = thermal_state {
+                    Some(Duration::from_micros(16667))
+                } else {
+                    None
+                };
 
-                    if let Some(last_frame) = last_frame_time
-                        && now.duration_since(last_frame) < Duration::from_micros(16667)
+                let now = Instant::now();
+                if let Some(min_interval) = min_frame_interval {
+                    if let Some(last_frame) = last_frame_time.get()
+                        && now.duration_since(last_frame) < min_interval
                     {
+                        // Must still complete the frame on platforms that require it.
+                        // On Wayland, `surface.frame()` was already called to request the
+                        // next frame callback, so we must call `surface.commit()` (via
+                        // `complete_frame`) or the compositor won't send another callback.
+                        handle
+                            .update(&mut cx, |_, window, _| window.complete_frame())
+                            .log_err();
                         return;
                     }
                 }
+                last_frame_time.set(Some(now));
 
                 let next_frame_callbacks = next_frame_callbacks.take();
                 if !next_frame_callbacks.is_empty() {

crates/gpui_linux/src/linux/x11/client.rs 🔗

@@ -214,6 +214,8 @@ pub struct X11ClientState {
 
     pointer_device_states: BTreeMap<xinput::DeviceId, PointerDeviceState>,
 
+    pub(crate) supports_xinput_gestures: bool,
+
     pub(crate) common: LinuxCommon,
     pub(crate) clipboard: Clipboard,
     pub(crate) clipboard_item: Option<ClipboardItem>,
@@ -345,7 +347,8 @@ impl X11Client {
 
         // Announce to X server that XInput up to 2.4 is supported.
         // Version 2.4 is needed for gesture events (GesturePinchBegin/Update/End).
-        // If the server only supports an older version, gesture events simply won't be delivered.
+        // The server responds with the highest version it supports; if < 2.4,
+        // we must not request gesture event masks in XISelectEvents.
         let xinput_version = get_reply(
             || "XInput XiQueryVersion failed",
             xcb_connection.xinput_xi_query_version(2, 4),
@@ -354,6 +357,14 @@ impl X11Client {
             xinput_version.major_version >= 2,
             "XInput version >= 2 required."
         );
+        let supports_xinput_gestures = xinput_version.major_version > 2
+            || (xinput_version.major_version == 2 && xinput_version.minor_version >= 4);
+        log::info!(
+            "XInput version: {}.{}, gesture support: {}",
+            xinput_version.major_version,
+            xinput_version.minor_version,
+            supports_xinput_gestures,
+        );
 
         let pointer_device_states =
             current_pointer_device_states(&xcb_connection, &BTreeMap::new()).unwrap_or_default();
@@ -535,6 +546,8 @@ impl X11Client {
 
             pointer_device_states,
 
+            supports_xinput_gestures,
+
             clipboard,
             clipboard_item: None,
             xdnd_state: Xdnd::default(),
@@ -1593,6 +1606,7 @@ impl LinuxClient for X11Client {
         let scale_factor = state.scale_factor;
         let appearance = state.common.appearance;
         let compositor_gpu = state.compositor_gpu.take();
+        let supports_xinput_gestures = state.supports_xinput_gestures;
         let window = X11Window::new(
             handle,
             X11ClientStatePtr(Rc::downgrade(&self.0)),
@@ -1608,6 +1622,7 @@ impl LinuxClient for X11Client {
             scale_factor,
             appearance,
             parent_window,
+            supports_xinput_gestures,
         )?;
         check_reply(
             || "Failed to set XdndAware property",

crates/gpui_linux/src/linux/x11/window.rs 🔗

@@ -423,6 +423,7 @@ impl X11WindowState {
         scale_factor: f32,
         appearance: WindowAppearance,
         parent_window: Option<X11WindowStatePtr>,
+        supports_xinput_gestures: bool,
     ) -> anyhow::Result<Self> {
         let x_screen_index = params
             .display_id
@@ -660,25 +661,27 @@ impl X11WindowState {
                 ),
             )?;
 
+            let mut xi_event_mask = xinput::XIEventMask::MOTION
+                | xinput::XIEventMask::BUTTON_PRESS
+                | xinput::XIEventMask::BUTTON_RELEASE
+                | xinput::XIEventMask::ENTER
+                | xinput::XIEventMask::LEAVE;
+            if supports_xinput_gestures {
+                // x11rb 0.13 doesn't define XIEventMask constants for gesture
+                // events, so we construct them from the event opcodes (each
+                // XInput event type N maps to mask bit N).
+                xi_event_mask |=
+                    xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_BEGIN_EVENT)
+                        | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_UPDATE_EVENT)
+                        | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_END_EVENT);
+            }
             check_reply(
                 || "X11 XiSelectEvents failed.",
                 xcb.xinput_xi_select_events(
                     x_window,
                     &[xinput::EventMask {
                         deviceid: XINPUT_ALL_DEVICE_GROUPS,
-                        mask: vec![
-                            xinput::XIEventMask::MOTION
-                                | xinput::XIEventMask::BUTTON_PRESS
-                                | xinput::XIEventMask::BUTTON_RELEASE
-                                | xinput::XIEventMask::ENTER
-                                | xinput::XIEventMask::LEAVE
-                                // x11rb 0.13 doesn't define XIEventMask constants for gesture
-                                // events, so we construct them from the event opcodes (each
-                                // XInput event type N maps to mask bit N).
-                                | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_BEGIN_EVENT)
-                                | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_UPDATE_EVENT)
-                                | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_END_EVENT),
-                        ],
+                        mask: vec![xi_event_mask],
                     }],
                 ),
             )?;
@@ -855,6 +858,7 @@ impl X11Window {
         scale_factor: f32,
         appearance: WindowAppearance,
         parent_window: Option<X11WindowStatePtr>,
+        supports_xinput_gestures: bool,
     ) -> anyhow::Result<Self> {
         let ptr = X11WindowStatePtr {
             state: Rc::new(RefCell::new(X11WindowState::new(
@@ -872,6 +876,7 @@ impl X11Window {
                 scale_factor,
                 appearance,
                 parent_window,
+                supports_xinput_gestures,
             )?)),
             callbacks: Rc::new(RefCell::new(Callbacks::default())),
             xcb: xcb.clone(),

crates/gpui_macos/src/platform.rs 🔗

@@ -858,6 +858,7 @@ impl Platform for MacPlatform {
             .background_executor
             .spawn(async move {
                 if let Some(mut child) = new_command("open")
+                    .arg("--")
                     .arg(path)
                     .spawn()
                     .context("invoking open command")

crates/inspector_ui/src/div_inspector.rs 🔗

@@ -498,6 +498,7 @@ impl DivInspector {
             editor.set_show_breakpoints(false, cx);
             editor.set_show_git_diff_gutter(false, cx);
             editor.set_show_runnables(false, cx);
+            editor.disable_mouse_wheel_zoom();
             editor.set_show_edit_predictions(Some(false), window, cx);
             editor.set_minimap_visibility(MinimapVisibility::Disabled, window, cx);
             editor

crates/keymap_editor/src/keymap_editor.rs 🔗

@@ -3318,6 +3318,7 @@ impl ActionArgumentsEditor {
                         window,
                         cx,
                     );
+                    editor.disable_mouse_wheel_zoom();
                     editor.set_searchable(false);
                     editor.disable_scrollbars_and_minimap(window, cx);
                     editor.set_show_edit_predictions(Some(false), window, cx);

crates/markdown/src/markdown.rs 🔗

@@ -249,6 +249,7 @@ pub struct Markdown {
     source: SharedString,
     selection: Selection,
     pressed_link: Option<RenderedLink>,
+    pressed_footnote_ref: Option<RenderedFootnoteRef>,
     autoscroll_request: Option<usize>,
     active_root_block: Option<usize>,
     parsed_markdown: ParsedMarkdown,
@@ -272,6 +273,7 @@ pub struct MarkdownOptions {
     pub parse_links_only: bool,
     pub parse_html: bool,
     pub render_mermaid_diagrams: bool,
+    pub parse_heading_slugs: bool,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq)]
@@ -418,6 +420,7 @@ impl Markdown {
             source,
             selection: Selection::default(),
             pressed_link: None,
+            pressed_footnote_ref: None,
             autoscroll_request: None,
             active_root_block: None,
             should_reparse: false,
@@ -498,6 +501,16 @@ impl Markdown {
         self.pending_parse.is_some()
     }
 
+    pub fn scroll_to_heading(&mut self, slug: &str, cx: &mut Context<Self>) -> Option<usize> {
+        if let Some(source_index) = self.parsed_markdown.heading_slugs.get(slug).copied() {
+            self.autoscroll_request = Some(source_index);
+            cx.notify();
+            Some(source_index)
+        } else {
+            None
+        }
+    }
+
     pub fn source(&self) -> &str {
         &self.source
     }
@@ -521,6 +534,13 @@ impl Markdown {
         cx.refresh_windows();
     }
 
+    fn footnote_definition_content_start(&self, label: &SharedString) -> Option<usize> {
+        self.parsed_markdown
+            .footnote_definitions
+            .get(label)
+            .copied()
+    }
+
     pub fn set_active_root_for_source_index(
         &mut self,
         source_index: Option<usize>,
@@ -669,6 +689,7 @@ impl Markdown {
         let should_parse_links_only = self.options.parse_links_only;
         let should_parse_html = self.options.parse_html;
         let should_render_mermaid_diagrams = self.options.render_mermaid_diagrams;
+        let should_parse_heading_slugs = self.options.parse_heading_slugs;
         let language_registry = self.language_registry.clone();
         let fallback = self.fallback_code_block_language.clone();
 
@@ -683,17 +704,22 @@ impl Markdown {
                         root_block_starts: Arc::default(),
                         html_blocks: BTreeMap::default(),
                         mermaid_diagrams: BTreeMap::default(),
+                        heading_slugs: HashMap::default(),
+                        footnote_definitions: HashMap::default(),
                     },
                     Default::default(),
                 );
             }
 
-            let parsed = parse_markdown_with_options(&source, should_parse_html);
+            let parsed =
+                parse_markdown_with_options(&source, should_parse_html, should_parse_heading_slugs);
             let events = parsed.events;
             let language_names = parsed.language_names;
             let paths = parsed.language_paths;
             let root_block_starts = parsed.root_block_starts;
             let html_blocks = parsed.html_blocks;
+            let heading_slugs = parsed.heading_slugs;
+            let footnote_definitions = parsed.footnote_definitions;
             let mermaid_diagrams = if should_render_mermaid_diagrams {
                 extract_mermaid_diagrams(&source, &events)
             } else {
@@ -760,6 +786,8 @@ impl Markdown {
                     root_block_starts: Arc::from(root_block_starts),
                     html_blocks,
                     mermaid_diagrams,
+                    heading_slugs,
+                    footnote_definitions,
                 },
                 images_by_source_offset,
             )
@@ -883,6 +911,8 @@ pub struct ParsedMarkdown {
     pub root_block_starts: Arc<[usize]>,
     pub(crate) html_blocks: BTreeMap<usize, html::html_parser::ParsedHtmlBlock>,
     pub(crate) mermaid_diagrams: BTreeMap<usize, ParsedMarkdownMermaidDiagram>,
+    pub heading_slugs: HashMap<SharedString, usize>,
+    pub footnote_definitions: HashMap<SharedString, usize>,
 }
 
 impl ParsedMarkdown {
@@ -1283,18 +1313,22 @@ impl MarkdownElement {
             return;
         }
 
-        let is_hovering_link = hitbox.is_hovered(window)
+        let is_hovering_clickable = hitbox.is_hovered(window)
             && !self.markdown.read(cx).selection.pending
             && rendered_text
-                .link_for_position(window.mouse_position())
-                .is_some();
-
-        if !self.style.prevent_mouse_interaction {
-            if is_hovering_link {
-                window.set_cursor_style(CursorStyle::PointingHand, hitbox);
-            } else {
-                window.set_cursor_style(CursorStyle::IBeam, hitbox);
-            }
+                .source_index_for_position(window.mouse_position())
+                .ok()
+                .is_some_and(|source_index| {
+                    rendered_text.link_for_source_index(source_index).is_some()
+                        || rendered_text
+                            .footnote_ref_for_source_index(source_index)
+                            .is_some()
+                });
+
+        if is_hovering_clickable {
+            window.set_cursor_style(CursorStyle::PointingHand, hitbox);
+        } else {
+            window.set_cursor_style(CursorStyle::IBeam, hitbox);
         }
 
         let on_open_url = self.on_url_click.take();
@@ -1319,13 +1353,27 @@ impl MarkdownElement {
             move |markdown, event: &MouseDownEvent, phase, window, cx| {
                 if hitbox.is_hovered(window) {
                     if phase.bubble() {
-                        if let Some(link) = rendered_text.link_for_position(event.position) {
-                            markdown.pressed_link = Some(link.clone());
-                        } else {
-                            let source_index =
-                                match rendered_text.source_index_for_position(event.position) {
-                                    Ok(ix) | Err(ix) => ix,
-                                };
+                        let position_result =
+                            rendered_text.source_index_for_position(event.position);
+
+                        if let Ok(source_index) = position_result {
+                            if let Some(footnote_ref) =
+                                rendered_text.footnote_ref_for_source_index(source_index)
+                            {
+                                markdown.pressed_footnote_ref = Some(footnote_ref.clone());
+                            } else if let Some(link) =
+                                rendered_text.link_for_source_index(source_index)
+                            {
+                                markdown.pressed_link = Some(link.clone());
+                            }
+                        }
+
+                        if markdown.pressed_footnote_ref.is_none()
+                            && markdown.pressed_link.is_none()
+                        {
+                            let source_index = match position_result {
+                                Ok(ix) | Err(ix) => ix,
+                            };
                             if let Some(handler) = on_source_click.as_ref() {
                                 let blocked = handler(source_index, event.click_count, window, cx);
                                 if blocked {
@@ -1381,7 +1429,7 @@ impl MarkdownElement {
         self.on_mouse_event(window, cx, {
             let rendered_text = rendered_text.clone();
             let hitbox = hitbox.clone();
-            let was_hovering_link = is_hovering_link;
+            let was_hovering_clickable = is_hovering_clickable;
             move |markdown, event: &MouseMoveEvent, phase, window, cx| {
                 if phase.capture() {
                     return;
@@ -1397,9 +1445,17 @@ impl MarkdownElement {
                     markdown.autoscroll_request = Some(source_index);
                     cx.notify();
                 } else {
-                    let is_hovering_link = hitbox.is_hovered(window)
-                        && rendered_text.link_for_position(event.position).is_some();
-                    if is_hovering_link != was_hovering_link {
+                    let is_hovering_clickable = hitbox.is_hovered(window)
+                        && rendered_text
+                            .source_index_for_position(event.position)
+                            .ok()
+                            .is_some_and(|source_index| {
+                                rendered_text.link_for_source_index(source_index).is_some()
+                                    || rendered_text
+                                        .footnote_ref_for_source_index(source_index)
+                                        .is_some()
+                            });
+                    if is_hovering_clickable != was_hovering_clickable {
                         cx.notify();
                     }
                 }
@@ -1409,8 +1465,21 @@ impl MarkdownElement {
             let rendered_text = rendered_text.clone();
             move |markdown, event: &MouseUpEvent, phase, window, cx| {
                 if phase.bubble() {
-                    if let Some(pressed_link) = markdown.pressed_link.take()
-                        && Some(&pressed_link) == rendered_text.link_for_position(event.position)
+                    let source_index = rendered_text.source_index_for_position(event.position).ok();
+                    if let Some(pressed_footnote_ref) = markdown.pressed_footnote_ref.take()
+                        && source_index
+                            .and_then(|ix| rendered_text.footnote_ref_for_source_index(ix))
+                            == Some(&pressed_footnote_ref)
+                    {
+                        if let Some(source_index) =
+                            markdown.footnote_definition_content_start(&pressed_footnote_ref.label)
+                        {
+                            markdown.autoscroll_request = Some(source_index);
+                            cx.notify();
+                        }
+                    } else if let Some(pressed_link) = markdown.pressed_link.take()
+                        && source_index.and_then(|ix| rendered_text.link_for_source_index(ix))
+                            == Some(&pressed_link)
                     {
                         if let Some(open_url) = on_open_url.as_ref() {
                             open_url(pressed_link.destination_url, window, cx);
@@ -1801,6 +1870,36 @@ impl Element for MarkdownElement {
                                 builder.push_text_style(style)
                             }
                         }
+                        MarkdownTag::FootnoteDefinition(label) => {
+                            if !builder.rendered_footnote_separator {
+                                builder.rendered_footnote_separator = true;
+                                builder.push_div(
+                                    div()
+                                        .border_t_1()
+                                        .mt_2()
+                                        .border_color(self.style.rule_color),
+                                    range,
+                                    markdown_end,
+                                );
+                                builder.pop_div();
+                            }
+                            builder.push_div(
+                                div()
+                                    .pt_1()
+                                    .mb_1()
+                                    .line_height(rems(1.3))
+                                    .text_size(rems(0.85))
+                                    .h_flex()
+                                    .items_start()
+                                    .gap_2()
+                                    .child(
+                                        div().text_size(rems(0.85)).child(format!("{}.", label)),
+                                    ),
+                                range,
+                                markdown_end,
+                            );
+                            builder.push_div(div().flex_1().w_0(), range, markdown_end);
+                        }
                         MarkdownTag::MetadataBlock(_) => {}
                         MarkdownTag::Table(alignments) => {
                             builder.table.start(alignments.clone());
@@ -1956,6 +2055,10 @@ impl Element for MarkdownElement {
                         builder.pop_div();
                         builder.table.end_cell();
                     }
+                    MarkdownTagEnd::FootnoteDefinition => {
+                        builder.pop_div();
+                        builder.pop_div();
+                    }
                     _ => log::debug!("unsupported markdown tag end: {:?}", tag),
                 },
                 MarkdownEvent::Text => {
@@ -2011,7 +2114,12 @@ impl Element for MarkdownElement {
                 MarkdownEvent::TaskListMarker(_) => {
                     // handled inside the `MarkdownTag::Item` case
                 }
-                _ => log::debug!("unsupported markdown event {:?}", event),
+                MarkdownEvent::FootnoteReference(label) => {
+                    builder.push_footnote_ref(label.clone(), range.clone());
+                    builder.push_text_style(self.style.link.clone());
+                    builder.push_text(&format!("[{label}]"), range.clone());
+                    builder.pop_text_style();
+                }
             }
         }
         if self.style.code_block_overflow_x_scroll {
@@ -2253,8 +2361,10 @@ struct MarkdownElementBuilder {
     rendered_lines: Vec<RenderedLine>,
     pending_line: PendingLine,
     rendered_links: Vec<RenderedLink>,
+    rendered_footnote_refs: Vec<RenderedFootnoteRef>,
     current_source_index: usize,
     html_comment: bool,
+    rendered_footnote_separator: bool,
     base_text_style: TextStyle,
     text_style_stack: Vec<TextStyleRefinement>,
     code_block_stack: Vec<Option<Arc<Language>>>,
@@ -2289,8 +2399,10 @@ impl MarkdownElementBuilder {
             rendered_lines: Vec::new(),
             pending_line: PendingLine::default(),
             rendered_links: Vec::new(),
+            rendered_footnote_refs: Vec::new(),
             current_source_index: 0,
             html_comment: false,
+            rendered_footnote_separator: false,
             base_text_style,
             text_style_stack: Vec::new(),
             code_block_stack: Vec::new(),
@@ -2442,6 +2554,13 @@ impl MarkdownElementBuilder {
         });
     }
 
+    fn push_footnote_ref(&mut self, label: SharedString, source_range: Range<usize>) {
+        self.rendered_footnote_refs.push(RenderedFootnoteRef {
+            source_range,
+            label,
+        });
+    }
+
     fn push_text(&mut self, text: &str, source_range: Range<usize>) {
         self.pending_line.source_mappings.push(SourceMapping {
             rendered_index: self.pending_line.text.len(),
@@ -2559,6 +2678,7 @@ impl MarkdownElementBuilder {
             text: RenderedText {
                 lines: self.rendered_lines.into(),
                 links: self.rendered_links.into(),
+                footnote_refs: self.rendered_footnote_refs.into(),
             },
         }
     }
@@ -2673,6 +2793,7 @@ pub struct RenderedMarkdown {
 struct RenderedText {
     lines: Rc<[RenderedLine]>,
     links: Rc<[RenderedLink]>,
+    footnote_refs: Rc<[RenderedFootnoteRef]>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -2681,6 +2802,12 @@ struct RenderedLink {
     destination_url: SharedString,
 }
 
+#[derive(Debug, Clone, Eq, PartialEq)]
+struct RenderedFootnoteRef {
+    source_range: Range<usize>,
+    label: SharedString,
+}
+
 impl RenderedText {
     fn source_index_for_position(&self, position: Point<Pixels>) -> Result<usize, usize> {
         let mut lines = self.lines.iter().peekable();
@@ -2827,12 +2954,17 @@ impl RenderedText {
         accumulator
     }
 
-    fn link_for_position(&self, position: Point<Pixels>) -> Option<&RenderedLink> {
-        let source_index = self.source_index_for_position(position).ok()?;
+    fn link_for_source_index(&self, source_index: usize) -> Option<&RenderedLink> {
         self.links
             .iter()
             .find(|link| link.source_range.contains(&source_index))
     }
+
+    fn footnote_ref_for_source_index(&self, source_index: usize) -> Option<&RenderedFootnoteRef> {
+        self.footnote_refs
+            .iter()
+            .find(|fref| fref.source_range.contains(&source_index))
+    }
 }
 
 #[cfg(test)]
@@ -3120,7 +3252,7 @@ mod tests {
     #[test]
     fn test_table_checkbox_detection() {
         let md = "| Done |\n|------|\n| [x] |\n| [ ] |";
-        let events = crate::parser::parse_markdown_with_options(md, false).events;
+        let events = crate::parser::parse_markdown_with_options(md, false, false).events;
 
         let mut in_table = false;
         let mut cell_texts: Vec<String> = Vec::new();
@@ -3338,7 +3470,7 @@ mod tests {
     }
 
     fn has_code_block(markdown: &str) -> bool {
-        let parsed_data = parse_markdown_with_options(markdown, false);
+        let parsed_data = parse_markdown_with_options(markdown, false, false);
         parsed_data
             .events
             .iter()

crates/markdown/src/mermaid.rs 🔗

@@ -371,7 +371,7 @@ mod tests {
     #[test]
     fn test_extract_mermaid_diagrams_parses_scale() {
         let markdown = "```mermaid 150\ngraph TD;\n```\n\n```rust\nfn main() {}\n```";
-        let events = crate::parser::parse_markdown_with_options(markdown, false).events;
+        let events = crate::parser::parse_markdown_with_options(markdown, false, false).events;
         let diagrams = extract_mermaid_diagrams(markdown, &events);
 
         assert_eq!(diagrams.len(), 1);

crates/markdown/src/parser.rs 🔗

@@ -1,12 +1,12 @@
+use collections::{BTreeMap, HashMap, HashSet};
 use gpui::SharedString;
 use linkify::LinkFinder;
 pub use pulldown_cmark::TagEnd as MarkdownTagEnd;
 use pulldown_cmark::{
     Alignment, CowStr, HeadingLevel, LinkType, MetadataBlockKind, Options, Parser,
 };
-use std::{collections::BTreeMap, ops::Range, sync::Arc};
-
-use collections::HashSet;
+use std::{ops::Range, sync::Arc};
+use util::markdown::generate_heading_slug;
 
 use crate::{html, path_range::PathWithRange};
 
@@ -37,6 +37,8 @@ pub(crate) struct ParsedMarkdownData {
     pub language_paths: HashSet<Arc<str>>,
     pub root_block_starts: Vec<usize>,
     pub html_blocks: BTreeMap<usize, html::html_parser::ParsedHtmlBlock>,
+    pub heading_slugs: HashMap<SharedString, usize>,
+    pub footnote_definitions: HashMap<SharedString, usize>,
 }
 
 impl ParseState {
@@ -80,7 +82,78 @@ impl ParseState {
     }
 }
 
-pub(crate) fn parse_markdown_with_options(text: &str, parse_html: bool) -> ParsedMarkdownData {
+const MAX_DUPLICATE_HEADING_SLUGS: usize = 128;
+
+fn build_heading_slugs(
+    source: &str,
+    events: &[(Range<usize>, MarkdownEvent)],
+) -> HashMap<SharedString, usize> {
+    let mut slugs = HashMap::default();
+    let mut slug_counts: HashMap<String, usize> = HashMap::default();
+    let mut inside_heading = false;
+    let mut heading_text = String::new();
+    let mut heading_source_start: Option<usize> = None;
+
+    for (range, event) in events {
+        match event {
+            MarkdownEvent::Start(MarkdownTag::Heading { .. }) => {
+                inside_heading = true;
+                heading_text.clear();
+                heading_source_start = None;
+            }
+            MarkdownEvent::End(MarkdownTagEnd::Heading(_)) => {
+                if inside_heading {
+                    let source_offset = heading_source_start.unwrap_or(range.start);
+                    let base_slug = generate_heading_slug(&heading_text);
+                    let count = slug_counts.entry(base_slug.clone()).or_insert(0);
+                    let mut slug = if *count == 0 {
+                        base_slug.clone()
+                    } else {
+                        format!("{base_slug}-{count}")
+                    };
+                    *count += 1;
+                    while slugs.contains_key(slug.as_str()) {
+                        let Some(count) = slug_counts.get_mut(&base_slug) else {
+                            slug.clear();
+                            break;
+                        };
+                        if *count >= MAX_DUPLICATE_HEADING_SLUGS {
+                            slug.clear();
+                            break;
+                        }
+                        slug = format!("{base_slug}-{count}");
+                        *count += 1;
+                    }
+                    if !slug.is_empty() {
+                        slugs.insert(SharedString::from(slug), source_offset);
+                    }
+                    inside_heading = false;
+                }
+            }
+            MarkdownEvent::Text | MarkdownEvent::Code if inside_heading => {
+                if heading_source_start.is_none() {
+                    heading_source_start = Some(range.start);
+                }
+                heading_text.push_str(&source[range.clone()]);
+            }
+            MarkdownEvent::SubstitutedText(substituted) if inside_heading => {
+                if heading_source_start.is_none() {
+                    heading_source_start = Some(range.start);
+                }
+                heading_text.push_str(substituted);
+            }
+            _ => {}
+        }
+    }
+
+    slugs
+}
+
+pub(crate) fn parse_markdown_with_options(
+    text: &str,
+    parse_html: bool,
+    parse_heading_slugs: bool,
+) -> ParsedMarkdownData {
     let mut state = ParseState::default();
     let mut language_names = HashSet::default();
     let mut language_paths = HashSet::default();
@@ -427,9 +500,10 @@ pub(crate) fn parse_markdown_with_options(text: &str, parse_html: bool) -> Parse
             pulldown_cmark::Event::InlineHtml(_) => {
                 state.push_event(range, MarkdownEvent::InlineHtml)
             }
-            pulldown_cmark::Event::FootnoteReference(_) => {
-                state.push_event(range, MarkdownEvent::FootnoteReference)
-            }
+            pulldown_cmark::Event::FootnoteReference(label) => state.push_event(
+                range,
+                MarkdownEvent::FootnoteReference(SharedString::from(label.to_string())),
+            ),
             pulldown_cmark::Event::SoftBreak => state.push_event(range, MarkdownEvent::SoftBreak),
             pulldown_cmark::Event::HardBreak => state.push_event(range, MarkdownEvent::HardBreak),
             pulldown_cmark::Event::Rule => state.push_event(range, MarkdownEvent::Rule),
@@ -440,13 +514,48 @@ pub(crate) fn parse_markdown_with_options(text: &str, parse_html: bool) -> Parse
         }
     }
 
+    let heading_slugs = if parse_heading_slugs {
+        build_heading_slugs(text, &state.events)
+    } else {
+        HashMap::default()
+    };
+    let footnote_definitions = build_footnote_definitions(&state.events);
+
     ParsedMarkdownData {
         events: state.events,
         language_names,
         language_paths,
         root_block_starts: state.root_block_starts,
         html_blocks,
+        heading_slugs,
+        footnote_definitions,
+    }
+}
+
+fn build_footnote_definitions(
+    events: &[(Range<usize>, MarkdownEvent)],
+) -> HashMap<SharedString, usize> {
+    let mut definitions = HashMap::default();
+    let mut current_label: Option<SharedString> = None;
+
+    for (range, event) in events {
+        match event {
+            MarkdownEvent::Start(MarkdownTag::FootnoteDefinition(label)) => {
+                current_label = Some(label.clone());
+            }
+            MarkdownEvent::End(MarkdownTagEnd::FootnoteDefinition) => {
+                current_label = None;
+            }
+            MarkdownEvent::Text if current_label.is_some() => {
+                if let Some(label) = current_label.take() {
+                    definitions.entry(label).or_insert(range.start);
+                }
+            }
+            _ => {}
+        }
     }
+
+    definitions
 }
 
 pub fn parse_links_only(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
@@ -510,7 +619,7 @@ pub enum MarkdownEvent {
     /// A reference to a footnote with given label, which may or may not be defined
     /// by an event with a `Tag::FootnoteDefinition` tag. Definitions and references to them may
     /// occur in any order.
-    FootnoteReference,
+    FootnoteReference(SharedString),
     /// A soft line break.
     SoftBreak,
     /// A hard line break.
@@ -697,7 +806,7 @@ mod tests {
     #[test]
     fn test_html_comments() {
         assert_eq!(
-            parse_markdown_with_options("  <!--\nrdoc-file=string.c\n-->\nReturns", false),
+            parse_markdown_with_options("  <!--\nrdoc-file=string.c\n-->\nReturns", false, false),
             ParsedMarkdownData {
                 events: vec![
                     (2..30, RootStart),
@@ -725,7 +834,8 @@ mod tests {
         assert_eq!(
             parse_markdown_with_options(
                 "&nbsp;&nbsp; https://some.url some \\`&#9658;\\` text",
-                false
+                false,
+                false,
             ),
             ParsedMarkdownData {
                 events: vec![
@@ -764,7 +874,8 @@ mod tests {
         assert_eq!(
             parse_markdown_with_options(
                 "You can use the [GitHub Search API](https://docs.github.com/en",
-                false
+                false,
+                false,
             )
             .events,
             vec![
@@ -797,7 +908,8 @@ mod tests {
         assert_eq!(
             parse_markdown_with_options(
                 "-- --- ... \"double quoted\" 'single quoted' ----------",
-                false
+                false,
+                false,
             ),
             ParsedMarkdownData {
                 events: vec![
@@ -830,7 +942,7 @@ mod tests {
     #[test]
     fn test_code_block_metadata() {
         assert_eq!(
-            parse_markdown_with_options("```rust\nfn main() {\n let a = 1;\n}\n```", false),
+            parse_markdown_with_options("```rust\nfn main() {\n let a = 1;\n}\n```", false, false),
             ParsedMarkdownData {
                 events: vec![
                     (0..37, RootStart),
@@ -858,7 +970,7 @@ mod tests {
             }
         );
         assert_eq!(
-            parse_markdown_with_options("    fn main() {}", false),
+            parse_markdown_with_options("    fn main() {}", false, false),
             ParsedMarkdownData {
                 events: vec![
                     (4..16, RootStart),
@@ -883,7 +995,7 @@ mod tests {
     }
 
     fn assert_code_block_does_not_emit_links(markdown: &str) {
-        let parsed = parse_markdown_with_options(markdown, false);
+        let parsed = parse_markdown_with_options(markdown, false, false);
         let mut code_block_depth = 0;
         let mut code_block_count = 0;
         let mut saw_text_inside_code_block = false;
@@ -937,7 +1049,7 @@ mod tests {
     #[test]
     fn test_metadata_blocks_do_not_affect_root_blocks() {
         assert_eq!(
-            parse_markdown_with_options("+++\ntitle = \"Example\"\n+++\n\nParagraph", false),
+            parse_markdown_with_options("+++\ntitle = \"Example\"\n+++\n\nParagraph", false, false),
             ParsedMarkdownData {
                 events: vec![
                     (27..36, RootStart),
@@ -959,7 +1071,7 @@ mod tests {
 |------|---------|
 | [x]  | Fix bug |
 | [ ]  | Add feature |";
-        let parsed = parse_markdown_with_options(markdown, false);
+        let parsed = parse_markdown_with_options(markdown, false, false);
 
         let mut in_table = false;
         let mut saw_task_list_marker = false;
@@ -1029,6 +1141,48 @@ mod tests {
         assert_eq!(extract_code_block_content_range(input), 3..3);
     }
 
    // A footnote reference carries its label, and the definition's position
    // (offset of its first text event, here 52) is recorded.
    #[test]
    fn test_footnotes() {
        let parsed = parse_markdown_with_options(
            "Text with a footnote[^1] and some more text.\n\n[^1]: This is the footnote content.",
            false,
            false,
        );
        assert_eq!(
            parsed.events,
            vec![
                (0..45, RootStart),
                (0..45, Start(Paragraph)),
                (0..20, Text),
                (20..24, FootnoteReference("1".into())),
                (24..44, Text),
                (0..45, End(MarkdownTagEnd::Paragraph)),
                (0..45, RootEnd(0)),
                (46..81, RootStart),
                (46..81, Start(FootnoteDefinition("1".into()))),
                (52..81, Start(Paragraph)),
                (52..81, Text),
                (52..81, End(MarkdownTagEnd::Paragraph)),
                (46..81, End(MarkdownTagEnd::FootnoteDefinition)),
                (46..81, RootEnd(1)),
            ]
        );
        assert_eq!(parsed.footnote_definitions.len(), 1);
        assert_eq!(parsed.footnote_definitions.get("1").copied(), Some(52));
    }
+
    // Multiple distinct footnote labels each get their own definition entry.
    #[test]
    fn test_footnote_definitions_multiple() {
        let parsed = parse_markdown_with_options(
            "Text[^a] and[^b].\n\n[^a]: First.\n\n[^b]: Second.",
            false,
            false,
        );
        assert_eq!(parsed.footnote_definitions.len(), 2);
        assert!(parsed.footnote_definitions.contains_key("a"));
        assert!(parsed.footnote_definitions.contains_key("b"));
    }
+
     #[test]
     fn test_links_split_across_fragments() {
         // This test verifies that links split across multiple text fragments due to escaping or other issues
@@ -1038,7 +1192,8 @@ mod tests {
         assert_eq!(
             parse_markdown_with_options(
                 "https:/\\/example.com is equivalent to https://example&#46;com!",
-                false
+                false,
+                false,
             )
             .events,
             vec![
@@ -1079,7 +1234,8 @@ mod tests {
         assert_eq!(
             parse_markdown_with_options(
                 "Visit https://example.com/cat\\/é&#8205;☕ for coffee!",
-                false
+                false,
+                false,
             )
             .events,
             [
@@ -1106,4 +1262,42 @@ mod tests {
             ]
         );
     }
+
    // Slugs are lowercased/hyphenated heading text; a repeated heading gets a
    // numeric `-1` suffix.
    #[test]
    fn test_heading_slugs() {
        let parsed = parse_markdown_with_options(
            "# Hello World\n\n## Code `block`\n\n### Third Level\n\n#### Fourth Level\n\n## Hello World",
            false,
            true,
        );
        assert_eq!(parsed.heading_slugs.len(), 5);
        assert!(parsed.heading_slugs.contains_key("hello-world"));
        assert!(parsed.heading_slugs.contains_key("code-block"));
        assert!(parsed.heading_slugs.contains_key("third-level"));
        assert!(parsed.heading_slugs.contains_key("fourth-level"));
        assert!(parsed.heading_slugs.contains_key("hello-world-1"));
    }
+
    // Duplicate headings keep document order: the earlier heading's source
    // offset is smaller than its deduplicated successor's.
    #[test]
    fn test_heading_source_index_for_slug() {
        let parsed = parse_markdown_with_options(
            "# Duplicate\n\nText\n\n## Duplicate\n\nMore text",
            false,
            true,
        );
        let first = parsed.heading_slugs.get("duplicate").copied();
        let second = parsed.heading_slugs.get("duplicate-1").copied();
        assert!(first.is_some());
        assert!(second.is_some());
        assert!(first.expect("first slug missing") < second.expect("second slug missing"));
    }
+
    // A heading whose natural slug ("foo-1") collides with an already-emitted
    // dedup suffix gets a further suffix ("foo-1-1").
    #[test]
    fn test_heading_slug_collision_with_dedup_suffix() {
        let parsed = parse_markdown_with_options("# Foo\n\n## Foo\n\n## Foo 1", false, true);
        assert_eq!(parsed.heading_slugs.len(), 3);
        assert!(parsed.heading_slugs.contains_key("foo"));
        assert!(parsed.heading_slugs.contains_key("foo-1"));
        assert!(parsed.heading_slugs.contains_key("foo-1-1"));
    }
 }

crates/markdown_preview/src/markdown_preview_view.rs 🔗

@@ -21,6 +21,7 @@ use project::search::SearchQuery;
 use settings::Settings;
 use theme_settings::ThemeSettings;
 use ui::{WithScrollbar, prelude::*};
+use util::markdown::split_local_url_fragment;
 use util::normalize_path;
 use workspace::item::{Item, ItemBufferKind, ItemHandle};
 use workspace::searchable::{
@@ -218,6 +219,7 @@ impl MarkdownPreviewView {
                     MarkdownOptions {
                         parse_html: true,
                         render_mermaid_diagrams: true,
+                        parse_heading_slugs: true,
                         ..Default::default()
                     },
                     cx,
@@ -580,8 +582,6 @@ impl MarkdownPreviewView {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> MarkdownElement {
-        let workspace = self.workspace.clone();
-        let base_directory = self.base_directory.clone();
         let active_editor = self
             .active_editor
             .as_ref()
@@ -615,8 +615,20 @@ impl MarkdownPreviewView {
                 )
             }
         })
-        .on_url_click(move |url, window, cx| {
-            open_preview_url(url, base_directory.clone(), &workspace, window, cx);
+        .on_url_click({
+            let view_handle = cx.entity().downgrade();
+            let workspace = self.workspace.clone();
+            let base_directory = self.base_directory.clone();
+            move |url, window, cx| {
+                handle_url_click(
+                    url,
+                    &view_handle,
+                    base_directory.clone(),
+                    &workspace,
+                    window,
+                    cx,
+                );
+            }
         });
 
         if let Some(active_editor) = active_editor {
@@ -655,6 +667,56 @@ impl MarkdownPreviewView {
     }
 }
 
+fn handle_url_click(
+    url: SharedString,
+    view: &WeakEntity<MarkdownPreviewView>,
+    base_directory: Option<PathBuf>,
+    workspace: &WeakEntity<Workspace>,
+    window: &mut Window,
+    cx: &mut App,
+) {
+    let (path_part, fragment) = split_local_url_fragment(url.as_ref());
+
+    if path_part.is_empty() {
+        if let Some(fragment) = fragment {
+            let view = view.clone();
+            let slug = SharedString::from(fragment.to_string());
+            window.defer(cx, move |window, cx| {
+                if let Some(view) = view.upgrade() {
+                    let markdown = view.read(cx).markdown.clone();
+                    let active_editor = view
+                        .read(cx)
+                        .active_editor
+                        .as_ref()
+                        .map(|state| state.editor.clone());
+
+                    let source_index =
+                        markdown.update(cx, |markdown, cx| markdown.scroll_to_heading(&slug, cx));
+
+                    if let Some(source_index) = source_index {
+                        if let Some(editor) = active_editor {
+                            MarkdownPreviewView::move_cursor_to_source_index(
+                                &editor,
+                                source_index,
+                                window,
+                                cx,
+                            );
+                        }
+                    }
+                }
+            });
+        }
+    } else {
+        open_preview_url(
+            SharedString::from(path_part.to_string()),
+            base_directory,
+            workspace,
+            window,
+            cx,
+        );
+    }
+}
+
 fn open_preview_url(
     url: SharedString,
     base_directory: Option<PathBuf>,

crates/migrator/src/migrations.rs 🔗

@@ -328,3 +328,9 @@ pub(crate) mod m_2026_04_01 {
 
     pub(crate) use settings::restructure_profiles_with_settings_key;
 }
+
/// Migrations introduced on 2026-04-10: renames the agent tool `web_search`
/// to `search_web` in user settings.
pub(crate) mod m_2026_04_10 {
    mod settings;

    pub(crate) use settings::rename_web_search_to_search_web;
}

crates/migrator/src/migrations/m_2026_04_10/settings.rs 🔗

@@ -0,0 +1,64 @@
+use anyhow::Result;
+use serde_json::Value;
+
+use crate::migrations::migrate_settings;
+
// JSON object keys traversed while locating tool settings.
const AGENT_KEY: &str = "agent";
const PROFILES_KEY: &str = "profiles";
const SETTINGS_KEY: &str = "settings";
const TOOL_PERMISSIONS_KEY: &str = "tool_permissions";
const TOOLS_KEY: &str = "tools";
// The tool rename this migration performs.
const OLD_TOOL_NAME: &str = "web_search";
const NEW_TOOL_NAME: &str = "search_web";
+
/// Settings migration (2026-04-10): renames the agent tool `web_search` to
/// `search_web` everywhere it can appear in the settings JSON (tool
/// permissions, agent profiles, platform/release-channel overrides, and
/// root-level settings profiles).
pub fn rename_web_search_to_search_web(value: &mut Value) -> Result<()> {
    migrate_settings(value, &mut migrate_one)
}
+
+fn migrate_one(object: &mut serde_json::Map<String, Value>) -> Result<()> {
+    migrate_agent_value(object)?;
+
+    // Root-level profiles have a `settings` wrapper after m_2026_04_01,
+    // but `migrate_settings` calls us with the profile map directly,
+    // so we need to look inside `settings` too.
+    if let Some(settings) = object.get_mut(SETTINGS_KEY).and_then(|v| v.as_object_mut()) {
+        migrate_agent_value(settings)?;
+    }
+
+    Ok(())
+}
+
+fn migrate_agent_value(object: &mut serde_json::Map<String, Value>) -> Result<()> {
+    let Some(agent) = object.get_mut(AGENT_KEY).and_then(|v| v.as_object_mut()) else {
+        return Ok(());
+    };
+
+    if let Some(tools) = agent
+        .get_mut(TOOL_PERMISSIONS_KEY)
+        .and_then(|v| v.as_object_mut())
+        .and_then(|tp| tp.get_mut(TOOLS_KEY))
+        .and_then(|v| v.as_object_mut())
+    {
+        rename_key(tools);
+    }
+
+    if let Some(profiles) = agent.get_mut(PROFILES_KEY).and_then(|v| v.as_object_mut()) {
+        for (_profile_name, profile) in profiles.iter_mut() {
+            if let Some(tools) = profile
+                .as_object_mut()
+                .and_then(|p| p.get_mut(TOOLS_KEY))
+                .and_then(|v| v.as_object_mut())
+            {
+                rename_key(tools);
+            }
+        }
+    }
+
+    Ok(())
+}
+
/// Moves the value stored under `web_search` to `search_web`; no-op when the
/// old key is absent. If a `search_web` entry already exists, `insert`
/// overwrites it with the old `web_search` value.
fn rename_key(tools: &mut serde_json::Map<String, Value>) {
    if let Some(value) = tools.remove(OLD_TOOL_NAME) {
        tools.insert(NEW_TOOL_NAME.to_string(), value);
    }
}

crates/migrator/src/migrator.rs 🔗

@@ -249,6 +249,7 @@ pub fn migrate_settings(text: &str) -> Result<Option<String>> {
         ),
         MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum),
         MigrationType::Json(migrations::m_2026_04_01::restructure_profiles_with_settings_key),
+        MigrationType::Json(migrations::m_2026_04_10::rename_web_search_to_search_web),
     ];
     run_migrations(text, migrations)
 }
@@ -4682,4 +4683,301 @@ mod tests {
             None,
         );
     }
+
    // `agent.tool_permissions.tools.web_search` is renamed to `search_web`.
    #[test]
    fn test_rename_web_search_to_search_web_in_tool_permissions() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "agent": {
                    "tool_permissions": {
                        "tools": {
                            "web_search": {
                                "allow": true
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            Some(
                &r#"
                {
                    "agent": {
                        "tool_permissions": {
                            "tools": {
                                "search_web": {
                                    "allow": true
                                }
                            }
                        }
                    }
                }
                "#
                .unindent(),
            ),
        );
    }
+
    // `agent.profiles.<name>.tools.web_search` is renamed to `search_web`.
    #[test]
    fn test_rename_web_search_to_search_web_in_profiles() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "agent": {
                    "profiles": {
                        "write": {
                            "tools": {
                                "web_search": false
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            Some(
                &r#"
                {
                    "agent": {
                        "profiles": {
                            "write": {
                                "tools": {
                                    "search_web": false
                                }
                            }
                        }
                    }
                }
                "#
                .unindent(),
            ),
        );
    }
+
    // Already-migrated settings must not be rewritten (migration returns None).
    #[test]
    fn test_rename_web_search_to_search_web_no_change_when_already_migrated() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "agent": {
                    "tool_permissions": {
                        "tools": {
                            "search_web": {
                                "allow": true
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            None,
        );
    }
+
    // NOTE(review): despite the "no_clobber" name, the expectation below has
    // the old `web_search` value ("allow": false) *overwriting* the existing
    // `search_web` entry ("allow": true) — the test pins that only one key
    // remains. Confirm this precedence is intended.
    #[test]
    fn test_rename_web_search_to_search_web_no_clobber() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "agent": {
                    "tool_permissions": {
                        "tools": {
                            "web_search": {
                                "allow": false
                            },
                            "search_web": {
                                "allow": true
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            Some(
                &r#"
                {
                    "agent": {
                        "tool_permissions": {
                            "tools": {
                                "search_web": {
                                    "allow": false
                                }
                            }
                        }
                    }
                }
                "#
                .unindent(),
            ),
        );
    }
+
    // The rename also applies inside platform override sections (e.g. "linux").
    #[test]
    fn test_rename_web_search_to_search_web_platform_override() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "linux": {
                    "agent": {
                        "tool_permissions": {
                            "tools": {
                                "web_search": {
                                    "allow": true
                                }
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            Some(
                &r#"
                {
                    "linux": {
                        "agent": {
                            "tool_permissions": {
                                "tools": {
                                    "search_web": {
                                        "allow": true
                                    }
                                }
                            }
                        }
                    }
                }
                "#
                .unindent(),
            ),
        );
    }
+
    // The rename also applies inside release-channel overrides (e.g. "nightly").
    #[test]
    fn test_rename_web_search_to_search_web_release_channel_override() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "nightly": {
                    "agent": {
                        "tool_permissions": {
                            "tools": {
                                "web_search": {
                                    "default": "allow"
                                }
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            Some(
                &r#"
                {
                    "nightly": {
                        "agent": {
                            "tool_permissions": {
                                "tools": {
                                    "search_web": {
                                        "default": "allow"
                                    }
                                }
                            }
                        }
                    }
                }
                "#
                .unindent(),
            ),
        );
    }
+
    // Settings without an `agent` section are untouched (migration returns None).
    #[test]
    fn test_rename_web_search_to_search_web_no_agent() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "buffer_font_size": 14
            }
            "#
            .unindent(),
            None,
        );
    }
+
    // Root-level `profiles.<name>.settings.agent...` (the post-m_2026_04_01
    // shape) is also migrated, via the `settings` descent in `migrate_one`.
    #[test]
    fn test_rename_web_search_to_search_web_root_level_profile() {
        assert_migrate_with_migrations(
            &[MigrationType::Json(
                migrations::m_2026_04_10::rename_web_search_to_search_web,
            )],
            &r#"
            {
                "profiles": {
                    "Work": {
                        "settings": {
                            "agent": {
                                "tool_permissions": {
                                    "tools": {
                                        "web_search": {
                                            "default": "allow"
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            "#
            .unindent(),
            Some(
                &r#"
                {
                    "profiles": {
                        "Work": {
                            "settings": {
                                "agent": {
                                    "tool_permissions": {
                                        "tools": {
                                            "search_web": {
                                                "default": "allow"
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                "#
                .unindent(),
            ),
        );
    }
 }

crates/mistral/src/mistral.rs 🔗

@@ -79,7 +79,7 @@ pub enum Model {
     #[serde(rename = "custom")]
     Custom {
         name: String,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         max_tokens: u64,
         max_output_tokens: Option<u64>,

crates/open_ai/src/open_ai.rs 🔗

@@ -98,7 +98,7 @@ pub enum Model {
     #[serde(rename = "custom")]
     Custom {
         name: String,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         max_tokens: u64,
         max_output_tokens: Option<u64>,

crates/project/src/project.rs 🔗

@@ -2566,7 +2566,7 @@ impl Project {
         path: ProjectPath,
         trash: bool,
         cx: &mut Context<Self>,
-    ) -> Option<Task<Result<()>>> {
+    ) -> Option<Task<Result<Option<TrashedEntry>>>> {
         let entry = self.entry_for_path(&path, cx)?;
         self.delete_entry(entry.id, trash, cx)
     }
@@ -2577,7 +2577,7 @@ impl Project {
         entry_id: ProjectEntryId,
         trash: bool,
         cx: &mut Context<Self>,
-    ) -> Option<Task<Result<()>>> {
+    ) -> Option<Task<Result<Option<TrashedEntry>>>> {
         let worktree = self.worktree_for_entry(entry_id, cx)?;
         cx.emit(Event::DeletedEntry(worktree.read(cx).id(), entry_id));
         worktree.update(cx, |worktree, cx| {
@@ -2585,6 +2585,27 @@ impl Project {
         })
     }
 
+    #[inline]
+    pub fn restore_entry(
+        &self,
+        worktree_id: WorktreeId,
+        trash_entry: TrashedEntry,
+        cx: &mut Context<'_, Self>,
+    ) -> Task<Result<ProjectPath>> {
+        let Some(worktree) = self.worktree_for_id(worktree_id, cx) else {
+            return Task::ready(Err(anyhow!("No worktree for id {worktree_id:?}")));
+        };
+
+        cx.spawn(async move |_, cx| {
+            Worktree::restore_entry(trash_entry, worktree, cx)
+                .await
+                .map(|rel_path_buf| ProjectPath {
+                    worktree_id: worktree_id,
+                    path: Arc::from(rel_path_buf.as_rel_path()),
+                })
+        })
+    }
+
     #[inline]
     pub fn expand_entry(
         &mut self,
@@ -6110,30 +6131,48 @@ impl ProjectGroupKey {
         Self { paths, host }
     }
 
-    pub fn display_name(&self) -> SharedString {
    /// The worktree root paths that define this project group.
    pub fn path_list(&self) -> &PathList {
        &self.paths
    }
+
+    pub fn display_name(
+        &self,
+        path_detail_map: &std::collections::HashMap<PathBuf, usize>,
+    ) -> SharedString {
         let mut names = Vec::with_capacity(self.paths.paths().len());
         for abs_path in self.paths.paths() {
-            if let Some(name) = abs_path.file_name() {
-                names.push(name.to_string_lossy().to_string());
+            let detail = path_detail_map.get(abs_path).copied().unwrap_or(0);
+            let suffix = path_suffix(abs_path, detail);
+            if !suffix.is_empty() {
+                names.push(suffix);
             }
         }
         if names.is_empty() {
-            // TODO: Can we do something better in this case?
             "Empty Workspace".into()
         } else {
             names.join(", ").into()
         }
     }
 
-    pub fn path_list(&self) -> &PathList {
-        &self.paths
-    }
-
    /// Remote connection options for this group's host, if it is remote.
    pub fn host(&self) -> Option<RemoteConnectionOptions> {
        self.host.clone()
    }
 }
 
/// Returns the last `detail + 1` normal components of `path`, joined with
/// `/` (e.g. `detail == 1` on `/home/user/project` yields `"user/project"`).
/// Root, prefix, and relative components are skipped; fewer components than
/// requested yields whatever is available, possibly the empty string.
pub fn path_suffix(path: &Path, detail: usize) -> String {
    let mut parts = Vec::new();
    // Walk from the end of the path, keeping only `Normal` components, and
    // stop once we have gathered `detail + 1` of them.
    for component in path.components().rev() {
        if let std::path::Component::Normal(name) = component {
            parts.push(name.to_string_lossy());
            if parts.len() > detail {
                break;
            }
        }
    }
    parts.reverse();
    parts.join("/")
}
+
 pub struct PathMatchCandidateSet {
     pub snapshot: Snapshot,
     pub include_ignored: bool,

crates/project_panel/Cargo.toml 🔗

@@ -22,6 +22,7 @@ collections.workspace = true
 command_palette_hooks.workspace = true
 editor.workspace = true
 file_icons.workspace = true
+futures.workspace = true
 git_ui.workspace = true
 git.workspace = true
 gpui.workspace = true
@@ -48,6 +49,7 @@ zed_actions.workspace = true
 telemetry.workspace = true
 notifications.workspace = true
 feature_flags.workspace = true
+fs.workspace = true
 
 [dev-dependencies]
 client = { workspace = true, features = ["test-support"] }

crates/project_panel/src/project_panel.rs 🔗

@@ -47,16 +47,16 @@ use settings::{
     update_settings_file,
 };
 use smallvec::SmallVec;
-use std::ops::Neg;
-use std::{any::TypeId, time::Instant};
 use std::{
+    any::TypeId,
     cell::OnceCell,
     cmp,
     collections::HashSet,
+    ops::Neg,
     ops::Range,
     path::{Path, PathBuf},
     sync::Arc,
-    time::Duration,
+    time::{Duration, Instant},
 };
 use theme_settings::ThemeSettings;
 use ui::{
@@ -84,7 +84,7 @@ use zed_actions::{
 
 use crate::{
     project_panel_settings::ProjectPanelScrollbarProxy,
-    undo::{ProjectPanelOperation, UndoManager},
+    undo::{Change, UndoManager},
 };
 
 const PROJECT_PANEL_KEY: &str = "ProjectPanel";
@@ -401,6 +401,8 @@ actions!(
         CompareMarkedFiles,
         /// Undoes the last file operation.
         Undo,
+        /// Redoes the last undone file operation.
+        Redo,
     ]
 );
 
@@ -861,6 +863,7 @@ impl ProjectPanel {
             .detach();
 
             let scroll_handle = UniformListScrollHandle::new();
+            let weak_project_panel = cx.weak_entity();
             let mut this = Self {
                 project: project.clone(),
                 hover_scroll_task: None,
@@ -896,7 +899,7 @@ impl ProjectPanel {
                     unfolded_dir_ids: Default::default(),
                 },
                 update_visible_entries_task: Default::default(),
-                undo_manager: UndoManager::new(workspace.weak_handle()),
+                undo_manager: UndoManager::new(workspace.weak_handle(), weak_project_panel, &cx),
             };
             this.update_visible_entries(None, false, false, window, cx);
 
@@ -1176,6 +1179,11 @@ impl ProjectPanel {
                                     "Undo",
                                     Box::new(Undo),
                                 )
+                                .action_disabled_when(
+                                    !self.undo_manager.can_redo(),
+                                    "Redo",
+                                    Box::new(Redo),
+                                )
                             })
                             .when(is_remote, |menu| {
                                 menu.separator()
@@ -1874,16 +1882,12 @@ impl ProjectPanel {
                 // Record the operation if the edit was applied
                 if new_entry.is_ok() {
                     let operation = if let Some(old_entry) = edited_entry {
-                        ProjectPanelOperation::Rename {
-                            old_path: (worktree_id, old_entry.path).into(),
-                            new_path: new_project_path,
-                        }
+                        Change::Renamed((worktree_id, old_entry.path).into(), new_project_path)
                     } else {
-                        ProjectPanelOperation::Create {
-                            project_path: new_project_path,
-                        }
+                        Change::Created(new_project_path)
                     };
-                    project_panel.undo_manager.record(operation);
+
+                    project_panel.undo_manager.record([operation]).log_err();
                 }
 
                 cx.notify();
@@ -2136,9 +2140,12 @@ impl ProjectPanel {
         }
     }
 
-    pub fn undo(&mut self, _: &Undo, _window: &mut Window, cx: &mut Context<Self>) {
-        self.undo_manager.undo(cx);
-        cx.notify();
+    pub fn undo(&mut self, _: &Undo, _window: &mut Window, _cx: &mut Context<Self>) {
+        self.undo_manager.undo().log_err();
+    }
+
+    pub fn redo(&mut self, _: &Redo, _window: &mut Window, _cx: &mut Context<Self>) {
+        self.undo_manager.redo().log_err();
     }
 
     fn rename_impl(
@@ -2331,6 +2338,7 @@ impl ProjectPanel {
 
                     Some((
                         selection.entry_id,
+                        selection.worktree_id,
                         project_path.path.file_name()?.to_string(),
                     ))
                 })
@@ -2346,7 +2354,7 @@ impl ProjectPanel {
                     "Are you sure you want to permanently delete"
                 };
                 let prompt = match file_paths.first() {
-                    Some((_, path)) if file_paths.len() == 1 => {
+                    Some((_, _, path)) if file_paths.len() == 1 => {
                         let unsaved_warning = if dirty_buffers > 0 {
                             "\n\nIt has unsaved changes, which will be lost."
                         } else {
@@ -2361,7 +2369,7 @@ impl ProjectPanel {
                             let truncated_path_counts = file_paths.len() - CUTOFF_POINT;
                             let mut paths = file_paths
                                 .iter()
-                                .map(|(_, path)| path.clone())
+                                .map(|(_, _, path)| path.clone())
                                 .take(CUTOFF_POINT)
                                 .collect::<Vec<_>>();
                             paths.truncate(CUTOFF_POINT);
@@ -2372,7 +2380,7 @@ impl ProjectPanel {
                             }
                             paths
                         } else {
-                            file_paths.iter().map(|(_, path)| path.clone()).collect()
+                            file_paths.iter().map(|(_, _, path)| path.clone()).collect()
                         };
                         let unsaved_warning = if dirty_buffers == 0 {
                             String::new()
@@ -2409,8 +2417,11 @@ impl ProjectPanel {
                 {
                     return anyhow::Ok(());
                 }
-                for (entry_id, _) in file_paths {
-                    panel
+
+                let mut changes = Vec::new();
+
+                for (entry_id, worktree_id, _) in file_paths {
+                    let trashed_entry = panel
                         .update(cx, |panel, cx| {
                             panel
                                 .project
@@ -2418,8 +2429,19 @@ impl ProjectPanel {
                                 .context("no such entry")
                         })??
                         .await?;
+
+                    // Keep track of each trashed entry so that all of the
+                    // changes can be recorded at once; a single undo or redo
+                    // then restores or trashes every affected file as a batch.
+                    if trash && let Some(trashed_entry) = trashed_entry {
+                        changes.push(Change::Trashed(worktree_id, trashed_entry));
+                    }
                 }
                 panel.update_in(cx, |panel, window, cx| {
+                    if trash {
+                        panel.undo_manager.record(changes).log_err();
+                    }
+
                     if let Some(next_selection) = next_selection {
                         panel.update_visible_entries(
                             Some((next_selection.worktree_id, next_selection.entry_id)),
@@ -3071,8 +3093,8 @@ impl ProjectPanel {
             enum PasteTask {
                 Rename {
                     task: Task<Result<CreatedEntry>>,
-                    old_path: ProjectPath,
-                    new_path: ProjectPath,
+                    from: ProjectPath,
+                    to: ProjectPath,
                 },
                 Copy {
                     task: Task<Result<Option<Entry>>>,
@@ -3089,14 +3111,14 @@ impl ProjectPanel {
                 let clip_entry_id = clipboard_entry.entry_id;
                 let destination: ProjectPath = (worktree_id, new_path).into();
                 let task = if clipboard_entries.is_cut() {
-                    let old_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?;
+                    let original_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?;
                     let task = self.project.update(cx, |project, cx| {
                         project.rename_entry(clip_entry_id, destination.clone(), cx)
                     });
                     PasteTask::Rename {
                         task,
-                        old_path,
-                        new_path: destination,
+                        from: original_path,
+                        to: destination,
                     }
                 } else {
                     let task = self.project.update(cx, |project, cx| {
@@ -3113,21 +3135,16 @@ impl ProjectPanel {
 
             cx.spawn_in(window, async move |project_panel, mut cx| {
                 let mut last_succeed = None;
-                let mut operations = Vec::new();
+                let mut changes = Vec::new();
 
                 for task in paste_tasks {
                     match task {
-                        PasteTask::Rename {
-                            task,
-                            old_path,
-                            new_path,
-                        } => {
+                        PasteTask::Rename { task, from, to } => {
                             if let Some(CreatedEntry::Included(entry)) = task
                                 .await
                                 .notify_workspace_async_err(workspace.clone(), &mut cx)
                             {
-                                operations
-                                    .push(ProjectPanelOperation::Rename { old_path, new_path });
+                                changes.push(Change::Renamed(from, to));
                                 last_succeed = Some(entry);
                             }
                         }
@@ -3136,9 +3153,7 @@ impl ProjectPanel {
                                 .await
                                 .notify_workspace_async_err(workspace.clone(), &mut cx)
                             {
-                                operations.push(ProjectPanelOperation::Create {
-                                    project_path: destination,
-                                });
+                                changes.push(Change::Created(destination));
                                 last_succeed = Some(entry);
                             }
                         }
@@ -3147,7 +3162,7 @@ impl ProjectPanel {
 
                 project_panel
                     .update(cx, |this, _| {
-                        this.undo_manager.record_batch(operations);
+                        this.undo_manager.record(changes).log_err();
                     })
                     .ok();
 
@@ -4371,6 +4386,20 @@ impl ProjectPanel {
                         this.marked_entries.clear();
                         this.update_visible_entries(new_selection, false, false, window, cx);
                     }
+
+                    let changes: Vec<Change> = opened_entries
+                        .iter()
+                        .filter_map(|entry_id| {
+                            worktree.read(cx).entry_for_id(*entry_id).map(|entry| {
+                                Change::Created(ProjectPath {
+                                    worktree_id,
+                                    path: entry.path.clone(),
+                                })
+                            })
+                        })
+                        .collect();
+
+                    this.undo_manager.record(changes).log_err();
                 })
             }
             .log_err()
@@ -4449,33 +4478,30 @@ impl ProjectPanel {
 
                 cx.spawn_in(window, async move |project_panel, cx| {
                     let mut last_succeed = None;
-                    let mut operations = Vec::new();
+                    let mut changes = Vec::new();
                     for task in copy_tasks.into_iter() {
                         if let Some(Some(entry)) = task.await.log_err() {
                             last_succeed = Some(entry.id);
-                            operations.push(ProjectPanelOperation::Create {
-                                project_path: (worktree_id, entry.path).into(),
-                            });
+                            changes.push(Change::Created((worktree_id, entry.path).into()));
                         }
                     }
                     // update selection
                     if let Some(entry_id) = last_succeed {
-                        project_panel
-                            .update_in(cx, |project_panel, window, cx| {
-                                project_panel.selection = Some(SelectedEntry {
-                                    worktree_id,
-                                    entry_id,
-                                });
-
-                                project_panel.undo_manager.record_batch(operations);
+                        project_panel.update_in(cx, |project_panel, window, cx| {
+                            project_panel.selection = Some(SelectedEntry {
+                                worktree_id,
+                                entry_id,
+                            });
+                            // if only one entry was dragged and it was disambiguated, open the rename editor
+                            if item_count == 1 && disambiguation_range.is_some() {
+                                project_panel.rename_impl(disambiguation_range, window, cx);
+                            }
 
-                                // if only one entry was dragged and it was disambiguated, open the rename editor
-                                if item_count == 1 && disambiguation_range.is_some() {
-                                    project_panel.rename_impl(disambiguation_range, window, cx);
-                                }
-                            })
-                            .ok();
+                            project_panel.undo_manager.record(changes)
+                        })??;
                     }
+
+                    anyhow::Ok(())
                 })
                 .detach();
                 Some(())
@@ -4551,7 +4577,7 @@ impl ProjectPanel {
             let workspace = self.workspace.clone();
             if folded_selection_info.is_empty() {
                 cx.spawn_in(window, async move |project_panel, mut cx| {
-                    let mut operations = Vec::new();
+                    let mut changes = Vec::new();
                     for (entry_id, task) in move_tasks {
                         if let Some(CreatedEntry::Included(new_entry)) = task
                             .await
@@ -4560,16 +4586,16 @@ impl ProjectPanel {
                             if let (Some(old_path), Some(worktree_id)) =
                                 (old_paths.get(&entry_id), destination_worktree_id)
                             {
-                                operations.push(ProjectPanelOperation::Rename {
-                                    old_path: old_path.clone(),
-                                    new_path: (worktree_id, new_entry.path).into(),
-                                });
+                                changes.push(Change::Renamed(
+                                    old_path.clone(),
+                                    (worktree_id, new_entry.path).into(),
+                                ));
                             }
                         }
                     }
                     project_panel
                         .update(cx, |this, _| {
-                            this.undo_manager.record_batch(operations);
+                            this.undo_manager.record(changes).log_err();
                         })
                         .ok();
                 })
@@ -4587,10 +4613,10 @@ impl ProjectPanel {
                             if let (Some(old_path), Some(worktree_id)) =
                                 (old_paths.get(&entry_id), destination_worktree_id)
                             {
-                                operations.push(ProjectPanelOperation::Rename {
-                                    old_path: old_path.clone(),
-                                    new_path: (worktree_id, new_entry.path.clone()).into(),
-                                });
+                                operations.push(Change::Renamed(
+                                    old_path.clone(),
+                                    (worktree_id, new_entry.path.clone()).into(),
+                                ));
                             }
                             move_results.push((entry_id, new_entry));
                         }
@@ -4602,7 +4628,7 @@ impl ProjectPanel {
 
                     project_panel
                         .update(cx, |this, _| {
-                            this.undo_manager.record_batch(operations);
+                            this.undo_manager.record(operations).log_err();
                         })
                         .ok();
 
@@ -6640,6 +6666,7 @@ impl Render for ProjectPanel {
                 .on_action(cx.listener(Self::compare_marked_files))
                 .when(cx.has_flag::<ProjectPanelUndoRedoFeatureFlag>(), |el| {
                     el.on_action(cx.listener(Self::undo))
+                        .on_action(cx.listener(Self::redo))
                 })
                 .when(!project.is_read_only(cx), |el| {
                     el.on_action(cx.listener(Self::new_file))
@@ -7333,3 +7360,4 @@ fn git_status_indicator(git_status: GitSummary) -> Option<(&'static str, Color)>
 
 #[cfg(test)]
 mod project_panel_tests;
+mod tests;

crates/project_panel/src/project_panel_tests.rs 🔗

@@ -1,4 +1,5 @@
 use super::*;
+// use crate::undo::tests::{build_create_operation, build_rename_operation};
 use collections::HashSet;
 use editor::MultiBufferOffset;
 use gpui::{Empty, Entity, TestAppContext, VisualTestContext};
@@ -1994,555 +1995,6 @@ async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext)
     );
 }
 
-#[gpui::test]
-async fn test_undo_rename(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "a.txt": "",
-            "b.txt": "",
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    select_path(&panel, "root/a.txt", cx);
-    panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
-    cx.run_until_parked();
-
-    let confirm = panel.update_in(cx, |panel, window, cx| {
-        panel
-            .filename_editor
-            .update(cx, |editor, cx| editor.set_text("renamed.txt", window, cx));
-        panel.confirm_edit(true, window, cx).unwrap()
-    });
-    confirm.await.unwrap();
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/renamed.txt", cx).is_some(),
-        "File should be renamed to renamed.txt"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/a.txt", cx),
-        None,
-        "Original file should no longer exist"
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/a.txt", cx).is_some(),
-        "File should be restored to original name after undo"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/renamed.txt", cx),
-        None,
-        "Renamed file should no longer exist after undo"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_create_file(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "existing.txt": "",
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    select_path(&panel, "root", cx);
-    panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
-    cx.run_until_parked();
-
-    let confirm = panel.update_in(cx, |panel, window, cx| {
-        panel
-            .filename_editor
-            .update(cx, |editor, cx| editor.set_text("new.txt", window, cx));
-        panel.confirm_edit(true, window, cx).unwrap()
-    });
-    confirm.await.unwrap();
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/new.txt", cx).is_some(),
-        "New file should exist"
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert_eq!(
-        find_project_entry(&panel, "root/new.txt", cx),
-        None,
-        "New file should be removed after undo"
-    );
-    assert!(
-        find_project_entry(&panel, "root/existing.txt", cx).is_some(),
-        "Existing file should still be present"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_create_directory(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "existing.txt": "",
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    select_path(&panel, "root", cx);
-    panel.update_in(cx, |panel, window, cx| {
-        panel.new_directory(&NewDirectory, window, cx)
-    });
-    cx.run_until_parked();
-
-    let confirm = panel.update_in(cx, |panel, window, cx| {
-        panel
-            .filename_editor
-            .update(cx, |editor, cx| editor.set_text("new_dir", window, cx));
-        panel.confirm_edit(true, window, cx).unwrap()
-    });
-    confirm.await.unwrap();
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/new_dir", cx).is_some(),
-        "New directory should exist"
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert_eq!(
-        find_project_entry(&panel, "root/new_dir", cx),
-        None,
-        "New directory should be removed after undo"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_cut_paste(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "src": {
-                "file.txt": "content",
-            },
-            "dst": {},
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    toggle_expand_dir(&panel, "root/src", cx);
-
-    select_path_with_mark(&panel, "root/src/file.txt", cx);
-    panel.update_in(cx, |panel, window, cx| {
-        panel.cut(&Default::default(), window, cx);
-    });
-
-    select_path(&panel, "root/dst", cx);
-    panel.update_in(cx, |panel, window, cx| {
-        panel.paste(&Default::default(), window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/dst/file.txt", cx).is_some(),
-        "File should be moved to dst"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/src/file.txt", cx),
-        None,
-        "File should no longer be in src"
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/src/file.txt", cx).is_some(),
-        "File should be back in src after undo"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/dst/file.txt", cx),
-        None,
-        "File should no longer be in dst after undo"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_drag_single_entry(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "src": {
-                "main.rs": "",
-            },
-            "dst": {},
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    toggle_expand_dir(&panel, "root/src", cx);
-
-    panel.update(cx, |panel, _| panel.marked_entries.clear());
-    select_path_with_mark(&panel, "root/src/main.rs", cx);
-    drag_selection_to(&panel, "root/dst", false, cx);
-
-    assert!(
-        find_project_entry(&panel, "root/dst/main.rs", cx).is_some(),
-        "File should be in dst after drag"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/src/main.rs", cx),
-        None,
-        "File should no longer be in src after drag"
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/src/main.rs", cx).is_some(),
-        "File should be back in src after undo"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/dst/main.rs", cx),
-        None,
-        "File should no longer be in dst after undo"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_drag_multiple_entries(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "src": {
-                "alpha.txt": "",
-                "beta.txt": "",
-            },
-            "dst": {},
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    toggle_expand_dir(&panel, "root/src", cx);
-
-    panel.update(cx, |panel, _| panel.marked_entries.clear());
-    select_path_with_mark(&panel, "root/src/alpha.txt", cx);
-    select_path_with_mark(&panel, "root/src/beta.txt", cx);
-    drag_selection_to(&panel, "root/dst", false, cx);
-
-    assert!(
-        find_project_entry(&panel, "root/dst/alpha.txt", cx).is_some(),
-        "alpha.txt should be in dst after drag"
-    );
-    assert!(
-        find_project_entry(&panel, "root/dst/beta.txt", cx).is_some(),
-        "beta.txt should be in dst after drag"
-    );
-
-    // A single undo should revert the entire batch
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/src/alpha.txt", cx).is_some(),
-        "alpha.txt should be back in src after undo"
-    );
-    assert!(
-        find_project_entry(&panel, "root/src/beta.txt", cx).is_some(),
-        "beta.txt should be back in src after undo"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/dst/alpha.txt", cx),
-        None,
-        "alpha.txt should no longer be in dst after undo"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/dst/beta.txt", cx),
-        None,
-        "beta.txt should no longer be in dst after undo"
-    );
-}
-
-#[gpui::test]
-async fn test_multiple_sequential_undos(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "a.txt": "",
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    select_path(&panel, "root/a.txt", cx);
-    panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
-    cx.run_until_parked();
-    let confirm = panel.update_in(cx, |panel, window, cx| {
-        panel
-            .filename_editor
-            .update(cx, |editor, cx| editor.set_text("b.txt", window, cx));
-        panel.confirm_edit(true, window, cx).unwrap()
-    });
-    confirm.await.unwrap();
-    cx.run_until_parked();
-
-    assert!(find_project_entry(&panel, "root/b.txt", cx).is_some());
-
-    select_path(&panel, "root", cx);
-    panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
-    cx.run_until_parked();
-    let confirm = panel.update_in(cx, |panel, window, cx| {
-        panel
-            .filename_editor
-            .update(cx, |editor, cx| editor.set_text("c.txt", window, cx));
-        panel.confirm_edit(true, window, cx).unwrap()
-    });
-    confirm.await.unwrap();
-    cx.run_until_parked();
-
-    assert!(find_project_entry(&panel, "root/b.txt", cx).is_some());
-    assert!(find_project_entry(&panel, "root/c.txt", cx).is_some());
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert_eq!(
-        find_project_entry(&panel, "root/c.txt", cx),
-        None,
-        "c.txt should be removed after first undo"
-    );
-    assert!(
-        find_project_entry(&panel, "root/b.txt", cx).is_some(),
-        "b.txt should still exist after first undo"
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/a.txt", cx).is_some(),
-        "a.txt should be restored after second undo"
-    );
-    assert_eq!(
-        find_project_entry(&panel, "root/b.txt", cx),
-        None,
-        "b.txt should no longer exist after second undo"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_with_empty_stack(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "a.txt": "",
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    cx.run_until_parked();
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert!(
-        find_project_entry(&panel, "root/a.txt", cx).is_some(),
-        "File tree should be unchanged after undo on empty stack"
-    );
-}
-
-#[gpui::test]
-async fn test_undo_batch(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree(
-        "/root",
-        json!({
-            "src": {
-                "main.rs": "// Code!"
-            }
-        }),
-    )
-    .await;
-
-    let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let workspace = window
-        .read_with(cx, |mw, _| mw.workspace().clone())
-        .unwrap();
-    let cx = &mut VisualTestContext::from_window(window.into(), cx);
-    let panel = workspace.update_in(cx, ProjectPanel::new);
-    let worktree_id = project.update(cx, |project, cx| {
-        project.visible_worktrees(cx).next().unwrap().read(cx).id()
-    });
-    cx.run_until_parked();
-
-    // Since there currently isn't a way to both create a folder and the file
-    // within it as two separate operations batched under the same
-    // `ProjectPanelOperation::Batch` operation, we'll simply record those
-    // ourselves, knowing that the filesystem already has the folder and file
-    // being provided in the operations.
-    panel.update(cx, |panel, _cx| {
-        panel.undo_manager.record_batch(vec![
-            ProjectPanelOperation::Create {
-                project_path: ProjectPath {
-                    worktree_id,
-                    path: Arc::from(rel_path("src/main.rs")),
-                },
-            },
-            ProjectPanelOperation::Create {
-                project_path: ProjectPath {
-                    worktree_id,
-                    path: Arc::from(rel_path("src/")),
-                },
-            },
-        ]);
-    });
-
-    // Ensure that `src/main.rs` is present in the filesystem before proceeding,
-    // otherwise this test is irrelevant.
-    assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/src/main.rs"))]);
-    assert_eq!(
-        fs.directories(false),
-        vec![
-            PathBuf::from(path!("/")),
-            PathBuf::from(path!("/root/")),
-            PathBuf::from(path!("/root/src/"))
-        ]
-    );
-
-    panel.update_in(cx, |panel, window, cx| {
-        panel.undo(&Undo, window, cx);
-    });
-    cx.run_until_parked();
-
-    assert_eq!(fs.files().len(), 0);
-    assert_eq!(
-        fs.directories(false),
-        vec![PathBuf::from(path!("/")), PathBuf::from(path!("/root/"))]
-    );
-}
-
 #[gpui::test]
 async fn test_paste_external_paths(cx: &mut gpui::TestAppContext) {
     init_test(cx);
@@ -7348,7 +6800,11 @@ async fn test_selection_fallback_to_next_highest_worktree(cx: &mut gpui::TestApp
     );
 }
 
-fn toggle_expand_dir(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
+pub(crate) fn toggle_expand_dir(
+    panel: &Entity<ProjectPanel>,
+    path: &str,
+    cx: &mut VisualTestContext,
+) {
     let path = rel_path(path);
     panel.update_in(cx, |panel, window, cx| {
         for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
@@ -9700,7 +9156,7 @@ async fn test_hide_hidden_entries(cx: &mut gpui::TestAppContext) {
     );
 }
 
-fn select_path(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
+pub(crate) fn select_path(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
     let path = rel_path(path);
     panel.update_in(cx, |panel, window, cx| {
         for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
@@ -9722,7 +9178,11 @@ fn select_path(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestCont
     cx.run_until_parked();
 }
 
-fn select_path_with_mark(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
+pub(crate) fn select_path_with_mark(
+    panel: &Entity<ProjectPanel>,
+    path: &str,
+    cx: &mut VisualTestContext,
+) {
     let path = rel_path(path);
     panel.update(cx, |panel, cx| {
         for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
@@ -9810,7 +9270,7 @@ fn set_folded_active_ancestor(
     });
 }
 
-fn drag_selection_to(
+pub(crate) fn drag_selection_to(
     panel: &Entity<ProjectPanel>,
     target_path: &str,
     is_file: bool,
@@ -9835,7 +9295,7 @@ fn drag_selection_to(
     cx.executor().run_until_parked();
 }
 
-fn find_project_entry(
+pub(crate) fn find_project_entry(
     panel: &Entity<ProjectPanel>,
     path: &str,
     cx: &mut VisualTestContext,

crates/project_panel/src/tests/undo.rs 🔗

@@ -0,0 +1,384 @@
+#![cfg(test)]
+
+use collections::HashSet;
+use fs::{FakeFs, Fs};
+use gpui::{Entity, VisualTestContext};
+use project::Project;
+use serde_json::{Value, json};
+use std::path::Path;
+use std::sync::Arc;
+use workspace::MultiWorkspace;
+
+use crate::project_panel_tests::{self, find_project_entry, select_path};
+use crate::{NewDirectory, NewFile, ProjectPanel, Redo, Rename, Trash, Undo};
+
+struct TestContext {
+    panel: Entity<ProjectPanel>,
+    fs: Arc<FakeFs>,
+    cx: VisualTestContext,
+}
+
+// Using the `util::path` macro requires a string literal, which would mean that
+// callers of, for example, `rename`, would now need to know about `/` and
+// use `path!` in tests.
+//
+// As such, we define it as a function here to make the helper methods more
+// ergonomic for our use case.
+fn path(path: impl AsRef<str>) -> String {
+    let path = path.as_ref();
+    #[cfg(target_os = "windows")]
+    {
+        let mut path = path.replace("/", "\\");
+        if path.starts_with("\\") {
+            path = format!("C:{}", &path);
+        }
+        path
+    }
+
+    #[cfg(not(target_os = "windows"))]
+    {
+        path.to_string()
+    }
+}
+
+impl TestContext {
+    async fn undo(&mut self) {
+        self.panel.update_in(&mut self.cx, |panel, window, cx| {
+            panel.undo(&Undo, window, cx);
+        });
+        self.cx.run_until_parked();
+    }
+    async fn redo(&mut self) {
+        self.panel.update_in(&mut self.cx, |panel, window, cx| {
+            panel.redo(&Redo, window, cx);
+        });
+        self.cx.run_until_parked();
+    }
+
+    /// Note this only works when every file has an extension
+    fn assert_fs_state_is(&mut self, state: &[&str]) {
+        let state: HashSet<_> = state
+            .into_iter()
+            .map(|s| path(format!("/workspace/{s}")))
+            .chain([path("/workspace"), path("/")])
+            .map(|s| Path::new(&s).to_path_buf())
+            .collect();
+
+        let dirs: HashSet<_> = state
+            .iter()
+            .map(|p| match p.extension() {
+                Some(_) => p.parent().unwrap_or(Path::new(&path("/"))).to_owned(),
+                None => p.clone(),
+            })
+            .collect();
+
+        assert_eq!(
+            self.fs
+                .directories(true)
+                .into_iter()
+                .collect::<HashSet<_>>(),
+            dirs
+        );
+        assert_eq!(
+            self.fs.paths(true).into_iter().collect::<HashSet<_>>(),
+            state
+        );
+    }
+
+    fn assert_exists(&mut self, file: &str) {
+        assert!(
+            find_project_entry(&self.panel, &format!("workspace/{file}"), &mut self.cx).is_some(),
+            "{file} should exist"
+        );
+    }
+
+    fn assert_not_exists(&mut self, file: &str) {
+        assert_eq!(
+            find_project_entry(&self.panel, &format!("workspace/{file}"), &mut self.cx),
+            None,
+            "{file} should not exist"
+        );
+    }
+
+    async fn rename(&mut self, from: &str, to: &str) {
+        let from = format!("workspace/{from}");
+        let Self { panel, cx, .. } = self;
+        select_path(&panel, &from, cx);
+        panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
+        cx.run_until_parked();
+
+        let confirm = panel.update_in(cx, |panel, window, cx| {
+            panel
+                .filename_editor
+                .update(cx, |editor, cx| editor.set_text(to, window, cx));
+            panel.confirm_edit(true, window, cx).unwrap()
+        });
+        confirm.await.unwrap();
+        cx.run_until_parked();
+    }
+
+    async fn create_file(&mut self, path: &str) {
+        let Self { panel, cx, .. } = self;
+        select_path(&panel, "workspace", cx);
+        panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
+        cx.run_until_parked();
+
+        let confirm = panel.update_in(cx, |panel, window, cx| {
+            panel
+                .filename_editor
+                .update(cx, |editor, cx| editor.set_text(path, window, cx));
+            panel.confirm_edit(true, window, cx).unwrap()
+        });
+        confirm.await.unwrap();
+        cx.run_until_parked();
+    }
+
+    async fn create_directory(&mut self, path: &str) {
+        let Self { panel, cx, .. } = self;
+
+        select_path(&panel, "workspace", cx);
+        panel.update_in(cx, |panel, window, cx| {
+            panel.new_directory(&NewDirectory, window, cx)
+        });
+        cx.run_until_parked();
+
+        let confirm = panel.update_in(cx, |panel, window, cx| {
+            panel
+                .filename_editor
+                .update(cx, |editor, cx| editor.set_text(path, window, cx));
+            panel.confirm_edit(true, window, cx).unwrap()
+        });
+        confirm.await.unwrap();
+        cx.run_until_parked();
+    }
+
+    /// Drags the `files` to the provided `directory`.
+    fn drag(&mut self, files: &[&str], directory: &str) {
+        self.panel
+            .update(&mut self.cx, |panel, _| panel.marked_entries.clear());
+        files.into_iter().for_each(|file| {
+            project_panel_tests::select_path_with_mark(
+                &self.panel,
+                &format!("workspace/{file}"),
+                &mut self.cx,
+            )
+        });
+        project_panel_tests::drag_selection_to(
+            &self.panel,
+            &format!("workspace/{directory}"),
+            false,
+            &mut self.cx,
+        );
+    }
+
+    /// Only supports files in root (otherwise would need toggle_expand_dir).
+    /// For undo redo the paths themselves do not matter so this is fine
+    async fn cut(&mut self, file: &str) {
+        project_panel_tests::select_path_with_mark(
+            &self.panel,
+            &format!("workspace/{file}"),
+            &mut self.cx,
+        );
+        self.panel.update_in(&mut self.cx, |panel, window, cx| {
+            panel.cut(&Default::default(), window, cx);
+        });
+    }
+
+    /// Only supports files in root (otherwise would need toggle_expand_dir).
+    /// For undo redo the paths themselves do not matter so this is fine
+    async fn paste(&mut self, directory: &str) {
+        select_path(&self.panel, &format!("workspace/{directory}"), &mut self.cx);
+        self.panel.update_in(&mut self.cx, |panel, window, cx| {
+            panel.paste(&Default::default(), window, cx);
+        });
+        self.cx.run_until_parked();
+    }
+
+    async fn trash(&mut self, paths: &[&str]) {
+        paths.iter().for_each(|p| {
+            project_panel_tests::select_path_with_mark(
+                &self.panel,
+                &format!("workspace/{p}"),
+                &mut self.cx,
+            )
+        });
+
+        self.panel.update_in(&mut self.cx, |panel, window, cx| {
+            panel.trash(&Trash { skip_prompt: true }, window, cx);
+        });
+
+        self.cx.run_until_parked();
+    }
+
+    /// The test tree is:
+    /// ```txt
+    /// a.txt
+    /// b.txt
+    /// ```
+    /// Both `a.txt` and `b.txt` are empty.
+    async fn new(cx: &mut gpui::TestAppContext) -> TestContext {
+        Self::new_with_tree(
+            cx,
+            json!({
+                    "a.txt": "",
+                    "b.txt": "",
+            }),
+        )
+        .await
+    }
+
+    async fn new_with_tree(cx: &mut gpui::TestAppContext, tree: Value) -> TestContext {
+        project_panel_tests::init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree("/workspace", tree).await;
+        let project = Project::test(fs.clone(), ["/workspace".as_ref()], cx).await;
+        let window =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = window
+            .read_with(cx, |mw, _| mw.workspace().clone())
+            .unwrap();
+        let mut cx = VisualTestContext::from_window(window.into(), cx);
+        let panel = workspace.update_in(&mut cx, ProjectPanel::new);
+        cx.run_until_parked();
+
+        TestContext { panel, fs, cx }
+    }
+}
+
+#[gpui::test]
+async fn rename_undo_redo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.rename("a.txt", "renamed.txt").await;
+    cx.assert_fs_state_is(&["b.txt", "renamed.txt"]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt"]);
+
+    cx.redo().await;
+    cx.assert_fs_state_is(&["b.txt", "renamed.txt"]);
+}
+
+#[gpui::test]
+async fn create_undo_redo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+    let path = path("/workspace/c.txt");
+
+    cx.create_file("c.txt").await;
+    cx.assert_exists("c.txt");
+
+    // We'll now insert some content into `c.txt` so that, when the create is
+    // undone (trashing the file) and then redone (restoring it), the file's
+    // original contents are preserved rather than a new empty file with the
+    // same path being created.
+    cx.fs.write(Path::new(&path), b"Hello!").await.unwrap();
+
+    cx.undo().await;
+    cx.assert_not_exists("c.txt");
+
+    cx.redo().await;
+    cx.assert_exists("c.txt");
+    assert_eq!(cx.fs.load(Path::new(&path)).await.unwrap(), "Hello!");
+}
+
+#[gpui::test]
+async fn create_dir_undo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.create_directory("new_dir").await;
+    cx.assert_exists("new_dir");
+    cx.undo().await;
+    cx.assert_not_exists("new_dir");
+}
+
+#[gpui::test]
+async fn cut_paste_undo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.create_directory("files").await;
+    cx.cut("a.txt").await;
+    cx.paste("files").await;
+    cx.assert_fs_state_is(&["b.txt", "files/", "files/a.txt"]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "files/"]);
+
+    cx.redo().await;
+    cx.assert_fs_state_is(&["b.txt", "files/", "files/a.txt"]);
+}
+
+#[gpui::test]
+async fn drag_undo_redo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.create_directory("src").await;
+    cx.create_file("src/a.rs").await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/a.rs"]);
+
+    cx.drag(&["src/a.rs"], "");
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "a.rs", "src/"]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/a.rs"]);
+
+    cx.redo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "a.rs", "src/"]);
+}
+
+#[gpui::test]
+async fn drag_multiple_undo_redo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.create_directory("src").await;
+    cx.create_file("src/x.rs").await;
+    cx.create_file("src/y.rs").await;
+
+    cx.drag(&["src/x.rs", "src/y.rs"], "");
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "x.rs", "y.rs", "src/"]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/x.rs", "src/y.rs"]);
+
+    cx.redo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt", "x.rs", "y.rs", "src/"]);
+}
+
+#[gpui::test]
+async fn two_sequential_undos(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.rename("a.txt", "x.txt").await;
+    cx.create_file("y.txt").await;
+    cx.assert_fs_state_is(&["b.txt", "x.txt", "y.txt"]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["b.txt", "x.txt"]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt"]);
+}
+
+#[gpui::test]
+async fn undo_without_history(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    // Undoing without any history should just result in the filesystem state
+    // remaining unchanged.
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt"])
+}
+
+#[gpui::test]
+async fn trash_undo_redo(cx: &mut gpui::TestAppContext) {
+    let mut cx = TestContext::new(cx).await;
+
+    cx.trash(&["a.txt", "b.txt"]).await;
+    cx.assert_fs_state_is(&[]);
+
+    cx.undo().await;
+    cx.assert_fs_state_is(&["a.txt", "b.txt"]);
+
+    cx.redo().await;
+    cx.assert_fs_state_is(&[]);
+}

crates/project_panel/src/undo.rs 🔗

@@ -1,286 +1,558 @@
-use anyhow::anyhow;
-use gpui::{AppContext, SharedString, Task, WeakEntity};
-use project::ProjectPath;
-use std::collections::VecDeque;
-use ui::{App, IntoElement, Label, ParentElement, Styled, v_flex};
+//! # Undo Manager
+//!
+//! ## Operations and Results
+//!
+//! Undo and Redo actions execute an operation against the filesystem, producing
+//! a result that is recorded back into the history in place of the original
+//! entry. Each result is the semantic inverse of its paired operation, so the
+//! cycle can repeat for continued undo and redo.
+//!
+//!  Operations                            Results
+//!  ─────────────────────────────────  ──────────────────────────────────────
+//!  Create(ProjectPath)               →  Created(ProjectPath)
+//!  Trash(ProjectPath)                →  Trashed(TrashedEntry)
+//!  Rename(ProjectPath, ProjectPath)  →  Renamed(ProjectPath, ProjectPath)
+//!  Restore(TrashedEntry)             →  Restored(ProjectPath)
+//!  Batch(Vec<Operation>)             →  Batch(Vec<Result>)
+//!
+//!
+//! ## History and Cursor
+//!
+//! The undo manager maintains an operation history with a cursor position (↑).
+//! Recording an operation appends it to the history and advances the cursor to
+//! the end. The cursor separates past entries (left of ↑) from future entries
+//! (right of ↑).
+//!
+//! ─ **Undo**: Takes the history entry just *before* ↑, executes its inverse,
+//!   records the result back in its place, and moves ↑ one step to the left.
+//! ─ **Redo**: Takes the history entry just *at* ↑, executes its inverse,
+//!   records the result back in its place, and advances ↑ one step to the right.
+//!
+//!
+//! ## Example
+//!
+//! User Operation  Create(src/main.rs)
+//! History
+//! 	0 Created(src/main.rs)
+//!     1 +++cursor+++
+//!
+//! User Operation  Rename(README.md, readme.md)
+//! History
+//! 	0 Created(src/main.rs)
+//! 	1 Renamed(README.md, readme.md)
+//!     2 +++cursor+++
+//!
+//! User Operation  Create(CONTRIBUTING.md)
+//! History
+//! 	0 Created(src/main.rs)
+//!     1 Renamed(README.md, readme.md)
+//! 	2 Created(CONTRIBUTING.md) ──┐
+//!     3 +++cursor+++               │(before the cursor)
+//!                                  │
+//!   ┌──────────────────────────────┴─────────────────────────────────────────────┐
+//!     Undoing will take the result just before the cursor position, convert it
+//!     into the operation that reverts that result, execute that operation and
+//!     replace the entry in the history with the new result obtained from running
+//!     the inverse operation, moving the cursor one step back.
+//!   └──────────────────────────────┬─────────────────────────────────────────────┘
+//!                                  │
+//!                                  │
+//! User Operation  Undo             v
+//! Execute         Created(CONTRIBUTING.md) ────────> Trash(CONTRIBUTING.md)
+//! Record          Trashed(TrashedEntry(1))
+//! History
+//! 	0 Created(src/main.rs)
+//! 	1 Renamed(README.md, readme.md) ─┐
+//!     2 +++cursor+++                   │(before the cursor)
+//! 	2 Trashed(TrashedEntry(1))       │
+//!                                      │
+//! User Operation  Undo                 v
+//! Execute         Renamed(README.md, readme.md) ───> Rename(readme.md, README.md)
+//! Record          Renamed(readme.md, README.md)
+//! History
+//! 	0 Created(src/main.rs)
+//!     1 +++cursor+++
+//! 	1 Renamed(readme.md, README.md) ─┐ (at the cursor)
+//! 	2 Trashed(TrashedEntry(1))       │
+//!                                      │
+//!   ┌──────────────────────────────────┴─────────────────────────────────────────┐
+//!     Redoing will take the result at the cursor position, convert that into the
+//!     operation that can revert that result, execute that operation and replace
+//!     the result in the history with the new result, obtained from running the
+//!     inverse operation, advancing the cursor position.
+//!   └──────────────────────────────────┬─────────────────────────────────────────┘
+//!                                      │
+//!                                      │
+//! User Operation  Redo                 v
+//! Execute         Renamed(readme.md, README.md) ───> Rename(README.md, readme.md)
+//! Record          Renamed(README.md, readme.md)
+//! History
+//! 	0 Created(src/main.rs)
+//! 	1 Renamed(README.md, readme.md)
+//!     2 +++cursor+++
+//! 	2 Trashed(TrashedEntry(1))────┐ (at the cursor)
+//!                                   │
+//! User Operation  Redo              v
+//! Execute         Trashed(TrashedEntry(1)) ────────> Restore(TrashedEntry(1))
+//! Record          Restored(ProjectPath)
+//! History
+//! 	0 Created(src/main.rs)
+//! 	1 Renamed(README.md, readme.md)
+//! 	2 Restored(ProjectPath)
+//!     2 +++cursor+++
+
+//!
+//! ## Known hazard: concurrent filesystem operations (TODO)
+//!
+//! Undo and redo execute asynchronously against the filesystem, so a slow
+//! undo can race with newer user operations on the same paths. For example:
+//!
+//! create A;                                                      A
+//! rename A -> B;                                                 B
+//! undo (rename B -> A), which hangs for 10s                      B (rename still pending)
+//! remove B                                                       _
+//! create B                                                       B
+//! put important content in B                                     B
+//! the pending undo finally completes, renaming B -> A            A
+//! remove A                                                       _
+//! user loses the important content and is sad
+//!
+//! The same hazard applies to other operation pairs, e.g. a slow undo of a
+//! rename racing with a create of the same path, or undoing the creation of a
+//! directory (such as `docs/files/`) while new entries are being created
+//! inside it.
+//!
+//! A possible mitigation is to keep a list of "tainted" paths — paths with an
+//! in-flight undo or redo — that the user may not operate on until the
+//! pending operation settles.
+
+use crate::ProjectPanel;
+use anyhow::{Context, Result, anyhow};
+use fs::TrashedEntry;
+use futures::channel::mpsc;
+use gpui::{AppContext, AsyncApp, SharedString, Task, WeakEntity};
+use project::{ProjectPath, WorktreeId};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::{collections::VecDeque, sync::Arc};
+use ui::App;
 use workspace::{
     Workspace,
     notifications::{NotificationId, simple_message_notification::MessageNotification},
 };
+use worktree::CreatedEntry;
 
-const MAX_UNDO_OPERATIONS: usize = 10_000;
+enum Operation {
+    Trash(ProjectPath),
+    Rename(ProjectPath, ProjectPath),
+    Restore(WorktreeId, TrashedEntry),
+    Batch(Vec<Operation>),
+}
 
-#[derive(Clone)]
-pub enum ProjectPanelOperation {
-    Batch(Vec<ProjectPanelOperation>),
-    Create {
-        project_path: ProjectPath,
-    },
-    Rename {
-        old_path: ProjectPath,
-        new_path: ProjectPath,
-    },
+impl Operation {
+    async fn execute(self, undo_manager: &Inner, cx: &mut AsyncApp) -> Result<Change> {
+        Ok(match self {
+            Operation::Trash(project_path) => {
+                let trash_entry = undo_manager.trash(&project_path, cx).await?;
+                Change::Trashed(project_path.worktree_id, trash_entry)
+            }
+            Operation::Rename(from, to) => {
+                undo_manager.rename(&from, &to, cx).await?;
+                Change::Renamed(from, to)
+            }
+            Operation::Restore(worktree_id, trashed_entry) => {
+                let project_path = undo_manager.restore(worktree_id, trashed_entry, cx).await?;
+                Change::Restored(project_path)
+            }
+            Operation::Batch(operations) => {
+                let mut res = Vec::new();
+                for op in operations {
+                    res.push(Box::pin(op.execute(undo_manager, cx)).await?);
+                }
+                Change::Batched(res)
+            }
+        })
+    }
 }
 
-pub struct UndoManager {
+#[derive(Clone, Debug)]
+pub(crate) enum Change {
+    Created(ProjectPath),
+    Trashed(WorktreeId, TrashedEntry),
+    Renamed(ProjectPath, ProjectPath),
+    Restored(ProjectPath),
+    Batched(Vec<Change>),
+}
+
+impl Change {
+    fn to_inverse(self) -> Operation {
+        match self {
+            Change::Created(project_path) => Operation::Trash(project_path),
+            Change::Trashed(worktree_id, trashed_entry) => {
+                Operation::Restore(worktree_id, trashed_entry)
+            }
+            Change::Renamed(from, to) => Operation::Rename(to, from),
+            Change::Restored(project_path) => Operation::Trash(project_path),
+            // When inverting a batch of operations, we reverse the order of
+            // operations to handle dependencies between them. For example, if a
+            // batch contains the following order of operations:
+            //
+            // 1. Create `src/`
+            // 2. Create `src/main.rs`
+            //
+            // If we first tried to revert the directory creation, it would fail
+            // because there's still files inside the directory.
+            Change::Batched(changes) => {
+                Operation::Batch(changes.into_iter().rev().map(Change::to_inverse).collect())
+            }
+        }
+    }
+}
+
+// Cap on the number of recorded changes; 10,000 undo steps is far beyond any
+// realistic interactive use.
+const MAX_UNDO_OPERATIONS: usize = 10_000;
+
+struct Inner {
     workspace: WeakEntity<Workspace>,
-    stack: VecDeque<ProjectPanelOperation>,
-    /// Maximum number of operations to keep on the undo stack.
+    panel: WeakEntity<ProjectPanel>,
+    history: VecDeque<Change>,
+    cursor: usize,
+    /// Maximum number of operations to keep on the undo history.
     limit: usize,
+    can_undo: Arc<AtomicBool>,
+    can_redo: Arc<AtomicBool>,
+    rx: mpsc::Receiver<UndoMessage>,
+}
+
+/// Handle to the background undo/redo task. Cloning is cheap (a channel
+/// sender plus two shared atomic flags). TODO(review): consider sharing a
+/// single instance via `Arc` instead of cloning, as originally noted.
+#[derive(Clone)]
+pub struct UndoManager {
+    tx: mpsc::Sender<UndoMessage>,
+    can_undo: Arc<AtomicBool>,
+    can_redo: Arc<AtomicBool>,
 }
 
 impl UndoManager {
-    pub fn new(workspace: WeakEntity<Workspace>) -> Self {
-        Self::new_with_limit(workspace, MAX_UNDO_OPERATIONS)
+    pub fn new(
+        workspace: WeakEntity<Workspace>,
+        panel: WeakEntity<ProjectPanel>,
+        cx: &App,
+    ) -> Self {
+        let (tx, rx) = mpsc::channel(1024);
+        let inner = Inner::new(workspace, panel, rx);
+
+        let this = Self {
+            tx,
+            can_undo: Arc::clone(&inner.can_undo),
+            can_redo: Arc::clone(&inner.can_redo),
+        };
+
+        cx.spawn(async move |cx| inner.manage_undo_and_redo(cx.clone()).await)
+            .detach();
+
+        this
     }
 
-    pub fn new_with_limit(workspace: WeakEntity<Workspace>, limit: usize) -> Self {
+    pub fn undo(&mut self) -> Result<()> {
+        self.tx
+            .try_send(UndoMessage::Undo)
+            .context("Undo and redo task can not keep up")
+    }
+    pub fn redo(&mut self) -> Result<()> {
+        self.tx
+            .try_send(UndoMessage::Redo)
+            .context("Undo and redo task can not keep up")
+    }
+    pub fn record(&mut self, changes: impl IntoIterator<Item = Change>) -> Result<()> {
+        self.tx
+            .try_send(UndoMessage::Changed(changes.into_iter().collect()))
+            .context("Undo and redo task can not keep up")
+    }
+    /// Advisory only (for the UI): an undo may still fail if concurrent file
+    /// operations are happening.
+    pub fn can_undo(&self) -> bool {
+        self.can_undo.load(Ordering::Relaxed)
+    }
+    /// Advisory only (for the UI): a redo may still fail if concurrent file
+    /// operations are happening.
+    pub fn can_redo(&self) -> bool {
+        self.can_redo.load(Ordering::Relaxed)
+    }
+}
+
+#[derive(Debug)]
+enum UndoMessage {
+    Changed(Vec<Change>),
+    Undo,
+    Redo,
+}
+
+impl UndoMessage {
+    fn error_title(&self) -> &'static str {
+        match self {
+            UndoMessage::Changed(_) => {
+                "this is a bug in the manage_undo_and_redo task please report"
+            }
+            UndoMessage::Undo => "Undo failed",
+            UndoMessage::Redo => "Redo failed",
+        }
+    }
+}
+
+impl Inner {
+    async fn manage_undo_and_redo(mut self, mut cx: AsyncApp) {
+        loop {
+            let Ok(new) = self.rx.recv().await else {
+                // project panel got closed
+                return;
+            };
+
+            let error_title = new.error_title();
+            let res = match new {
+                UndoMessage::Changed(changes) => {
+                    self.record(changes);
+                    Ok(())
+                }
+                UndoMessage::Undo => {
+                    let res = self.undo(&mut cx).await;
+                    let _ = self.panel.update(&mut cx, |_, cx| cx.notify());
+                    res
+                }
+                UndoMessage::Redo => {
+                    let res = self.redo(&mut cx).await;
+                    let _ = self.panel.update(&mut cx, |_, cx| cx.notify());
+                    res
+                }
+            };
+
+            if let Err(e) = res {
+                Self::show_error(error_title, self.workspace.clone(), e.to_string(), &mut cx);
+            }
+
+            self.can_undo.store(self.can_undo(), Ordering::Relaxed);
+            self.can_redo.store(self.can_redo(), Ordering::Relaxed);
+        }
+    }
+}
+
+impl Inner {
+    pub fn new(
+        workspace: WeakEntity<Workspace>,
+        panel: WeakEntity<ProjectPanel>,
+        rx: mpsc::Receiver<UndoMessage>,
+    ) -> Self {
+        Self::new_with_limit(workspace, panel, MAX_UNDO_OPERATIONS, rx)
+    }
+
+    pub fn new_with_limit(
+        workspace: WeakEntity<Workspace>,
+        panel: WeakEntity<ProjectPanel>,
+        limit: usize,
+        rx: mpsc::Receiver<UndoMessage>,
+    ) -> Self {
         Self {
             workspace,
+            panel,
+            history: VecDeque::new(),
+            cursor: 0usize,
             limit,
-            stack: VecDeque::new(),
+            can_undo: Arc::new(AtomicBool::new(false)),
+            can_redo: Arc::new(AtomicBool::new(false)),
+            rx,
         }
     }
 
     pub fn can_undo(&self) -> bool {
-        !self.stack.is_empty()
+        self.cursor > 0
     }
 
-    pub fn undo(&mut self, cx: &mut App) {
-        if let Some(operation) = self.stack.pop_back() {
-            let task = self.revert_operation(operation, cx);
-            let workspace = self.workspace.clone();
-
-            cx.spawn(async move |cx| {
-                let errors = task.await;
-                if !errors.is_empty() {
-                    cx.update(|cx| {
-                        let messages = errors
-                            .iter()
-                            .map(|err| SharedString::from(err.to_string()))
-                            .collect();
-
-                        Self::show_errors(workspace, messages, cx)
-                    })
-                }
-            })
-            .detach();
+    pub fn can_redo(&self) -> bool {
+        self.cursor < self.history.len()
+    }
+
+    pub async fn undo(&mut self, cx: &mut AsyncApp) -> Result<()> {
+        if !self.can_undo() {
+            return Ok(());
         }
+
+        // Undo failure:
+        //
+        // History
+        // 	0 Created(src/main.rs)
+        // 	1 Renamed(README.md, readme.md) ─┐
+        //     2 +++cursor+++                │(before the cursor)
+        // 	2 Trashed(TrashedEntry(1))       │
+        //                                   │
+        // User Operation  Undo              v
+        // Failed execute  Renamed(README.md, readme.md) ───> Rename(readme.md, README.md)
+        // Record nothing
+        // History
+        // 	0 Created(src/main.rs)
+        //     1 +++cursor+++
+        // 	1 Trashed(TrashedEntry(1)) -----
+        //                                  |(at the cursor)
+        // User Operation  Redo             v
+        // Execute         Trashed(TrashedEntry(1)) ────────> Restore(TrashedEntry(1))
+        // Record          Restored(ProjectPath)
+        // History
+        // 	0 Created(src/main.rs)
+        // 	1 Restored(ProjectPath)
+        //  1 +++cursor+++
+
+        // Move the cursor back unconditionally: we remove the entry just
+        // before the cursor below, so if the undo fails and that entry is not
+        // re-inserted, a cursor that had not moved would point past the end of
+        // the history.
+        let before_cursor = self.cursor - 1; // see docs above
+        self.cursor -= 1; // take a step back into the past
+
+        // If undoing fails, the user would be in a stuck state from which
+        // manual intervention would likely be needed in order to undo. As such,
+        // we remove the change from the `history` even before attempting to
+        // execute its inversion.
+        let undo_change = self
+            .history
+            .remove(before_cursor)
+            .expect("we can undo")
+            .to_inverse()
+            .execute(self, cx)
+            .await?;
+        self.history.insert(before_cursor, undo_change);
+        Ok(())
     }
 
-    pub fn record(&mut self, operation: ProjectPanelOperation) {
-        if self.stack.len() >= self.limit {
-            self.stack.pop_front();
+    pub async fn redo(&mut self, cx: &mut AsyncApp) -> Result<()> {
+        if !self.can_redo() {
+            return Ok(());
         }
 
-        self.stack.push_back(operation);
+        // If redoing fails, the user would be in a stuck state from which
+        // manual intervention would likely be needed in order to redo. As such,
+        // we remove the change from the `history` even before attempting to
+        // execute its inversion.
+        let redo_change = self
+            .history
+            .remove(self.cursor)
+            .expect("we can redo")
+            .to_inverse()
+            .execute(self, cx)
+            .await?;
+        self.history.insert(self.cursor, redo_change);
+        self.cursor += 1;
+        Ok(())
     }
 
-    pub fn record_batch(&mut self, operations: impl IntoIterator<Item = ProjectPanelOperation>) {
-        let mut operations = operations.into_iter().collect::<Vec<_>>();
-        let operation = match operations.len() {
+    /// Passed-in changes are always recorded as a single undoable step.
+    pub fn record(&mut self, mut changes: Vec<Change>) {
+        let change = match changes.len() {
             0 => return,
-            1 => operations.pop().unwrap(),
-            _ => ProjectPanelOperation::Batch(operations),
+            1 => changes.remove(0),
+            _ => Change::Batched(changes),
         };
 
-        self.record(operation);
+        // When recording a new change, discard any changes that could still be
+        // redone.
+        if self.cursor < self.history.len() {
+            self.history.drain(self.cursor..);
+        }
+
+        // Ensure that the number of recorded changes does not exceed the
+        // maximum amount of tracked changes.
+        if self.history.len() >= self.limit {
+            self.history.pop_front();
+        } else {
+            self.cursor += 1;
+        }
+
+        self.history.push_back(change);
     }
 
-    /// Attempts to revert the provided `operation`, returning a vector of errors
-    /// in case there was any failure while reverting the operation.
-    ///
-    /// For all operations other than [`crate::undo::ProjectPanelOperation::Batch`], a maximum
-    /// of one error is returned.
-    fn revert_operation(
+    async fn rename(
         &self,
-        operation: ProjectPanelOperation,
-        cx: &mut App,
-    ) -> Task<Vec<anyhow::Error>> {
-        match operation {
-            ProjectPanelOperation::Create { project_path } => {
-                let Some(workspace) = self.workspace.upgrade() else {
-                    return Task::ready(vec![anyhow!("Failed to obtain workspace.")]);
-                };
-
-                let result = workspace.update(cx, |workspace, cx| {
-                    workspace.project().update(cx, |project, cx| {
-                        let entry_id = project
-                            .entry_for_path(&project_path, cx)
-                            .map(|entry| entry.id)
-                            .ok_or_else(|| anyhow!("No entry for path."))?;
-
-                        project
-                            .delete_entry(entry_id, true, cx)
-                            .ok_or_else(|| anyhow!("Failed to trash entry."))
-                    })
-                });
-
-                let task = match result {
-                    Ok(task) => task,
-                    Err(err) => return Task::ready(vec![err]),
-                };
-
-                cx.spawn(async move |_| match task.await {
-                    Ok(_) => vec![],
-                    Err(err) => vec![err],
-                })
-            }
-            ProjectPanelOperation::Rename { old_path, new_path } => {
-                let Some(workspace) = self.workspace.upgrade() else {
-                    return Task::ready(vec![anyhow!("Failed to obtain workspace.")]);
-                };
-
-                let result = workspace.update(cx, |workspace, cx| {
-                    workspace.project().update(cx, |project, cx| {
-                        let entry_id = project
-                            .entry_for_path(&new_path, cx)
-                            .map(|entry| entry.id)
-                            .ok_or_else(|| anyhow!("No entry for path."))?;
-
-                        Ok(project.rename_entry(entry_id, old_path.clone(), cx))
-                    })
-                });
-
-                let task = match result {
-                    Ok(task) => task,
-                    Err(err) => return Task::ready(vec![err]),
-                };
-
-                cx.spawn(async move |_| match task.await {
-                    Ok(_) => vec![],
-                    Err(err) => vec![err],
+        from: &ProjectPath,
+        to: &ProjectPath,
+        cx: &mut AsyncApp,
+    ) -> Result<CreatedEntry> {
+        let Some(workspace) = self.workspace.upgrade() else {
+            return Err(anyhow!("Failed to obtain workspace."));
+        };
+
+        let res: Result<Task<Result<CreatedEntry>>> = workspace.update(cx, |workspace, cx| {
+            workspace.project().update(cx, |project, cx| {
+                let entry_id = project
+                    .entry_for_path(from, cx)
+                    .map(|entry| entry.id)
+                    .ok_or_else(|| anyhow!("No entry for path."))?;
+
+                Ok(project.rename_entry(entry_id, to.clone(), cx))
+            })
+        });
+
+        res?.await
+    }
+
+    async fn trash(&self, project_path: &ProjectPath, cx: &mut AsyncApp) -> Result<TrashedEntry> {
+        let Some(workspace) = self.workspace.upgrade() else {
+            return Err(anyhow!("Failed to obtain workspace."));
+        };
+
+        workspace
+            .update(cx, |workspace, cx| {
+                workspace.project().update(cx, |project, cx| {
+                    let entry_id = project
+                        .entry_for_path(&project_path, cx)
+                        .map(|entry| entry.id)
+                        .ok_or_else(|| anyhow!("No entry for path."))?;
+
+                    project
+                        .delete_entry(entry_id, true, cx)
+                        .ok_or_else(|| anyhow!("Worktree entry should exist"))
                 })
-            }
-            ProjectPanelOperation::Batch(operations) => {
-                // When reverting operations in a batch, we reverse the order of
-                // operations to handle dependencies between them. For example,
-                // if a batch contains the following order of operations:
-                //
-                // 1. Create `src/`
-                // 2. Create `src/main.rs`
-                //
-                // If we first try to revert the directory creation, it would
-                // fail because there's still files inside the directory.
-                // Operations are also reverted sequentially in order to avoid
-                // this same problem.
-                let tasks: Vec<_> = operations
-                    .into_iter()
-                    .rev()
-                    .map(|operation| self.revert_operation(operation, cx))
-                    .collect();
-
-                cx.spawn(async move |_| {
-                    let mut errors = Vec::new();
-                    for task in tasks {
-                        errors.extend(task.await);
-                    }
-                    errors
+            })?
+            .await
+            .and_then(|entry| {
+                entry.ok_or_else(|| anyhow!("When trashing we should always get a trashentry"))
+            })
+    }
+
+    async fn restore(
+        &self,
+        worktree_id: WorktreeId,
+        trashed_entry: TrashedEntry,
+        cx: &mut AsyncApp,
+    ) -> Result<ProjectPath> {
+        let Some(workspace) = self.workspace.upgrade() else {
+            return Err(anyhow!("Failed to obtain workspace."));
+        };
+
+        workspace
+            .update(cx, |workspace, cx| {
+                workspace.project().update(cx, |project, cx| {
+                    project.restore_entry(worktree_id, trashed_entry, cx)
                 })
-            }
-        }
+            })
+            .await
     }
 
-    /// Displays a notification with the list of provided errors ensuring that,
-    /// when more than one error is provided, which can be the case when dealing
-    /// with undoing a [`crate::undo::ProjectPanelOperation::Batch`], a list is
-    /// displayed with each of the errors, instead of a single message.
-    fn show_errors(workspace: WeakEntity<Workspace>, messages: Vec<SharedString>, cx: &mut App) {
+    /// Displays a notification with the provided `title` and `error`.
+    fn show_error(
+        title: impl Into<SharedString>,
+        workspace: WeakEntity<Workspace>,
+        error: String,
+        cx: &mut AsyncApp,
+    ) {
         workspace
             .update(cx, move |workspace, cx| {
                 let notification_id =
                     NotificationId::Named(SharedString::new_static("project_panel_undo"));
 
                 workspace.show_notification(notification_id, cx, move |cx| {
-                    cx.new(|cx| {
-                        if let [err] = messages.as_slice() {
-                            MessageNotification::new(err.to_string(), cx)
-                                .with_title("Failed to undo Project Panel Operation")
-                        } else {
-                            MessageNotification::new_from_builder(cx, move |_, _| {
-                                v_flex()
-                                    .gap_1()
-                                    .children(
-                                        messages
-                                            .iter()
-                                            .map(|message| Label::new(format!("- {message}"))),
-                                    )
-                                    .into_any_element()
-                            })
-                            .with_title("Failed to undo Project Panel Operations")
-                        }
-                    })
+                    cx.new(|cx| MessageNotification::new(error, cx).with_title(title))
                 })
             })
             .ok();
     }
 }
-
-#[cfg(test)]
-mod test {
-    use crate::{
-        ProjectPanel, project_panel_tests,
-        undo::{ProjectPanelOperation, UndoManager},
-    };
-    use gpui::{Entity, TestAppContext, VisualTestContext};
-    use project::{FakeFs, Project, ProjectPath};
-    use std::sync::Arc;
-    use util::rel_path::rel_path;
-    use workspace::MultiWorkspace;
-
-    struct TestContext {
-        project: Entity<Project>,
-        panel: Entity<ProjectPanel>,
-    }
-
-    async fn init_test(cx: &mut TestAppContext) -> TestContext {
-        project_panel_tests::init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
-        let window =
-            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-        let workspace = window
-            .read_with(cx, |mw, _| mw.workspace().clone())
-            .unwrap();
-        let cx = &mut VisualTestContext::from_window(window.into(), cx);
-        let panel = workspace.update_in(cx, ProjectPanel::new);
-        cx.run_until_parked();
-
-        TestContext { project, panel }
-    }
-
-    #[gpui::test]
-    async fn test_limit(cx: &mut TestAppContext) {
-        let test_context = init_test(cx).await;
-        let worktree_id = test_context.project.update(cx, |project, cx| {
-            project.visible_worktrees(cx).next().unwrap().read(cx).id()
-        });
-
-        let build_create_operation = |file_name: &str| ProjectPanelOperation::Create {
-            project_path: ProjectPath {
-                path: Arc::from(rel_path(file_name)),
-                worktree_id,
-            },
-        };
-
-        // Since we're updating the `ProjectPanel`'s undo manager with one whose
-        // limit is 3 operations, we only need to create 4 operations which
-        // we'll record, in order to confirm that the oldest operation is
-        // evicted.
-        let operation_a = build_create_operation("file_a.txt");
-        let operation_b = build_create_operation("file_b.txt");
-        let operation_c = build_create_operation("file_c.txt");
-        let operation_d = build_create_operation("file_d.txt");
-
-        test_context.panel.update(cx, move |panel, _cx| {
-            panel.undo_manager = UndoManager::new_with_limit(panel.workspace.clone(), 3);
-            panel.undo_manager.record(operation_a);
-            panel.undo_manager.record(operation_b);
-            panel.undo_manager.record(operation_c);
-            panel.undo_manager.record(operation_d);
-
-            assert_eq!(panel.undo_manager.stack.len(), 3);
-        });
-    }
-}

crates/project_symbols/src/project_symbols.rs 🔗

@@ -288,7 +288,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
         let custom_highlights = string_match
             .positions
             .iter()
-            .map(|pos| (*pos..pos + 1, highlight_style));
+            .map(|pos| (*pos..label.ceil_char_boundary(pos + 1), highlight_style));
 
         let highlights = gpui::combine_highlights(custom_highlights, syntax_runs);
 
@@ -299,9 +299,12 @@ impl PickerDelegate for ProjectSymbolsDelegate {
                 .toggle_state(selected)
                 .child(
                     v_flex()
-                        .child(LabelLike::new().child(
-                            StyledText::new(label).with_default_highlights(&text_style, highlights),
-                        ))
+                        .child(
+                            LabelLike::new().child(
+                                StyledText::new(&label)
+                                    .with_default_highlights(&text_style, highlights),
+                            ),
+                        )
                         .child(
                             h_flex()
                                 .child(Label::new(path).size(LabelSize::Small).color(Color::Muted))
@@ -483,6 +486,106 @@ mod tests {
         });
     }
 
+    #[gpui::test]
+    async fn test_project_symbols_renders_utf8_match(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({ "test.rs": "" }))
+            .await;
+
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+
+        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+        language_registry.add(Arc::new(Language::new(
+            LanguageConfig {
+                name: "Rust".into(),
+                matcher: LanguageMatcher {
+                    path_suffixes: vec!["rs".to_string()],
+                    ..Default::default()
+                },
+                ..Default::default()
+            },
+            None,
+        )));
+        let mut fake_servers = language_registry.register_fake_lsp(
+            "Rust",
+            FakeLspAdapter {
+                capabilities: lsp::ServerCapabilities {
+                    workspace_symbol_provider: Some(OneOf::Left(true)),
+                    ..Default::default()
+                },
+                ..Default::default()
+            },
+        );
+
+        let _buffer = project
+            .update(cx, |project, cx| {
+                project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
+            })
+            .await
+            .unwrap();
+
+        let fake_symbols = [symbol("안녕", path!("/dir/test.rs"))];
+        let fake_server = fake_servers.next().await.unwrap();
+        fake_server.set_request_handler::<lsp::WorkspaceSymbolRequest, _, _>(
+            move |params: lsp::WorkspaceSymbolParams, cx| {
+                let executor = cx.background_executor().clone();
+                let fake_symbols = fake_symbols.clone();
+                async move {
+                    let candidates = fake_symbols
+                        .iter()
+                        .enumerate()
+                        .map(|(id, symbol)| StringMatchCandidate::new(id, &symbol.name))
+                        .collect::<Vec<_>>();
+                    let matches = fuzzy::match_strings(
+                        &candidates,
+                        &params.query,
+                        true,
+                        true,
+                        100,
+                        &Default::default(),
+                        executor,
+                    )
+                    .await;
+
+                    Ok(Some(lsp::WorkspaceSymbolResponse::Flat(
+                        matches
+                            .into_iter()
+                            .map(|mat| fake_symbols[mat.candidate_id].clone())
+                            .collect(),
+                    )))
+                }
+            },
+        );
+
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+        let symbols = cx.new_window_entity(|window, cx| {
+            Picker::uniform_list(
+                ProjectSymbolsDelegate::new(workspace.downgrade(), project.clone()),
+                window,
+                cx,
+            )
+        });
+
+        symbols.update_in(cx, |p, window, cx| {
+            p.update_matches("안".to_string(), window, cx);
+        });
+
+        cx.run_until_parked();
+        symbols.read_with(cx, |symbols, _| {
+            assert_eq!(symbols.delegate.matches.len(), 1);
+            assert_eq!(symbols.delegate.matches[0].string, "안녕");
+        });
+
+        symbols.update_in(cx, |p, window, cx| {
+            assert!(p.delegate.render_match(0, false, window, cx).is_some());
+        });
+    }
+
     fn init_test(cx: &mut TestAppContext) {
         cx.update(|cx| {
             let store = SettingsStore::test(cx);

crates/recent_projects/src/recent_projects.rs 🔗

@@ -99,27 +99,40 @@ pub async fn get_recent_projects(
         .await
         .unwrap_or_default();
 
-    let entries: Vec<RecentProjectEntry> = workspaces
+    let filtered: Vec<_> = workspaces
         .into_iter()
         .filter(|(id, _, _, _)| Some(*id) != current_workspace_id)
         .filter(|(_, location, _, _)| matches!(location, SerializedWorkspaceLocation::Local))
+        .collect();
+
+    let mut all_paths: Vec<PathBuf> = filtered
+        .iter()
+        .flat_map(|(_, _, path_list, _)| path_list.paths().iter().cloned())
+        .collect();
+    all_paths.sort();
+    all_paths.dedup();
+    let path_details =
+        util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| {
+            project::path_suffix(path, detail)
+        });
+    let path_detail_map: std::collections::HashMap<PathBuf, usize> =
+        all_paths.into_iter().zip(path_details).collect();
+
+    let entries: Vec<RecentProjectEntry> = filtered
+        .into_iter()
         .map(|(workspace_id, _, path_list, timestamp)| {
             let paths: Vec<PathBuf> = path_list.paths().to_vec();
             let ordered_paths: Vec<&PathBuf> = path_list.ordered_paths().collect();
 
-            let name = if ordered_paths.len() == 1 {
-                ordered_paths[0]
-                    .file_name()
-                    .map(|n| n.to_string_lossy().to_string())
-                    .unwrap_or_else(|| ordered_paths[0].to_string_lossy().to_string())
-            } else {
-                ordered_paths
-                    .iter()
-                    .filter_map(|p| p.file_name())
-                    .map(|n| n.to_string_lossy().to_string())
-                    .collect::<Vec<_>>()
-                    .join(", ")
-            };
+            let name = ordered_paths
+                .iter()
+                .map(|p| {
+                    let detail = path_detail_map.get(*p).copied().unwrap_or(0);
+                    project::path_suffix(p, detail)
+                })
+                .filter(|s| !s.is_empty())
+                .collect::<Vec<_>>()
+                .join(", ");
 
             let full_path = ordered_paths
                 .iter()
@@ -172,6 +185,19 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec<OpenFolderEntry> {
             .map(|wt| wt.read(cx).id())
     });
 
+    let mut all_paths: Vec<PathBuf> = visible_worktrees
+        .iter()
+        .map(|wt| wt.read(cx).abs_path().to_path_buf())
+        .collect();
+    all_paths.sort();
+    all_paths.dedup();
+    let path_details =
+        util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| {
+            project::path_suffix(path, detail)
+        });
+    let path_detail_map: std::collections::HashMap<PathBuf, usize> =
+        all_paths.into_iter().zip(path_details).collect();
+
     let git_store = project.git_store().read(cx);
     let repositories: Vec<_> = git_store.repositories().values().cloned().collect();
 
@@ -180,8 +206,9 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec<OpenFolderEntry> {
         .map(|worktree| {
             let worktree_ref = worktree.read(cx);
             let worktree_id = worktree_ref.id();
-            let name = SharedString::from(worktree_ref.root_name().as_unix_str().to_string());
             let path = worktree_ref.abs_path().to_path_buf();
+            let detail = path_detail_map.get(&path).copied().unwrap_or(0);
+            let name = SharedString::from(project::path_suffix(&path, detail));
             let branch = get_branch_for_worktree(worktree_ref, &repositories, cx);
             let is_active = active_worktree_id == Some(worktree_id);
             OpenFolderEntry {

crates/remote_connection/src/remote_connection.rs 🔗

@@ -574,6 +574,23 @@ pub fn connect_with_modal(
     })
 }
 
+/// Dismisses any active [`RemoteConnectionModal`] on the given workspace.
+///
+/// This should be called after a remote connection attempt completes
+/// (success or failure) when the modal was shown on a workspace that may
+/// outlive the connection flow — for example, when the modal is shown
+/// on a local workspace before switching to a newly-created remote
+/// workspace.
+pub fn dismiss_connection_modal(workspace: &Entity<Workspace>, cx: &mut gpui::AsyncWindowContext) {
+    workspace
+        .update_in(cx, |workspace, _window, cx| {
+            if let Some(modal) = workspace.active_modal::<RemoteConnectionModal>(cx) {
+                modal.update(cx, |modal, cx| modal.finished(cx));
+            }
+        })
+        .ok();
+}
+
 /// Creates a [`RemoteClient`] by reusing an existing connection from the
 /// global pool. No interactive UI is shown. This should only be called
 /// when [`remote::has_active_connection`] returns `true`.

crates/repl/src/notebook/cell.rs 🔗

@@ -378,6 +378,7 @@ impl MarkdownCell {
             editor.set_show_gutter(false, cx);
             editor.set_text_style_refinement(refinement);
             editor.set_use_modal_editing(true);
+            editor.disable_mouse_wheel_zoom();
             editor
         });
 
@@ -641,6 +642,7 @@ impl CodeCell {
                 ..Default::default()
             };
 
+            editor.disable_mouse_wheel_zoom();
             editor.set_show_gutter(false, cx);
             editor.set_text_style_refinement(refinement);
             editor.set_use_modal_editing(true);
@@ -718,6 +720,7 @@ impl CodeCell {
                 ..Default::default()
             };
 
+            editor.disable_mouse_wheel_zoom();
             editor.set_text(source.clone(), window, cx);
             editor.set_show_gutter(false, cx);
             editor.set_text_style_refinement(refinement);

crates/settings/src/vscode_import.rs 🔗

@@ -286,6 +286,7 @@ impl VsCodeSettings {
             }),
             rounded_selection: self.read_bool("editor.roundedSelection"),
             scroll_beyond_last_line: None,
+            mouse_wheel_zoom: self.read_bool("editor.mouseWheelZoom"),
             scroll_sensitivity: self.read_f32("editor.mouseWheelScrollSensitivity"),
             scrollbar: self.scrollbar_content(),
             search: self.search_content(),

crates/settings_content/src/agent.rs 🔗

@@ -33,7 +33,7 @@ pub enum NewThreadLocation {
     NewWorktree,
 }
 
-/// Where to position the sidebar.
+/// Where to position the threads sidebar.
 #[derive(
     Clone,
     Copy,
@@ -114,7 +114,7 @@ pub struct AgentSettingsContent {
     ///
     /// Default: true
     pub flexible: Option<bool>,
-    /// Where to position the sidebar.
+    /// Where to position the threads sidebar.
     ///
     /// Default: left
     pub sidebar_side: Option<SidebarDockPosition>,

crates/settings_content/src/editor.rs 🔗

@@ -89,6 +89,11 @@ pub struct EditorSettingsContent {
     /// Default: 1.0
     #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
     pub scroll_sensitivity: Option<f32>,
+    /// Whether to zoom the editor font size with the mouse wheel
+    /// while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms).
+    ///
+    /// Default: false
+    pub mouse_wheel_zoom: Option<bool>,
     /// Scroll sensitivity multiplier for fast scrolling. This multiplier is applied
     /// to both the horizontal and vertical delta values while scrolling. Fast scrolling
     /// happens when a user holds the alt or option key while scrolling.

crates/settings_content/src/language_model.rs 🔗

@@ -39,7 +39,7 @@ pub struct AnthropicSettingsContent {
 pub struct AnthropicAvailableModel {
     /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
     pub name: String,
-    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
+    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the agent panel.
     pub display_name: Option<String>,
     /// The model's context window size.
     pub max_tokens: u64,
@@ -109,7 +109,7 @@ pub struct OllamaSettingsContent {
 pub struct OllamaAvailableModel {
     /// The model name in the Ollama API (e.g. "llama3.2:latest")
     pub name: String,
-    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
+    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the agent panel.
     pub display_name: Option<String>,
     /// The Context Length parameter to the model (aka num_ctx or n_ctx)
     pub max_tokens: u64,
@@ -388,7 +388,7 @@ pub struct ZedDotDevAvailableModel {
     pub provider: ZedDotDevAvailableProvider,
     /// The model's name in the provider's API. e.g. claude-3-5-sonnet-20240620
     pub name: String,
-    /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+    /// The name displayed in the UI, such as in the agent panel model dropdown menu.
     pub display_name: Option<String>,
     /// The size of the context window, indicating the maximum number of tokens the model can process.
     pub max_tokens: usize,

crates/settings_ui/src/page_data.rs 🔗

@@ -1571,7 +1571,7 @@ fn editor_page() -> SettingsPage {
         ]
     }
 
-    fn scrolling_section() -> [SettingsPageItem; 8] {
+    fn scrolling_section() -> [SettingsPageItem; 9] {
         [
             SettingsPageItem::SectionHeader("Scrolling"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -1632,6 +1632,19 @@ fn editor_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Mouse Wheel Zoom",
+                description: "Whether to zoom the editor font size with the mouse wheel while holding the primary modifier key.",
+                field: Box::new(SettingField {
+                    json_path: Some("mouse_wheel_zoom"),
+                    pick: |settings_content| settings_content.editor.mouse_wheel_zoom.as_ref(),
+                    write: |settings_content, value| {
+                        settings_content.editor.mouse_wheel_zoom = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Fast Scroll Sensitivity",
                 description: "Fast scroll sensitivity multiplier for both horizontal and vertical scrolling.",
@@ -7186,7 +7199,7 @@ fn collaboration_page() -> SettingsPage {
 }
 
 fn ai_page(cx: &App) -> SettingsPage {
-    fn general_section() -> [SettingsPageItem; 2] {
+    fn general_section() -> [SettingsPageItem; 3] {
         [
             SettingsPageItem::SectionHeader("General"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -7202,6 +7215,19 @@ fn ai_page(cx: &App) -> SettingsPage {
                 metadata: None,
                 files: USER | PROJECT,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Threads Sidebar Side",
+                description: "Which side of the window the threads sidebar appears on.",
+                field: Box::new(SettingField {
+                    json_path: Some("agent.sidebar_side"),
+                    pick: |settings_content| settings_content.agent.as_ref()?.sidebar_side.as_ref(),
+                    write: |settings_content, value| {
+                        settings_content.agent.get_or_insert_default().sidebar_side = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
         ]
     }
 

crates/settings_ui/src/pages/tool_permissions_setup.rs 🔗

@@ -69,7 +69,7 @@ const TOOLS: &[ToolInfo] = &[
         regex_explanation: "Patterns are matched against the URL being fetched.",
     },
     ToolInfo {
-        id: "web_search",
+        id: "search_web",
         name: "Web Search",
         description: "Web search queries",
         regex_explanation: "Patterns are matched against the search query.",
@@ -309,7 +309,7 @@ fn get_tool_render_fn(
         "create_directory" => render_create_directory_tool_config,
         "save_file" => render_save_file_tool_config,
         "fetch" => render_fetch_tool_config,
-        "web_search" => render_web_search_tool_config,
+        "search_web" => render_web_search_tool_config,
         "restore_file_from_disk" => render_restore_file_from_disk_tool_config,
         _ => render_terminal_tool_config, // fallback
     }
@@ -1389,7 +1389,7 @@ tool_config_page_fn!(render_move_path_tool_config, "move_path");
 tool_config_page_fn!(render_create_directory_tool_config, "create_directory");
 tool_config_page_fn!(render_save_file_tool_config, "save_file");
 tool_config_page_fn!(render_fetch_tool_config, "fetch");
-tool_config_page_fn!(render_web_search_tool_config, "web_search");
+tool_config_page_fn!(render_web_search_tool_config, "search_web");
 tool_config_page_fn!(
     render_restore_file_from_disk_tool_config,
     "restore_file_from_disk"

crates/settings_ui/src/settings_ui.rs 🔗

@@ -474,6 +474,7 @@ fn init_renderers(cx: &mut App) {
         .add_basic_renderer::<settings::DockSide>(render_dropdown)
         .add_basic_renderer::<settings::TerminalDockPosition>(render_dropdown)
         .add_basic_renderer::<settings::DockPosition>(render_dropdown)
+        .add_basic_renderer::<settings::SidebarDockPosition>(render_dropdown)
         .add_basic_renderer::<settings::GitGutterSetting>(render_dropdown)
         .add_basic_renderer::<settings::GitHunkStyleSetting>(render_dropdown)
         .add_basic_renderer::<settings::GitPathStyle>(render_dropdown)

crates/sidebar/src/sidebar.rs 🔗

@@ -4,7 +4,7 @@ use acp_thread::ThreadStatus;
 use action_log::DiffStats;
 use agent_client_protocol::{self as acp};
 use agent_settings::AgentSettings;
-use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore};
+use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore, ThreadWorktreePaths};
 use agent_ui::thread_worktree_archive;
 use agent_ui::threads_archive_view::{
     ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp,
@@ -283,10 +283,8 @@ impl ListEntry {
                 }
             }
             ListEntry::ProjectHeader { key, .. } => multi_workspace
-                .workspaces()
-                .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == *key.path_list())
+                .workspaces_for_project_group(key, cx)
                 .cloned()
-                .into_iter()
                 .collect(),
             ListEntry::ViewMore { .. } => Vec::new(),
         }
@@ -365,35 +363,63 @@ fn workspace_path_list(workspace: &Entity<Workspace>, cx: &App) -> PathList {
 ///
 /// For each path in the thread's `folder_paths`, produces a
 /// [`WorktreeInfo`] with a short display name, full path, and whether
-/// the worktree is the main checkout or a linked git worktree.
-fn worktree_info_from_thread_paths(
-    folder_paths: &PathList,
-    group_key: &project::ProjectGroupKey,
-) -> impl Iterator<Item = WorktreeInfo> {
-    let main_paths = group_key.path_list().paths();
-    folder_paths.paths().iter().filter_map(|path| {
-        let is_main = main_paths.iter().any(|mp| mp.as_path() == path.as_path());
-        if is_main {
-            let name = path.file_name()?.to_string_lossy().to_string();
-            Some(WorktreeInfo {
-                name: SharedString::from(name),
-                full_path: SharedString::from(path.display().to_string()),
+/// the worktree is the main checkout or a linked git worktree. When
+/// multiple main paths exist and a linked worktree's short name alone
+/// wouldn't identify which main project it belongs to, the main project
+/// name is prefixed for disambiguation (e.g. `project:feature`).
+///
+fn worktree_info_from_thread_paths(worktree_paths: &ThreadWorktreePaths) -> Vec<WorktreeInfo> {
+    let mut infos: Vec<WorktreeInfo> = Vec::new();
+    let mut linked_short_names: Vec<(SharedString, SharedString)> = Vec::new();
+    let mut unique_main_count = HashSet::new();
+
+    for (main_path, folder_path) in worktree_paths.ordered_pairs() {
+        unique_main_count.insert(main_path.clone());
+        let is_linked = main_path != folder_path;
+
+        if is_linked {
+            let short_name = linked_worktree_short_name(main_path, folder_path).unwrap_or_default();
+            let project_name = main_path
+                .file_name()
+                .map(|n| SharedString::from(n.to_string_lossy().to_string()))
+                .unwrap_or_default();
+            linked_short_names.push((short_name.clone(), project_name));
+            infos.push(WorktreeInfo {
+                name: short_name,
+                full_path: SharedString::from(folder_path.display().to_string()),
                 highlight_positions: Vec::new(),
-                kind: ui::WorktreeKind::Main,
-            })
+                kind: ui::WorktreeKind::Linked,
+            });
         } else {
-            let main_path = main_paths
-                .iter()
-                .find(|mp| mp.file_name() == path.file_name())
-                .or(main_paths.first())?;
-            Some(WorktreeInfo {
-                name: linked_worktree_short_name(main_path, path).unwrap_or_default(),
-                full_path: SharedString::from(path.display().to_string()),
+            let Some(name) = folder_path.file_name() else {
+                continue;
+            };
+            infos.push(WorktreeInfo {
+                name: SharedString::from(name.to_string_lossy().to_string()),
+                full_path: SharedString::from(folder_path.display().to_string()),
                 highlight_positions: Vec::new(),
-                kind: ui::WorktreeKind::Linked,
-            })
+                kind: ui::WorktreeKind::Main,
+            });
         }
-    })
+    }
+
+    // When the group has multiple main worktree paths and the thread's
+    // folder paths don't all share the same short name, prefix each
+    // linked worktree chip with its main project name so the user knows
+    // which project it belongs to.
+    let all_same_name = infos.len() > 1 && infos.iter().all(|i| i.name == infos[0].name);
+
+    if unique_main_count.len() > 1 && !all_same_name {
+        for (info, (_short_name, project_name)) in infos
+            .iter_mut()
+            .filter(|i| i.kind == ui::WorktreeKind::Linked)
+            .zip(linked_short_names.iter())
+        {
+            info.name = SharedString::from(format!("{}:{}", project_name, info.name));
+        }
+    }
+
+    infos
 }
 
 /// Shows a [`RemoteConnectionModal`] on the given workspace and establishes
@@ -441,6 +467,7 @@ pub struct Sidebar {
     _thread_switcher_subscriptions: Vec<gpui::Subscription>,
     pending_remote_thread_activation: Option<acp::SessionId>,
     view: SidebarView,
+    restoring_tasks: HashMap<acp::SessionId, Task<()>>,
     recent_projects_popover_handle: PopoverMenuHandle<SidebarRecentProjects>,
     project_header_menu_ix: Option<usize>,
     _subscriptions: Vec<gpui::Subscription>,
@@ -479,6 +506,34 @@ impl Sidebar {
                 MultiWorkspaceEvent::WorkspaceRemoved(_) => {
                     this.update_entries(cx);
                 }
+                MultiWorkspaceEvent::WorktreePathAdded {
+                    old_main_paths,
+                    added_path,
+                } => {
+                    let added_path = added_path.clone();
+                    ThreadMetadataStore::global(cx).update(cx, |store, cx| {
+                        store.change_worktree_paths(
+                            old_main_paths,
+                            |paths| paths.add_path(&added_path, &added_path),
+                            cx,
+                        );
+                    });
+                    this.update_entries(cx);
+                }
+                MultiWorkspaceEvent::WorktreePathRemoved {
+                    old_main_paths,
+                    removed_path,
+                } => {
+                    let removed_path = removed_path.clone();
+                    ThreadMetadataStore::global(cx).update(cx, |store, cx| {
+                        store.change_worktree_paths(
+                            old_main_paths,
+                            |paths| paths.remove_main_path(&removed_path),
+                            cx,
+                        );
+                    });
+                    this.update_entries(cx);
+                }
             },
         )
         .detach();
@@ -528,6 +583,7 @@ impl Sidebar {
             _thread_switcher_subscriptions: Vec::new(),
             pending_remote_thread_activation: None,
             view: SidebarView::default(),
+            restoring_tasks: HashMap::new(),
             recent_projects_popover_handle: PopoverMenuHandle::default(),
             project_header_menu_ix: None,
             _subscriptions: Vec::new(),
@@ -751,19 +807,26 @@ impl Sidebar {
         let host = project_group_key.host();
         let provisional_key = Some(project_group_key.clone());
         let active_workspace = multi_workspace.read(cx).workspace().clone();
+        let modal_workspace = active_workspace.clone();
 
-        multi_workspace
-            .update(cx, |this, cx| {
-                this.find_or_create_workspace(
-                    path_list,
-                    host,
-                    provisional_key,
-                    |options, window, cx| connect_remote(active_workspace, options, window, cx),
-                    window,
-                    cx,
-                )
-            })
-            .detach_and_log_err(cx);
+        let task = multi_workspace.update(cx, |this, cx| {
+            this.find_or_create_workspace(
+                path_list,
+                host,
+                provisional_key,
+                |options, window, cx| connect_remote(active_workspace, options, window, cx),
+                window,
+                cx,
+            )
+        });
+
+        cx.spawn_in(window, async move |_this, cx| {
+            let result = task.await;
+            remote_connection::dismiss_connection_modal(&modal_workspace, cx);
+            result?;
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
     }
 
     /// Rebuilds the sidebar contents from current workspace and thread state.
@@ -881,12 +944,27 @@ impl Sidebar {
             (icon, icon_from_external_svg)
         };
 
-        for (group_key, group_workspaces) in mw.project_groups(cx) {
+        let groups: Vec<_> = mw.project_groups(cx).collect();
+
+        let mut all_paths: Vec<PathBuf> = groups
+            .iter()
+            .flat_map(|(key, _)| key.path_list().paths().iter().cloned())
+            .collect();
+        all_paths.sort();
+        all_paths.dedup();
+        let path_details =
+            util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| {
+                project::path_suffix(path, detail)
+            });
+        let path_detail_map: HashMap<PathBuf, usize> =
+            all_paths.into_iter().zip(path_details).collect();
+
+        for (group_key, group_workspaces) in &groups {
             if group_key.path_list().paths().is_empty() {
                 continue;
             }
 
-            let label = group_key.display_name();
+            let label = group_key.display_name(&path_detail_map);
 
             let is_collapsed = self.collapsed_groups.contains(&group_key);
             let should_load_threads = !is_collapsed || !query.is_empty();
@@ -923,35 +1001,33 @@ impl Sidebar {
                 // Open; otherwise use Closed.
                 let resolve_workspace = |row: &ThreadMetadata| -> ThreadEntryWorkspace {
                     workspace_by_path_list
-                        .get(&row.folder_paths)
+                        .get(row.folder_paths())
                         .map(|ws| ThreadEntryWorkspace::Open((*ws).clone()))
                         .unwrap_or_else(|| ThreadEntryWorkspace::Closed {
-                            folder_paths: row.folder_paths.clone(),
+                            folder_paths: row.folder_paths().clone(),
                             project_group_key: group_key.clone(),
                         })
                 };
 
                 // Build a ThreadEntry from a metadata row.
-                let make_thread_entry = |row: ThreadMetadata,
-                                         workspace: ThreadEntryWorkspace|
-                 -> ThreadEntry {
-                    let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
-                    let worktrees: Vec<WorktreeInfo> =
-                        worktree_info_from_thread_paths(&row.folder_paths, &group_key).collect();
-                    ThreadEntry {
-                        metadata: row,
-                        icon,
-                        icon_from_external_svg,
-                        status: AgentThreadStatus::default(),
-                        workspace,
-                        is_live: false,
-                        is_background: false,
-                        is_title_generating: false,
-                        highlight_positions: Vec::new(),
-                        worktrees,
-                        diff_stats: DiffStats::default(),
-                    }
-                };
+                let make_thread_entry =
+                    |row: ThreadMetadata, workspace: ThreadEntryWorkspace| -> ThreadEntry {
+                        let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
+                        let worktrees = worktree_info_from_thread_paths(&row.worktree_paths);
+                        ThreadEntry {
+                            metadata: row,
+                            icon,
+                            icon_from_external_svg,
+                            status: AgentThreadStatus::default(),
+                            workspace,
+                            is_live: false,
+                            is_background: false,
+                            is_title_generating: false,
+                            highlight_positions: Vec::new(),
+                            worktrees,
+                            diff_stats: DiffStats::default(),
+                        }
+                    };
 
                 // Main code path: one query per group via main_worktree_paths.
                 // The main_worktree_paths column is set on all new threads and
@@ -987,7 +1063,7 @@ impl Sidebar {
 
                // Load any legacy threads for any single linked worktree of this project group.
                 let mut linked_worktree_paths = HashSet::new();
-                for workspace in &group_workspaces {
+                for workspace in group_workspaces {
                     if workspace.read(cx).visible_worktrees(cx).count() != 1 {
                         continue;
                     }
@@ -1166,12 +1242,15 @@ impl Sidebar {
                 // Emit a DraftThread entry when the active draft belongs to this group.
                 if is_draft_for_group {
                     if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry {
-                        let ws_path_list = workspace_path_list(draft_ws, cx);
-                        let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key);
+                        let ws_worktree_paths = ThreadWorktreePaths::from_project(
+                            draft_ws.read(cx).project().read(cx),
+                            cx,
+                        );
+                        let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths);
                         entries.push(ListEntry::DraftThread {
                             key: group_key.clone(),
                             workspace: None,
-                            worktrees: worktrees.collect(),
+                            worktrees,
                         });
                     }
                 }
@@ -1190,17 +1269,20 @@ impl Sidebar {
                         None
                     };
                     let thread_store = ThreadMetadataStore::global(cx);
-                    for ws in &group_workspaces {
+                    for ws in group_workspaces {
                         if Some(ws.entity_id()) == draft_ws_id {
                             continue;
                         }
-                        let ws_path_list = workspace_path_list(ws, cx);
+                        let ws_worktree_paths =
+                            ThreadWorktreePaths::from_project(ws.read(cx).project().read(cx), cx);
                         let has_linked_worktrees =
-                            worktree_info_from_thread_paths(&ws_path_list, &group_key)
+                            worktree_info_from_thread_paths(&ws_worktree_paths)
+                                .iter()
                                 .any(|wt| wt.kind == ui::WorktreeKind::Linked);
                         if !has_linked_worktrees {
                             continue;
                         }
+                        let ws_path_list = workspace_path_list(ws, cx);
                         let store = thread_store.read(cx);
                         let has_threads = store.entries_for_path(&ws_path_list).next().is_some()
                             || store
@@ -1210,8 +1292,7 @@ impl Sidebar {
                         if has_threads {
                             continue;
                         }
-                        let worktrees: Vec<WorktreeInfo> =
-                            worktree_info_from_thread_paths(&ws_path_list, &group_key).collect();
+                        let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths);
 
                         entries.push(ListEntry::DraftThread {
                             key: group_key.clone(),
@@ -1678,6 +1759,7 @@ impl Sidebar {
 
                 let menu =
                     ContextMenu::build_persistent(window, cx, move |menu, _window, menu_cx| {
+                        let weak_menu = menu_cx.weak_entity();
                         let mut menu = menu
                             .header("Project Folders")
                             .end_slot_action(Box::new(menu::EndSlot));
@@ -1690,6 +1772,7 @@ impl Sidebar {
                             let path = path.clone();
                             let project_group_key = project_group_key.clone();
                             let multi_workspace = multi_workspace.clone();
+                            let weak_menu = weak_menu.clone();
                             menu = menu.entry_with_end_slot_on_hover(
                                 name.clone(),
                                 None,
@@ -1706,6 +1789,7 @@ impl Sidebar {
                                             );
                                         })
                                         .ok();
+                                    weak_menu.update(cx, |_, cx| cx.emit(DismissEvent)).ok();
                                 },
                             );
                         }
@@ -1716,6 +1800,7 @@ impl Sidebar {
                             {
                                 let project_group_key = project_group_key.clone();
                                 let multi_workspace = multi_workspace.clone();
+                                let weak_menu = weak_menu.clone();
                                 move |window, cx| {
                                     multi_workspace
                                         .update(cx, |multi_workspace, cx| {
@@ -1726,13 +1811,13 @@ impl Sidebar {
                                             );
                                         })
                                         .ok();
+                                    weak_menu.update(cx, |_, cx| cx.emit(DismissEvent)).ok();
                                 }
                             },
                         );
 
                         let project_group_key = project_group_key.clone();
                         let multi_workspace = multi_workspace.clone();
-                        let weak_menu = menu_cx.weak_entity();
                         menu.separator()
                             .entry("Remove Project", None, move |window, cx| {
                                 multi_workspace
@@ -2146,7 +2231,7 @@ impl Sidebar {
                 panel.load_agent_thread(
                     Agent::from(metadata.agent_id.clone()),
                     metadata.session_id.clone(),
-                    Some(metadata.folder_paths.clone()),
+                    Some(metadata.folder_paths().clone()),
                     Some(metadata.title.clone()),
                     focus,
                     window,
@@ -2275,6 +2360,7 @@ impl Sidebar {
         let host = project_group_key.host();
         let provisional_key = Some(project_group_key.clone());
         let active_workspace = multi_workspace.read(cx).workspace().clone();
+        let modal_workspace = active_workspace.clone();
 
         let open_task = multi_workspace.update(cx, |this, cx| {
             this.find_or_create_workspace(
@@ -2289,6 +2375,9 @@ impl Sidebar {
 
         cx.spawn_in(window, async move |this, cx| {
             let result = open_task.await;
+            // Dismiss the modal as soon as the open attempt completes so
+            // failures or cancellations do not leave a stale connection modal behind.
+            remote_connection::dismiss_connection_modal(&modal_workspace, cx);
 
             if result.is_err() || is_remote {
                 this.update(cx, |this, _cx| {
@@ -2335,10 +2424,15 @@ impl Sidebar {
         cx: &mut Context<Self>,
     ) {
         let session_id = metadata.session_id.clone();
+        let weak_archive_view = match &self.view {
+            SidebarView::Archive(view) => Some(view.downgrade()),
+            _ => None,
+        };
 
-        ThreadMetadataStore::global(cx).update(cx, |store, cx| store.unarchive(&session_id, cx));
+        if metadata.folder_paths().paths().is_empty() {
+            ThreadMetadataStore::global(cx)
+                .update(cx, |store, cx| store.unarchive(&session_id, cx));
 
-        if metadata.folder_paths.paths().is_empty() {
             let active_workspace = self
                 .multi_workspace
                 .upgrade()
@@ -2347,18 +2441,17 @@ impl Sidebar {
             if let Some(workspace) = active_workspace {
                 self.activate_thread_locally(&metadata, &workspace, false, window, cx);
             } else {
-                let path_list = metadata.folder_paths.clone();
+                let path_list = metadata.folder_paths().clone();
                 if let Some((target_window, workspace)) =
                     self.find_open_workspace_for_path_list(&path_list, cx)
                 {
                     self.activate_thread_in_other_window(metadata, workspace, target_window, cx);
                 } else {
-                    // Archived thread metadata doesn't carry the remote host,
-                    // so we construct a local-only key as a best-effort fallback.
                     let key = ProjectGroupKey::new(None, path_list.clone());
                     self.open_workspace_and_activate_thread(metadata, path_list, &key, window, cx);
                 }
             }
+            self.show_thread_list(window, cx);
             return;
         }
 
@@ -2366,111 +2459,133 @@ impl Sidebar {
         let task = store
             .read(cx)
             .get_archived_worktrees_for_thread(session_id.0.to_string(), cx);
-        let path_list = metadata.folder_paths.clone();
+        let path_list = metadata.folder_paths().clone();
 
-        cx.spawn_in(window, async move |this, cx| {
-            let archived_worktrees = task.await?;
-
-            // No archived worktrees means the thread wasn't associated with a
-            // linked worktree that got deleted, so we just need to find (or
-            // open) a workspace that matches the thread's folder paths.
-            if archived_worktrees.is_empty() {
-                this.update_in(cx, |this, window, cx| {
-                    if let Some(workspace) =
-                        this.find_current_workspace_for_path_list(&path_list, cx)
-                    {
-                        this.activate_thread_locally(&metadata, &workspace, false, window, cx);
-                    } else if let Some((target_window, workspace)) =
-                        this.find_open_workspace_for_path_list(&path_list, cx)
-                    {
-                        this.activate_thread_in_other_window(
-                            metadata,
-                            workspace,
-                            target_window,
-                            cx,
-                        );
-                    } else {
-                        let key = ProjectGroupKey::new(None, path_list.clone());
-                        this.open_workspace_and_activate_thread(
-                            metadata, path_list, &key, window, cx,
-                        );
-                    }
-                })?;
-                return anyhow::Ok(());
-            }
+        let task_session_id = session_id.clone();
+        let restore_task = cx.spawn_in(window, async move |this, cx| {
+            let result: anyhow::Result<()> = async {
+                let archived_worktrees = task.await?;
+
+                if archived_worktrees.is_empty() {
+                    this.update_in(cx, |this, window, cx| {
+                        this.restoring_tasks.remove(&session_id);
+                        ThreadMetadataStore::global(cx)
+                            .update(cx, |store, cx| store.unarchive(&session_id, cx));
+
+                        if let Some(workspace) =
+                            this.find_current_workspace_for_path_list(&path_list, cx)
+                        {
+                            this.activate_thread_locally(&metadata, &workspace, false, window, cx);
+                        } else if let Some((target_window, workspace)) =
+                            this.find_open_workspace_for_path_list(&path_list, cx)
+                        {
+                            this.activate_thread_in_other_window(
+                                metadata,
+                                workspace,
+                                target_window,
+                                cx,
+                            );
+                        } else {
+                            let key = ProjectGroupKey::new(None, path_list.clone());
+                            this.open_workspace_and_activate_thread(
+                                metadata, path_list, &key, window, cx,
+                            );
+                        }
+                        this.show_thread_list(window, cx);
+                    })?;
+                    return anyhow::Ok(());
+                }
 
-            // Restore each archived worktree back to disk via git. If the
-            // worktree already exists (e.g. a previous unarchive of a different
-            // thread on the same worktree already restored it), it's reused
-            // as-is. We track (old_path, restored_path) pairs so we can update
-            // the thread's folder_paths afterward.
-            let mut path_replacements: Vec<(PathBuf, PathBuf)> = Vec::new();
-            for row in &archived_worktrees {
-                match thread_worktree_archive::restore_worktree_via_git(row, &mut *cx).await {
-                    Ok(restored_path) => {
-                        // The worktree is on disk now; clean up the DB record
-                        // and git ref we created during archival.
-                        thread_worktree_archive::cleanup_archived_worktree_record(row, &mut *cx)
+                let mut path_replacements: Vec<(PathBuf, PathBuf)> = Vec::new();
+                for row in &archived_worktrees {
+                    match thread_worktree_archive::restore_worktree_via_git(row, &mut *cx).await {
+                        Ok(restored_path) => {
+                            thread_worktree_archive::cleanup_archived_worktree_record(
+                                row, &mut *cx,
+                            )
                             .await;
-                        path_replacements.push((row.worktree_path.clone(), restored_path));
-                    }
-                    Err(error) => {
-                        log::error!("Failed to restore worktree: {error:#}");
-                        this.update_in(cx, |this, _window, cx| {
-                            if let Some(multi_workspace) = this.multi_workspace.upgrade() {
-                                let workspace = multi_workspace.read(cx).workspace().clone();
-                                workspace.update(cx, |workspace, cx| {
-                                    struct RestoreWorktreeErrorToast;
-                                    workspace.show_toast(
-                                        Toast::new(
-                                            NotificationId::unique::<RestoreWorktreeErrorToast>(),
-                                            format!("Failed to restore worktree: {error:#}"),
-                                        )
-                                        .autohide(),
-                                        cx,
-                                    );
-                                });
-                            }
-                        })
-                        .ok();
-                        return anyhow::Ok(());
+                            path_replacements.push((row.worktree_path.clone(), restored_path));
+                        }
+                        Err(error) => {
+                            log::error!("Failed to restore worktree: {error:#}");
+                            this.update_in(cx, |this, _window, cx| {
+                                this.restoring_tasks.remove(&session_id);
+                                if let Some(weak_archive_view) = &weak_archive_view {
+                                    weak_archive_view
+                                        .update(cx, |view, cx| {
+                                            view.clear_restoring(&session_id, cx);
+                                        })
+                                        .ok();
+                                }
+
+                                if let Some(multi_workspace) = this.multi_workspace.upgrade() {
+                                    let workspace = multi_workspace.read(cx).workspace().clone();
+                                    workspace.update(cx, |workspace, cx| {
+                                        struct RestoreWorktreeErrorToast;
+                                        workspace.show_toast(
+                                            Toast::new(
+                                                NotificationId::unique::<RestoreWorktreeErrorToast>(
+                                                ),
+                                                format!("Failed to restore worktree: {error:#}"),
+                                            )
+                                            .autohide(),
+                                            cx,
+                                        );
+                                    });
+                                }
+                            })
+                            .ok();
+                            return anyhow::Ok(());
+                        }
                     }
                 }
-            }
 
-            if !path_replacements.is_empty() {
-                // Update the thread's stored folder_paths: swap each old
-                // worktree path for the restored path (which may differ if
-                // the worktree was restored to a new location).
-                cx.update(|_window, cx| {
-                    store.update(cx, |store, cx| {
-                        store.update_restored_worktree_paths(&session_id, &path_replacements, cx);
-                    });
-                })?;
+                if !path_replacements.is_empty() {
+                    cx.update(|_window, cx| {
+                        store.update(cx, |store, cx| {
+                            store.update_restored_worktree_paths(
+                                &session_id,
+                                &path_replacements,
+                                cx,
+                            );
+                        });
+                    })?;
 
-                // Re-read the metadata (now with updated paths) and open
-                // the workspace so the user lands in the restored worktree.
-                let updated_metadata =
-                    cx.update(|_window, cx| store.read(cx).entry(&session_id).cloned())?;
+                    let updated_metadata =
+                        cx.update(|_window, cx| store.read(cx).entry(&session_id).cloned())?;
 
-                if let Some(updated_metadata) = updated_metadata {
-                    let new_paths = updated_metadata.folder_paths.clone();
-                    this.update_in(cx, |this, window, cx| {
-                        let key = ProjectGroupKey::new(None, new_paths.clone());
-                        this.open_workspace_and_activate_thread(
-                            updated_metadata,
-                            new_paths,
-                            &key,
-                            window,
-                            cx,
-                        );
-                    })?;
+                    if let Some(updated_metadata) = updated_metadata {
+                        let new_paths = updated_metadata.folder_paths().clone();
+
+                        cx.update(|_window, cx| {
+                            store.update(cx, |store, cx| {
+                                store.unarchive(&updated_metadata.session_id, cx);
+                            });
+                        })?;
+
+                        this.update_in(cx, |this, window, cx| {
+                            this.restoring_tasks.remove(&session_id);
+                            let key = ProjectGroupKey::new(None, new_paths.clone());
+                            this.open_workspace_and_activate_thread(
+                                updated_metadata,
+                                new_paths,
+                                &key,
+                                window,
+                                cx,
+                            );
+                            this.show_thread_list(window, cx);
+                        })?;
+                    }
                 }
-            }
 
-            anyhow::Ok(())
-        })
-        .detach_and_log_err(cx);
+                anyhow::Ok(())
+            }
+            .await;
+            if let Err(error) = result {
+                log::error!("{error:#}");
+            }
+        });
+        self.restoring_tasks.insert(task_session_id, restore_task);
     }
 
     fn expand_selected_entry(
@@ -2615,7 +2730,7 @@ impl Sidebar {
             .read(cx)
             .entry(session_id)
             .cloned();
-        let thread_folder_paths = metadata.as_ref().map(|m| m.folder_paths.clone());
+        let thread_folder_paths = metadata.as_ref().map(|m| m.folder_paths().clone());
 
         // Compute which linked worktree roots should be archived from disk if
         // this thread is archived. This must happen before we remove any
@@ -2642,7 +2757,7 @@ impl Sidebar {
                     }
                 }
                 metadata
-                    .folder_paths
+                    .folder_paths()
                     .ordered_paths()
                     .filter_map(|path| {
                         thread_worktree_archive::build_root_plan(path, &workspaces, cx)
@@ -2848,7 +2963,7 @@ impl Sidebar {
         if let Some(metadata) = neighbor {
             if let Some(workspace) = self.multi_workspace.upgrade().and_then(|mw| {
                 mw.read(cx)
-                    .workspace_for_paths(&metadata.folder_paths, None, cx)
+                    .workspace_for_paths(metadata.folder_paths(), None, cx)
             }) {
                 self.activate_workspace(&workspace, window, cx);
                 Self::load_agent_thread_in_workspace(&workspace, metadata, true, window, cx);
@@ -4367,9 +4482,11 @@ impl Sidebar {
                     this.show_thread_list(window, cx);
                 }
                 ThreadsArchiveViewEvent::Unarchive { thread } => {
-                    this.show_thread_list(window, cx);
                     this.activate_archived_thread(thread.clone(), window, cx);
                 }
+                ThreadsArchiveViewEvent::CancelRestore { session_id } => {
+                    this.restoring_tasks.remove(session_id);
+                }
             },
         );
 
@@ -4682,6 +4799,36 @@ pub fn dump_workspace_info(
         )
         .ok();
 
+        // project_group_key_for_workspace internally reads the workspace,
+        // so we can only call it for workspaces other than this_entity
+        // (which is already being updated).
+        if let Some(mw) = &multi_workspace {
+            if *ws == this_entity {
+                let workspace_key = workspace.project_group_key(cx);
+                writeln!(output, "ProjectGroupKey: {workspace_key:?}").ok();
+            } else {
+                let effective_key = mw.read(cx).project_group_key_for_workspace(ws, cx);
+                let workspace_key = ws.read(cx).project_group_key(cx);
+                if effective_key != workspace_key {
+                    writeln!(
+                        output,
+                        "ProjectGroupKey (multi_workspace): {effective_key:?}"
+                    )
+                    .ok();
+                    writeln!(
+                        output,
+                        "ProjectGroupKey (workspace, DISAGREES): {workspace_key:?}"
+                    )
+                    .ok();
+                } else {
+                    writeln!(output, "ProjectGroupKey: {effective_key:?}").ok();
+                }
+            }
+        } else {
+            let workspace_key = workspace.project_group_key(cx);
+            writeln!(output, "ProjectGroupKey: {workspace_key:?}").ok();
+        }
+
         // The action handler is already inside an update on `this_entity`,
         // so we must avoid a nested read/update on that same entity.
         if *ws == this_entity {

crates/sidebar/src/sidebar_tests.rs 🔗

@@ -3,7 +3,7 @@ use acp_thread::{AcpThread, PermissionOptions, StubAgentConnection};
 use agent::ThreadStore;
 use agent_ui::{
     test_support::{active_session_id, open_thread_with_connection, send_message},
-    thread_metadata_store::ThreadMetadata,
+    thread_metadata_store::{ThreadMetadata, ThreadWorktreePaths},
 };
 use chrono::DateTime;
 use fs::{FakeFs, Fs};
@@ -226,31 +226,49 @@ fn save_thread_metadata(
     cx: &mut TestAppContext,
 ) {
     cx.update(|cx| {
-        let (folder_paths, main_worktree_paths) = {
-            let project_ref = project.read(cx);
-            let paths: Vec<Arc<Path>> = project_ref
-                .visible_worktrees(cx)
-                .map(|worktree| worktree.read(cx).abs_path())
-                .collect();
-            let folder_paths = PathList::new(&paths);
-            let main_worktree_paths = project_ref.project_group_key(cx).path_list().clone();
-            (folder_paths, main_worktree_paths)
-        };
+        let worktree_paths = ThreadWorktreePaths::from_project(project.read(cx), cx);
         let metadata = ThreadMetadata {
             session_id,
             agent_id: agent::ZED_AGENT_ID.clone(),
             title,
             updated_at,
             created_at,
-            folder_paths,
-            main_worktree_paths,
+            worktree_paths,
             archived: false,
+            remote_connection: None,
         };
         ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx));
     });
     cx.run_until_parked();
 }
 
+fn save_thread_metadata_with_main_paths(
+    session_id: &str,
+    title: &str,
+    folder_paths: PathList,
+    main_worktree_paths: PathList,
+    cx: &mut TestAppContext,
+) {
+    let session_id = acp::SessionId::new(Arc::from(session_id));
+    let title = SharedString::from(title.to_string());
+    let updated_at = chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap();
+    let metadata = ThreadMetadata {
+        session_id,
+        agent_id: agent::ZED_AGENT_ID.clone(),
+        title,
+        updated_at,
+        created_at: None,
+        worktree_paths: ThreadWorktreePaths::from_path_lists(main_worktree_paths, folder_paths)
+            .unwrap(),
+        archived: false,
+        remote_connection: None,
+    };
+    cx.update(|cx| {
+        ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx));
+    });
+    cx.run_until_parked();
+}
+
 fn focus_sidebar(sidebar: &Entity<Sidebar>, cx: &mut gpui::VisualTestContext) {
     sidebar.update_in(cx, |_, window, cx| {
         cx.focus_self(window);
@@ -322,6 +340,11 @@ fn visible_entries_as_strings(
                 } else {
                     ""
                 };
+                let is_active = sidebar
+                    .active_entry
+                    .as_ref()
+                    .is_some_and(|active| active.matches_entry(entry));
+                let active_indicator = if is_active { " (active)" } else { "" };
                 match entry {
                     ListEntry::ProjectHeader {
                         label,
@@ -338,7 +361,7 @@ fn visible_entries_as_strings(
                     }
                     ListEntry::Thread(thread) => {
                         let title = thread.metadata.title.as_ref();
-                        let active = if thread.is_live { " *" } else { "" };
+                        let live = if thread.is_live { " *" } else { "" };
                         let status_str = match thread.status {
                             AgentThreadStatus::Running => " (running)",
                             AgentThreadStatus::Error => " (error)",
@@ -354,7 +377,7 @@ fn visible_entries_as_strings(
                             ""
                         };
                         let worktree = format_linked_worktree_chips(&thread.worktrees);
-                        format!("  {title}{worktree}{active}{status_str}{notified}{selected}")
+                        format!("  {title}{worktree}{live}{status_str}{notified}{active_indicator}{selected}")
                     }
                     ListEntry::ViewMore {
                         is_fully_expanded, ..
@@ -374,7 +397,7 @@ fn visible_entries_as_strings(
                         if workspace.is_some() {
                             format!("  [+ New Thread{}]{}", worktree, selected)
                         } else {
-                            format!("  [~ Draft{}]{}", worktree, selected)
+                            format!("  [~ Draft{}]{}{}", worktree, active_indicator, selected)
                         }
                     }
                 }
@@ -543,7 +566,10 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) {
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]"]
+        vec![
+            //
+            "v [my-project]",
+        ]
     );
 }
 
@@ -579,6 +605,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix crash in project panel",
             "  Add inline diff view",
@@ -609,7 +636,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [project-a]", "  Thread A1"]
+        vec![
+            //
+            "v [project-a]",
+            "  Thread A1",
+        ]
     );
 
     // Add a second workspace
@@ -620,7 +651,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [project-a]", "  Thread A1",]
+        vec![
+            //
+            "v [project-a]",
+            "  Thread A1",
+        ]
     );
 }
 
@@ -639,6 +674,7 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Thread 12",
             "  Thread 11",
@@ -749,7 +785,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Thread 1"]
+        vec![
+            //
+            "v [my-project]",
+            "  Thread 1",
+        ]
     );
 
     // Collapse
@@ -760,7 +800,10 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["> [my-project]"]
+        vec![
+            //
+            "> [my-project]",
+        ]
     );
 
     // Expand
@@ -771,7 +814,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Thread 1"]
+        vec![
+            //
+            "v [my-project]",
+            "  Thread 1",
+        ]
     );
 }
 
@@ -807,12 +854,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                 metadata: ThreadMetadata {
                     session_id: acp::SessionId::new(Arc::from("t-1")),
                     agent_id: AgentId::new("zed-agent"),
-                    folder_paths: PathList::default(),
-                    main_worktree_paths: PathList::default(),
+                    worktree_paths: ThreadWorktreePaths::default(),
                     title: "Completed thread".into(),
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    remote_connection: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -830,12 +877,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                 metadata: ThreadMetadata {
                     session_id: acp::SessionId::new(Arc::from("t-2")),
                     agent_id: AgentId::new("zed-agent"),
-                    folder_paths: PathList::default(),
-                    main_worktree_paths: PathList::default(),
+                    worktree_paths: ThreadWorktreePaths::default(),
                     title: "Running thread".into(),
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    remote_connection: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -853,12 +900,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                 metadata: ThreadMetadata {
                     session_id: acp::SessionId::new(Arc::from("t-3")),
                     agent_id: AgentId::new("zed-agent"),
-                    folder_paths: PathList::default(),
-                    main_worktree_paths: PathList::default(),
+                    worktree_paths: ThreadWorktreePaths::default(),
                     title: "Error thread".into(),
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    remote_connection: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -872,16 +919,17 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                 diff_stats: DiffStats::default(),
             }),
             // Thread with WaitingForConfirmation status, not active
+            // remote_connection: None,
             ListEntry::Thread(ThreadEntry {
                 metadata: ThreadMetadata {
                     session_id: acp::SessionId::new(Arc::from("t-4")),
                     agent_id: AgentId::new("zed-agent"),
-                    folder_paths: PathList::default(),
-                    main_worktree_paths: PathList::default(),
+                    worktree_paths: ThreadWorktreePaths::default(),
                     title: "Waiting thread".into(),
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    remote_connection: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -895,16 +943,17 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                 diff_stats: DiffStats::default(),
             }),
             // Background thread that completed (should show notification)
+            // remote_connection: None,
             ListEntry::Thread(ThreadEntry {
                 metadata: ThreadMetadata {
                     session_id: acp::SessionId::new(Arc::from("t-5")),
                     agent_id: AgentId::new("zed-agent"),
-                    folder_paths: PathList::default(),
-                    main_worktree_paths: PathList::default(),
+                    worktree_paths: ThreadWorktreePaths::default(),
                     title: "Notified thread".into(),
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    remote_connection: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -941,6 +990,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [expanded-project]",
             "  Completed thread",
             "  Running thread * (running)  <== selected",
@@ -1104,10 +1154,14 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Thread 1"]
+        vec![
+            //
+            "v [my-project]",
+            "  Thread 1",
+        ]
     );
 
-    // Focus the sidebar and select the header (index 0)
+    // Focus the sidebar and select the header
     focus_sidebar(&sidebar, cx);
     sidebar.update_in(cx, |sidebar, _window, _cx| {
         sidebar.selection = Some(0);
@@ -1119,7 +1173,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["> [my-project]  <== selected"]
+        vec![
+            //
+            "> [my-project]  <== selected",
+        ]
     );
 
     // Confirm again expands the group
@@ -1128,7 +1185,11 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]  <== selected", "  Thread 1",]
+        vec![
+            //
+            "v [my-project]  <== selected",
+            "  Thread 1",
+        ]
     );
 }
 
@@ -1179,7 +1240,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Thread 1"]
+        vec![
+            //
+            "v [my-project]",
+            "  Thread 1",
+        ]
     );
 
     // Focus sidebar and manually select the header (index 0). Press left to collapse.
@@ -1193,7 +1258,10 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["> [my-project]  <== selected"]
+        vec![
+            //
+            "> [my-project]  <== selected",
+        ]
     );
 
     // Press right to expand
@@ -1202,7 +1270,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]  <== selected", "  Thread 1",]
+        vec![
+            //
+            "v [my-project]  <== selected",
+            "  Thread 1",
+        ]
     );
 
     // Press right again on already-expanded header moves selection down
@@ -1229,7 +1301,11 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Thread 1  <== selected",]
+        vec![
+            //
+            "v [my-project]",
+            "  Thread 1  <== selected",
+        ]
     );
 
     // Pressing left on a child collapses the parent group and selects it
@@ -1239,7 +1315,10 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex
     assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["> [my-project]  <== selected"]
+        vec![
+            //
+            "> [my-project]  <== selected",
+        ]
     );
 }
 
@@ -1253,7 +1332,10 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) {
     // An empty project has only the header.
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [empty-project]"]
+        vec![
+            //
+            "v [empty-project]",
+        ]
     );
 
     // Focus sidebar — focus_in does not set a selection
@@ -1385,7 +1467,12 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) {
     entries[1..].sort();
     assert_eq!(
         entries,
-        vec!["v [my-project]", "  Hello *", "  Hello * (running)",]
+        vec![
+            //
+            "v [my-project]",
+            "  Hello * (active)",
+            "  Hello * (running)",
+        ]
     );
 }
 
@@ -1478,7 +1565,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp
     // Thread A is still running; no notification yet.
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [project-a]", "  Hello * (running)",]
+        vec![
+            //
+            "v [project-a]",
+            "  Hello * (running) (active)",
+        ]
     );
 
     // Complete thread A's turn (transition Running → Completed).
@@ -1488,7 +1579,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp
     // The completed background thread shows a notification indicator.
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [project-a]", "  Hello * (!)",]
+        vec![
+            //
+            "v [project-a]",
+            "  Hello * (!) (active)",
+        ]
     );
 }
 
@@ -1528,6 +1623,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext)
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix crash in project panel",
             "  Add inline diff view",
@@ -1540,7 +1636,11 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext)
     type_in_search(&sidebar, "diff", cx);
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Add inline diff view  <== selected",]
+        vec![
+            //
+            "v [my-project]",
+            "  Add inline diff view  <== selected",
+        ]
     );
 
     // User changes query to something with no matches — list is empty.
@@ -1575,6 +1675,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix Crash In Project Panel  <== selected",
         ]
@@ -1585,6 +1686,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix Crash In Project Panel  <== selected",
         ]
@@ -1615,7 +1717,12 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
     // Confirm the full list is showing.
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Alpha thread", "  Beta thread",]
+        vec![
+            //
+            "v [my-project]",
+            "  Alpha thread",
+            "  Beta thread",
+        ]
     );
 
     // User types a search query to filter down.
@@ -1623,7 +1730,11 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
     type_in_search(&sidebar, "alpha", cx);
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Alpha thread  <== selected",]
+        vec![
+            //
+            "v [my-project]",
+            "  Alpha thread  <== selected",
+        ]
     );
 
     // User presses Escape — filter clears, full list is restored.
@@ -1633,6 +1744,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Alpha thread  <== selected",
             "  Beta thread",
@@ -1689,6 +1801,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [project-a]",
             "  Fix bug in sidebar",
             "  Add tests for editor",
@@ -1699,7 +1812,11 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
     type_in_search(&sidebar, "sidebar", cx);
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [project-a]", "  Fix bug in sidebar  <== selected",]
+        vec![
+            //
+            "v [project-a]",
+            "  Fix bug in sidebar  <== selected",
+        ]
     );
 
     // "typo" only matches in the second workspace — the first header disappears.
@@ -1715,6 +1832,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [project-a]",
             "  Fix bug in sidebar  <== selected",
             "  Add tests for editor",
@@ -1774,6 +1892,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [alpha-project]",
             "  Fix bug in sidebar  <== selected",
             "  Add tests for editor",
@@ -1785,7 +1904,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
     type_in_search(&sidebar, "sidebar", cx);
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [alpha-project]", "  Fix bug in sidebar  <== selected",]
+        vec![
+            //
+            "v [alpha-project]",
+            "  Fix bug in sidebar  <== selected",
+        ]
     );
 
     // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r
@@ -1795,7 +1918,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
     type_in_search(&sidebar, "fix", cx);
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [alpha-project]", "  Fix bug in sidebar  <== selected",]
+        vec![
+            //
+            "v [alpha-project]",
+            "  Fix bug in sidebar  <== selected",
+        ]
     );
 
     // A query that matches a workspace name AND a thread in that same workspace.
@@ -1804,6 +1931,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [alpha-project]",
             "  Fix bug in sidebar  <== selected",
             "  Add tests for editor",
@@ -1817,6 +1945,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [alpha-project]",
             "  Fix bug in sidebar  <== selected",
             "  Add tests for editor",
@@ -1866,7 +1995,11 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte
     let filtered = visible_entries_as_strings(&sidebar, cx);
     assert_eq!(
         filtered,
-        vec!["v [my-project]", "  Hidden gem thread  <== selected",]
+        vec![
+            //
+            "v [my-project]",
+            "  Hidden gem thread  <== selected",
+        ]
     );
     assert!(
         !filtered.iter().any(|e| e.contains("View More")),
@@ -1902,14 +2035,21 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["> [my-project]  <== selected"]
+        vec![
+            //
+            "> [my-project]  <== selected",
+        ]
     );
 
     // User types a search — the thread appears even though its group is collapsed.
     type_in_search(&sidebar, "important", cx);
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["> [my-project]", "  Important thread  <== selected",]
+        vec![
+            //
+            "> [my-project]",
+            "  Important thread  <== selected",
+        ]
     );
 }
 
@@ -1943,6 +2083,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix crash in panel  <== selected",
             "  Fix lint warnings",
@@ -1955,6 +2096,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix crash in panel",
             "  Fix lint warnings  <== selected",
@@ -1966,6 +2108,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
         vec![
+            //
             "v [my-project]",
             "  Fix crash in panel  <== selected",
             "  Fix lint warnings",
@@ -2006,7 +2149,11 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Historical Thread",]
+        vec![
+            //
+            "v [my-project]",
+            "  Historical Thread",
+        ]
     );
 
     // Switch to workspace 1 so we can verify the confirm switches back.
@@ -2067,7 +2214,12 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Thread A", "  Thread B",]
+        vec![
+            //
+            "v [my-project]",
+            "  Thread A",
+            "  Thread B",
+        ]
     );
 
     // Keyboard confirm preserves selection.
@@ -2119,7 +2271,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext)
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Hello *"]
+        vec![
+            //
+            "v [my-project]",
+            "  Hello * (active)",
+        ]
     );
 
     // Simulate the agent generating a title. The notification chain is:
@@ -2141,7 +2297,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext)
 
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [my-project]", "  Friendly Greeting with AI *"]
+        vec![
+            //
+            "v [my-project]",
+            "  Friendly Greeting with AI * (active)",
+        ]
     );
 }
 
@@ -2194,9 +2354,9 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
                 title: "Test".into(),
                 updated_at: Utc::now(),
                 created_at: None,
-                folder_paths: PathList::default(),
-                main_worktree_paths: PathList::default(),
+                worktree_paths: ThreadWorktreePaths::default(),
                 archived: false,
+                remote_connection: None,
             },
             &workspace_a,
             false,
@@ -2250,9 +2410,9 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
                 title: "Thread B".into(),
                 updated_at: Utc::now(),
                 created_at: None,
-                folder_paths: PathList::default(),
-                main_worktree_paths: PathList::default(),
+                worktree_paths: ThreadWorktreePaths::default(),
                 archived: false,
+                remote_connection: None,
             },
             &workspace_b,
             false,
@@ -2302,167 +2462,935 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
     save_test_thread_metadata(&session_id_b2, &project_b, cx).await;
     cx.run_until_parked();
 
-    // Panel B is not the active workspace's panel (workspace A is
-    // active), so opening a thread there should not change focused_thread.
-    // This prevents running threads in background workspaces from causing
-    // the selection highlight to jump around.
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert_active_thread(
-            sidebar,
-            &session_id_a,
-            "Opening a thread in a non-active panel should not change focused_thread",
-        );
-    });
+    // Panel B is not the active workspace's panel (workspace A is
+    // active), so opening a thread there should not change focused_thread.
+    // This prevents running threads in background workspaces from causing
+    // the selection highlight to jump around.
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert_active_thread(
+            sidebar,
+            &session_id_a,
+            "Opening a thread in a non-active panel should not change focused_thread",
+        );
+    });
+
+    workspace_b.update_in(cx, |workspace, window, cx| {
+        workspace.focus_handle(cx).focus(window, cx);
+    });
+    cx.run_until_parked();
+
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert_active_thread(
+            sidebar,
+            &session_id_a,
+            "Defocusing the sidebar should not change focused_thread",
+        );
+    });
+
+    // Switching workspaces via the multi_workspace (simulates clicking
+    // a workspace header) should seed focused_thread from the new panel.
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned();
+        if let Some(workspace) = workspace {
+            mw.activate(workspace, window, cx);
+        }
+    });
+    cx.run_until_parked();
+
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert_active_thread(
+            sidebar,
+            &session_id_b2,
+            "Switching workspace should seed focused_thread from the new active panel",
+        );
+        assert!(
+            has_thread_entry(sidebar, &session_id_b2),
+            "The seeded thread should be present in the entries"
+        );
+    });
+
+    // ── 8. Focusing the agent panel thread keeps focused_thread ────
+    // Workspace B still has session_id_b2 loaded in the agent panel.
+    // Clicking into the thread (simulated by focusing its view) should
+    // keep focused_thread since it was already seeded on workspace switch.
+    panel_b.update_in(cx, |panel, window, cx| {
+        if let Some(thread_view) = panel.active_conversation_view() {
+            thread_view.read(cx).focus_handle(cx).focus(window, cx);
+        }
+    });
+    cx.run_until_parked();
+
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert_active_thread(
+            sidebar,
+            &session_id_b2,
+            "Focusing the agent panel thread should set focused_thread",
+        );
+        assert!(
+            has_thread_entry(sidebar, &session_id_b2),
+            "The focused thread should be present in the entries"
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) {
+    let project = init_test_project_with_agent_panel("/project-a", cx).await;
+    let fs = cx.update(|cx| <dyn fs::Fs>::global(cx));
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx);
+
+    // Start a thread and send a message so it has history.
+    let connection = StubAgentConnection::new();
+    connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
+        acp::ContentChunk::new("Done".into()),
+    )]);
+    open_thread_with_connection(&panel, connection, cx);
+    send_message(&panel, cx);
+    let session_id = active_session_id(&panel, cx);
+    save_test_thread_metadata(&session_id, &project, cx).await;
+    cx.run_until_parked();
+
+    // Verify the thread appears in the sidebar.
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a]",
+            "  Hello * (active)",
+        ]
+    );
+
+    // The "New Thread" button should NOT be in "active/draft" state
+    // because the panel has a thread with messages.
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert!(
+            matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })),
+            "Panel has a thread with messages, so active_entry should be Thread, got {:?}",
+            sidebar.active_entry,
+        );
+    });
+
+    // Now add a second folder to the workspace, changing the path_list.
+    fs.as_fake()
+        .insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    project
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree("/project-b", true, cx)
+        })
+        .await
+        .expect("should add worktree");
+    cx.run_until_parked();
+
+    // The workspace path_list is now [project-a, project-b]. The active
+    // thread's metadata was re-saved with the new paths by the agent panel's
+    // project subscription. The old [project-a] key is replaced by the new
+    // key since no other workspace claims it.
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a, project-b]",
+            "  Hello * (active)",
+        ]
+    );
+
+    // The "New Thread" button must still be clickable (not stuck in
+    // "active/draft" state). Verify that active_entry is still a
+    // Thread — the panel still has the old thread with messages.
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert!(
+            matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })),
+            "After adding a folder the panel still has a thread with messages, \
+                 so active_entry should be Thread, got {:?}",
+            sidebar.active_entry,
+        );
+    });
+
+    // Actually click "New Thread" by calling create_new_thread and
+    // verify a new draft is created.
+    let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone());
+    sidebar.update_in(cx, |sidebar, window, cx| {
+        sidebar.create_new_thread(&workspace, window, cx);
+    });
+    cx.run_until_parked();
+
+    // After creating a new thread, the panel should now be in draft
+    // state (no messages on the new thread).
+    sidebar.read_with(cx, |sidebar, _cx| {
+        assert_active_draft(
+            sidebar,
+            &workspace,
+            "After creating a new thread active_entry should be Draft",
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_worktree_add_and_remove_migrates_threads(cx: &mut TestAppContext) {
+    // When a worktree is added to a project, the project group key changes
+    // and all historical threads should be migrated to the new key. Removing
+    // the worktree should migrate them back.
+    let (_fs, project) = init_multi_project_test(&["/project-a", "/project-b"], cx).await;
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Save two threads against the initial project group [/project-a].
+    save_n_test_threads(2, &project, cx).await;
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a]",
+            "  Thread 2",
+            "  Thread 1",
+        ]
+    );
+
+    // Verify the metadata store has threads under the old key.
+    let old_key_paths = PathList::new(&[PathBuf::from("/project-a")]);
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        assert_eq!(
+            store.entries_for_main_worktree_path(&old_key_paths).count(),
+            2,
+            "should have 2 threads under old key before add"
+        );
+    });
+
+    // Add a second worktree to the same project.
+    project
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree("/project-b", true, cx)
+        })
+        .await
+        .expect("should add worktree");
+    cx.run_until_parked();
+
+    // The project group key should now be [/project-a, /project-b].
+    let new_key_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]);
+
+    // Verify multi-workspace state: exactly one project group key, the new one.
+    multi_workspace.read_with(cx, |mw, _cx| {
+        let keys: Vec<_> = mw.project_group_keys().cloned().collect();
+        assert_eq!(
+            keys.len(),
+            1,
+            "should have exactly 1 project group key after add"
+        );
+        assert_eq!(
+            keys[0].path_list(),
+            &new_key_paths,
+            "the key should be the new combined path list"
+        );
+    });
+
+    // Verify threads were migrated to the new key.
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        assert_eq!(
+            store.entries_for_main_worktree_path(&old_key_paths).count(),
+            0,
+            "should have 0 threads under old key after migration"
+        );
+        assert_eq!(
+            store.entries_for_main_worktree_path(&new_key_paths).count(),
+            2,
+            "should have 2 threads under new key after migration"
+        );
+    });
+
+    // Sidebar should show threads under the new header.
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a, project-b]",
+            "  Thread 2",
+            "  Thread 1",
+        ]
+    );
+
+    // Now remove the second worktree.
+    let worktree_id = project.read_with(cx, |project, cx| {
+        project
+            .visible_worktrees(cx)
+            .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/project-b"))
+            .map(|wt| wt.read(cx).id())
+            .expect("should find project-b worktree")
+    });
+    project.update(cx, |project, cx| {
+        project.remove_worktree(worktree_id, cx);
+    });
+    cx.run_until_parked();
+
+    // The key should revert to [/project-a].
+    multi_workspace.read_with(cx, |mw, _cx| {
+        let keys: Vec<_> = mw.project_group_keys().cloned().collect();
+        assert_eq!(
+            keys.len(),
+            1,
+            "should have exactly 1 project group key after remove"
+        );
+        assert_eq!(
+            keys[0].path_list(),
+            &old_key_paths,
+            "the key should revert to the original path list"
+        );
+    });
+
+    // Threads should be migrated back to the old key.
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        assert_eq!(
+            store.entries_for_main_worktree_path(&new_key_paths).count(),
+            0,
+            "should have 0 threads under new key after revert"
+        );
+        assert_eq!(
+            store.entries_for_main_worktree_path(&old_key_paths).count(),
+            2,
+            "should have 2 threads under old key after revert"
+        );
+    });
+
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a]",
+            "  Thread 2",
+            "  Thread 1",
+        ]
+    );
+}
+
+#[gpui::test]
+async fn test_worktree_add_and_remove_preserves_thread_path_associations(cx: &mut TestAppContext) {
+    // Verifies that adding/removing folders to a project correctly updates
+    // each thread's worktree_paths (both folder_paths and main_worktree_paths)
+    // while preserving per-path associations for linked worktrees.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: PathBuf::from("/wt-feature"),
+            ref_name: Some("refs/heads/feature".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+    fs.insert_tree("/other-project", serde_json::json!({ ".git": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    // Start with a linked worktree workspace: visible root is /wt-feature,
+    // main repo is /project.
+    let project =
+        project::Project::test(fs.clone() as Arc<dyn Fs>, ["/wt-feature".as_ref()], cx).await;
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let _sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Save a thread. It should have folder_paths=[/wt-feature], main=[/project].
+    save_named_thread_metadata("thread-1", "Thread 1", &project, cx).await;
+
+    let session_id = acp::SessionId::new(Arc::from("thread-1"));
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        let thread = store.entry(&session_id).expect("thread should exist");
+        assert_eq!(
+            thread.folder_paths().paths(),
+            &[PathBuf::from("/wt-feature")],
+            "initial folder_paths should be the linked worktree"
+        );
+        assert_eq!(
+            thread.main_worktree_paths().paths(),
+            &[PathBuf::from("/project")],
+            "initial main_worktree_paths should be the main repo"
+        );
+    });
+
+    // Add /other-project to the workspace.
+    project
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree("/other-project", true, cx)
+        })
+        .await
+        .expect("should add worktree");
+    cx.run_until_parked();
+
+    // Thread should now have both paths, with correct associations.
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        let thread = store.entry(&session_id).expect("thread should exist");
+        let pairs: Vec<_> = thread
+            .worktree_paths
+            .ordered_pairs()
+            .map(|(m, f)| (m.clone(), f.clone()))
+            .collect();
+        assert!(
+            pairs.contains(&(PathBuf::from("/project"), PathBuf::from("/wt-feature"))),
+            "linked worktree association should be preserved, got: {:?}",
+            pairs
+        );
+        assert!(
+            pairs.contains(&(
+                PathBuf::from("/other-project"),
+                PathBuf::from("/other-project")
+            )),
+            "new folder should have main == folder, got: {:?}",
+            pairs
+        );
+    });
+
+    // Remove /other-project.
+    let worktree_id = project.read_with(cx, |project, cx| {
+        project
+            .visible_worktrees(cx)
+            .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/other-project"))
+            .map(|wt| wt.read(cx).id())
+            .expect("should find other-project worktree")
+    });
+    project.update(cx, |project, cx| {
+        project.remove_worktree(worktree_id, cx);
+    });
+    cx.run_until_parked();
+
+    // Thread should be back to original state.
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        let thread = store.entry(&session_id).expect("thread should exist");
+        assert_eq!(
+            thread.folder_paths().paths(),
+            &[PathBuf::from("/wt-feature")],
+            "folder_paths should revert to just the linked worktree"
+        );
+        assert_eq!(
+            thread.main_worktree_paths().paths(),
+            &[PathBuf::from("/project")],
+            "main_worktree_paths should revert to just the main repo"
+        );
+        let pairs: Vec<_> = thread
+            .worktree_paths
+            .ordered_pairs()
+            .map(|(m, f)| (m.clone(), f.clone()))
+            .collect();
+        assert_eq!(
+            pairs,
+            vec![(PathBuf::from("/project"), PathBuf::from("/wt-feature"))],
+            "linked worktree association should be preserved through add+remove cycle"
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_worktree_add_key_collision_removes_duplicate_workspace(cx: &mut TestAppContext) {
+    // When a worktree is added to workspace A and the resulting key matches
+    // an existing workspace B's key (and B has the same root paths), B
+    // should be removed as a true duplicate.
+    let (fs, project_a) = init_multi_project_test(&["/project-a", "/project-b"], cx).await;
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Save a thread against workspace A [/project-a].
+    save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await;
+
+    // Create workspace B with both worktrees [/project-a, /project-b].
+    let project_b = project::Project::test(
+        fs.clone() as Arc<dyn Fs>,
+        ["/project-a".as_ref(), "/project-b".as_ref()],
+        cx,
+    )
+    .await;
+    let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(project_b.clone(), window, cx)
+    });
+    cx.run_until_parked();
+
+    // Switch back to workspace A so it's the active workspace when the collision happens.
+    let workspace_a =
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone());
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.activate(workspace_a, window, cx);
+    });
+    cx.run_until_parked();
+
+    // Save a thread against workspace B [/project-a, /project-b].
+    save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await;
+
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    // Both project groups should be visible.
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a, project-b]",
+            "  Thread B",
+            "v [project-a]",
+            "  Thread A",
+        ]
+    );
+
+    let workspace_b_id = workspace_b.entity_id();
+
+    // Now add /project-b to workspace A's project, causing a key collision.
+    project_a
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree("/project-b", true, cx)
+        })
+        .await
+        .expect("should add worktree");
+    cx.run_until_parked();
+
+    // Workspace B should have been removed (true duplicate — same root paths).
+    multi_workspace.read_with(cx, |mw, _cx| {
+        let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect();
+        assert!(
+            !workspace_ids.contains(&workspace_b_id),
+            "workspace B should have been removed after key collision"
+        );
+    });
+
+    // There should be exactly one project group key now.
+    let combined_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]);
+    multi_workspace.read_with(cx, |mw, _cx| {
+        let keys: Vec<_> = mw.project_group_keys().cloned().collect();
+        assert_eq!(
+            keys.len(),
+            1,
+            "should have exactly 1 project group key after collision"
+        );
+        assert_eq!(
+            keys[0].path_list(),
+            &combined_paths,
+            "the remaining key should be the combined paths"
+        );
+    });
+
+    // Both threads should be visible under the merged group.
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a, project-b]",
+            "  Thread A",
+            "  Thread B",
+        ]
+    );
+}
+
+#[gpui::test]
+async fn test_worktree_collision_keeps_active_workspace(cx: &mut TestAppContext) {
+    // When workspace A adds a folder that makes it collide with workspace B,
+    // and B is the *active* workspace, A (the incoming one) should be
+    // dropped so the user stays on B. A linked worktree sibling of A
+    // should migrate into B's group.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    // Set up /project-a with a linked worktree.
+    fs.insert_tree(
+        "/project-a",
+        serde_json::json!({
+            ".git": {
+                "worktrees": {
+                    "feature": {
+                        "commondir": "../../",
+                        "HEAD": "ref: refs/heads/feature",
+                    },
+                },
+            },
+            "src": {},
+        }),
+    )
+    .await;
+    fs.insert_tree(
+        "/wt-feature",
+        serde_json::json!({
+            ".git": "gitdir: /project-a/.git/worktrees/feature",
+            "src": {},
+        }),
+    )
+    .await;
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project-a/.git"),
+        false,
+        git::repository::Worktree {
+            path: PathBuf::from("/wt-feature"),
+            ref_name: Some("refs/heads/feature".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+    fs.insert_tree("/project-b", serde_json::json!({ ".git": {}, "src": {} }))
+        .await;
+    cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await;
+
+    // Linked worktree sibling of A.
+    let project_wt = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await;
+    project_wt
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    // Workspace B has both folders already.
+    let project_b = project::Project::test(
+        fs.clone() as Arc<dyn Fs>,
+        ["/project-a".as_ref(), "/project-b".as_ref()],
+        cx,
+    )
+    .await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Add agent panels to all workspaces.
+    let workspace_a_entity = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+    add_agent_panel(&workspace_a_entity, cx);
+
+    // Add the linked worktree workspace (sibling of A).
+    let workspace_wt = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(project_wt.clone(), window, cx)
+    });
+    add_agent_panel(&workspace_wt, cx);
+    cx.run_until_parked();
+
+    // Add workspace B (will become active).
+    let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(project_b.clone(), window, cx)
+    });
+    add_agent_panel(&workspace_b, cx);
+    cx.run_until_parked();
+
+    // Save threads in each group.
+    save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await;
+    save_thread_metadata_with_main_paths(
+        "thread-wt",
+        "Worktree Thread",
+        PathList::new(&[PathBuf::from("/wt-feature")]),
+        PathList::new(&[PathBuf::from("/project-a")]),
+        cx,
+    );
+    save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await;
+
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    // B is active, A and wt-feature are in one group, B in another.
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()),
+        workspace_b.entity_id(),
+        "workspace B should be active"
+    );
+    multi_workspace.read_with(cx, |mw, _cx| {
+        assert_eq!(mw.project_group_keys().count(), 2, "should have 2 groups");
+        assert_eq!(mw.workspaces().count(), 3, "should have 3 workspaces");
+    });
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a, project-b]",
+            "  [~ Draft] (active)",
+            "  Thread B",
+            "v [project-a]",
+            "  Thread A",
+            "  Worktree Thread {wt-feature}",
+        ]
+    );
+
+    let workspace_a = multi_workspace.read_with(cx, |mw, _| {
+        mw.workspaces()
+            .find(|ws| {
+                ws.entity_id() != workspace_b.entity_id()
+                    && ws.entity_id() != workspace_wt.entity_id()
+            })
+            .unwrap()
+            .clone()
+    });
+
+    // Add /project-b to workspace A's project, causing a collision with B.
+    project_a
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree("/project-b", true, cx)
+        })
+        .await
+        .expect("should add worktree");
+    cx.run_until_parked();
+
+    // Workspace A (the incoming duplicate) should have been dropped.
+    multi_workspace.read_with(cx, |mw, _cx| {
+        let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect();
+        assert!(
+            !workspace_ids.contains(&workspace_a.entity_id()),
+            "workspace A should have been dropped"
+        );
+    });
+
+    // The active workspace should still be B.
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()),
+        workspace_b.entity_id(),
+        "workspace B should still be active"
+    );
+
+    // The linked worktree sibling should have migrated into B's group
+    // (it got the folder add and now shares the same key).
+    multi_workspace.read_with(cx, |mw, _cx| {
+        let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect();
+        assert!(
+            workspace_ids.contains(&workspace_wt.entity_id()),
+            "linked worktree workspace should still exist"
+        );
+        assert_eq!(
+            mw.project_group_keys().count(),
+            1,
+            "should have 1 group after merge"
+        );
+        assert_eq!(
+            mw.workspaces().count(),
+            2,
+            "should have 2 workspaces (B + linked worktree)"
+        );
+    });
+
+    // The linked worktree workspace should have gotten the new folder.
+    let wt_worktree_count =
+        project_wt.read_with(cx, |project, cx| project.visible_worktrees(cx).count());
+    assert_eq!(
+        wt_worktree_count, 2,
+        "linked worktree project should have gotten /project-b"
+    );
+
+    // After: everything merged under one group. Thread A migrated,
+    // worktree thread shows its chip, B's thread and draft remain.
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [project-a, project-b]",
+            "  [~ Draft] (active)",
+            "  Thread A",
+            "  Worktree Thread {project-a:wt-feature}",
+            "  Thread B",
+        ]
+    );
+}
+
+#[gpui::test]
+async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext) {
+    // When a worktree is added to the main workspace, a linked worktree
+    // sibling (different root paths, same project group key) should also
+    // get the new folder added to its project.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {
+                "worktrees": {
+                    "feature": {
+                        "commondir": "../../",
+                        "HEAD": "ref: refs/heads/feature",
+                    },
+                },
+            },
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.insert_tree(
+        "/wt-feature",
+        serde_json::json!({
+            ".git": "gitdir: /project/.git/worktrees/feature",
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: PathBuf::from("/wt-feature"),
+            ref_name: Some("refs/heads/feature".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    // Create a second independent project to add as a folder later.
+    fs.insert_tree(
+        "/other-project",
+        serde_json::json!({ ".git": {}, "src": {} }),
+    )
+    .await;
+
+    cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
 
-    workspace_b.update_in(cx, |workspace, window, cx| {
-        workspace.focus_handle(cx).focus(window, cx);
-    });
-    cx.run_until_parked();
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await;
 
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert_active_thread(
-            sidebar,
-            &session_id_a,
-            "Defocusing the sidebar should not change focused_thread",
-        );
-    });
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
 
-    // Switching workspaces via the multi_workspace (simulates clicking
-    // a workspace header) should clear focused_thread.
-    multi_workspace.update_in(cx, |mw, window, cx| {
-        let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned();
-        if let Some(workspace) = workspace {
-            mw.activate(workspace, window, cx);
-        }
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Add agent panel to the main workspace.
+    let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+    add_agent_panel(&main_workspace, cx);
+
+    // Open the linked worktree as a separate workspace.
+    let wt_workspace = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(worktree_project.clone(), window, cx)
     });
+    add_agent_panel(&wt_workspace, cx);
     cx.run_until_parked();
 
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert_active_thread(
-            sidebar,
-            &session_id_b2,
-            "Switching workspace should seed focused_thread from the new active panel",
-        );
-        assert!(
-            has_thread_entry(sidebar, &session_id_b2),
-            "The seeded thread should be present in the entries"
+    // Both workspaces should share the same project group key [/project].
+    multi_workspace.read_with(cx, |mw, _cx| {
+        assert_eq!(
+            mw.project_group_keys().count(),
+            1,
+            "should have 1 project group key before add"
         );
+        assert_eq!(mw.workspaces().count(), 2, "should have 2 workspaces");
     });
 
-    // ── 8. Focusing the agent panel thread keeps focused_thread ────
-    // Workspace B still has session_id_b2 loaded in the agent panel.
-    // Clicking into the thread (simulated by focusing its view) should
-    // keep focused_thread since it was already seeded on workspace switch.
-    panel_b.update_in(cx, |panel, window, cx| {
-        if let Some(thread_view) = panel.active_conversation_view() {
-            thread_view.read(cx).focus_handle(cx).focus(window, cx);
-        }
-    });
-    cx.run_until_parked();
+    // Save threads against each workspace.
+    save_named_thread_metadata("main-thread", "Main Thread", &main_project, cx).await;
+    save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await;
 
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert_active_thread(
-            sidebar,
-            &session_id_b2,
-            "Focusing the agent panel thread should set focused_thread",
-        );
-        assert!(
-            has_thread_entry(sidebar, &session_id_b2),
-            "The focused thread should be present in the entries"
+    // Verify both threads are under the old key [/project].
+    let old_key_paths = PathList::new(&[PathBuf::from("/project")]);
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        assert_eq!(
+            store.entries_for_main_worktree_path(&old_key_paths).count(),
+            2,
+            "should have 2 threads under old key before add"
         );
     });
-}
-
-#[gpui::test]
-async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) {
-    let project = init_test_project_with_agent_panel("/project-a", cx).await;
-    let fs = cx.update(|cx| <dyn fs::Fs>::global(cx));
-    let (multi_workspace, cx) =
-        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-    let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx);
 
-    // Start a thread and send a message so it has history.
-    let connection = StubAgentConnection::new();
-    connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
-        acp::ContentChunk::new("Done".into()),
-    )]);
-    open_thread_with_connection(&panel, connection, cx);
-    send_message(&panel, cx);
-    let session_id = active_session_id(&panel, cx);
-    save_test_thread_metadata(&session_id, &project, cx).await;
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
     cx.run_until_parked();
 
-    // Verify the thread appears in the sidebar.
     assert_eq!(
         visible_entries_as_strings(&sidebar, cx),
-        vec!["v [project-a]", "  Hello *",]
+        vec![
+            //
+            "v [project]",
+            "  [~ Draft {wt-feature}] (active)",
+            "  Worktree Thread {wt-feature}",
+            "  Main Thread",
+        ]
     );
 
-    // The "New Thread" button should NOT be in "active/draft" state
-    // because the panel has a thread with messages.
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert!(
-            matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })),
-            "Panel has a thread with messages, so active_entry should be Thread, got {:?}",
-            sidebar.active_entry,
-        );
-    });
-
-    // Now add a second folder to the workspace, changing the path_list.
-    fs.as_fake()
-        .insert_tree("/project-b", serde_json::json!({ "src": {} }))
-        .await;
-    project
+    // Add /other-project as a folder to the main workspace.
+    main_project
         .update(cx, |project, cx| {
-            project.find_or_create_worktree("/project-b", true, cx)
+            project.find_or_create_worktree("/other-project", true, cx)
         })
         .await
         .expect("should add worktree");
     cx.run_until_parked();
 
-    // The workspace path_list is now [project-a, project-b]. The active
-    // thread's metadata was re-saved with the new paths by the agent panel's
-    // project subscription, so it stays visible under the updated group.
-    // The old [project-a] group persists in the sidebar (empty) because
-    // project_group_keys is append-only.
+    // The linked worktree workspace should have gotten the new folder too.
+    let wt_worktree_count =
+        worktree_project.read_with(cx, |project, cx| project.visible_worktrees(cx).count());
     assert_eq!(
-        visible_entries_as_strings(&sidebar, cx),
-        vec![
-            "v [project-a, project-b]", //
-            "  Hello *",
-            "v [project-a]",
-        ]
+        wt_worktree_count, 2,
+        "linked worktree project should have gotten the new folder"
     );
 
-    // The "New Thread" button must still be clickable (not stuck in
-    // "active/draft" state). Verify that `active_thread_is_draft` is
-    // false — the panel still has the old thread with messages.
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert!(
-            matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })),
-            "After adding a folder the panel still has a thread with messages, \
-                 so active_entry should be Thread, got {:?}",
-            sidebar.active_entry,
+    // Both workspaces should still exist under one key.
+    multi_workspace.read_with(cx, |mw, _cx| {
+        assert_eq!(mw.workspaces().count(), 2, "both workspaces should survive");
+        assert_eq!(
+            mw.project_group_keys().count(),
+            1,
+            "should still have 1 project group key"
         );
     });
 
-    // Actually click "New Thread" by calling create_new_thread and
-    // verify a new draft is created.
-    let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone());
-    sidebar.update_in(cx, |sidebar, window, cx| {
-        sidebar.create_new_thread(&workspace, window, cx);
+    // Threads should have been migrated to the new key.
+    let new_key_paths =
+        PathList::new(&[PathBuf::from("/other-project"), PathBuf::from("/project")]);
+    cx.update(|_window, cx| {
+        let store = ThreadMetadataStore::global(cx).read(cx);
+        assert_eq!(
+            store.entries_for_main_worktree_path(&old_key_paths).count(),
+            0,
+            "should have 0 threads under old key after migration"
+        );
+        assert_eq!(
+            store.entries_for_main_worktree_path(&new_key_paths).count(),
+            2,
+            "should have 2 threads under new key after migration"
+        );
     });
+
+    // Both threads should still be visible in the sidebar.
+    sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
     cx.run_until_parked();
 
-    // After creating a new thread, the panel should now be in draft
-    // state (no messages on the new thread).
-    sidebar.read_with(cx, |sidebar, _cx| {
-        assert_active_draft(
-            sidebar,
-            &workspace,
-            "After creating a new thread active_entry should be Draft",
-        );
-    });
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            //
+            "v [other-project, project]",
+            "  [~ Draft {project:wt-feature}] (active)",
+            "  Worktree Thread {project:wt-feature}",
+            "  Main Thread",
+        ]
+    );
 }
 
 #[gpui::test]

crates/story/Cargo.toml 🔗

@@ -1,17 +0,0 @@
-[package]
-name = "story"
-version = "0.1.0"
-edition.workspace = true
-publish.workspace = true
-license = "GPL-3.0-or-later"
-
-[lib]
-path = "src/story.rs"
-
-[lints]
-workspace = true
-
-[dependencies]
-gpui.workspace = true
-itertools.workspace = true
-smallvec.workspace = true

crates/story/src/story.rs 🔗

@@ -1,209 +0,0 @@
-use gpui::{
-    AnyElement, App, Div, SharedString, Window, colors::DefaultColors, div, prelude::*, px, rems,
-};
-use itertools::Itertools;
-use smallvec::SmallVec;
-
-pub struct Story {}
-
-impl Story {
-    pub fn container(cx: &App) -> gpui::Stateful<Div> {
-        div()
-            .id("story_container")
-            .overflow_y_scroll()
-            .w_full()
-            .min_h_full()
-            .flex()
-            .flex_col()
-            .text_color(cx.default_colors().text)
-            .bg(cx.default_colors().background)
-    }
-
-    pub fn title(title: impl Into<SharedString>, cx: &App) -> impl Element {
-        div()
-            .text_xs()
-            .text_color(cx.default_colors().text)
-            .child(title.into())
-    }
-
-    pub fn title_for<T>(cx: &App) -> impl Element {
-        Self::title(std::any::type_name::<T>(), cx)
-    }
-
-    pub fn section(cx: &App) -> Div {
-        div()
-            .p_4()
-            .m_4()
-            .border_1()
-            .border_color(cx.default_colors().separator)
-    }
-
-    pub fn section_title(cx: &App) -> Div {
-        div().text_lg().text_color(cx.default_colors().text)
-    }
-
-    pub fn group(cx: &App) -> Div {
-        div().my_2().bg(cx.default_colors().container)
-    }
-
-    pub fn code_block(code: impl Into<SharedString>, cx: &App) -> Div {
-        div()
-            .size_full()
-            .p_2()
-            .max_w(rems(36.))
-            .bg(cx.default_colors().container)
-            .rounded_sm()
-            .text_sm()
-            .text_color(cx.default_colors().text)
-            .overflow_hidden()
-            .child(code.into())
-    }
-
-    pub fn divider(cx: &App) -> Div {
-        div().my_2().h(px(1.)).bg(cx.default_colors().separator)
-    }
-
-    pub fn description(description: impl Into<SharedString>, cx: &App) -> impl Element {
-        div()
-            .text_sm()
-            .text_color(cx.default_colors().text)
-            .min_w_96()
-            .child(description.into())
-    }
-
-    pub fn label(label: impl Into<SharedString>, cx: &App) -> impl Element {
-        div()
-            .text_xs()
-            .text_color(cx.default_colors().text)
-            .child(label.into())
-    }
-
-    /// Note: Not `ui::v_flex` as the `story` crate doesn't depend on the `ui` crate.
-    pub fn v_flex() -> Div {
-        div().flex().flex_col().gap_1()
-    }
-}
-
-#[derive(IntoElement)]
-pub struct StoryItem {
-    label: SharedString,
-    item: AnyElement,
-    description: Option<SharedString>,
-    usage: Option<SharedString>,
-}
-
-impl StoryItem {
-    pub fn new(label: impl Into<SharedString>, item: impl IntoElement) -> Self {
-        Self {
-            label: label.into(),
-            item: item.into_any_element(),
-            description: None,
-            usage: None,
-        }
-    }
-
-    pub fn description(mut self, description: impl Into<SharedString>) -> Self {
-        self.description = Some(description.into());
-        self
-    }
-
-    pub fn usage(mut self, code: impl Into<SharedString>) -> Self {
-        self.usage = Some(code.into());
-        self
-    }
-}
-
-impl RenderOnce for StoryItem {
-    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
-        let colors = cx.default_colors();
-
-        div()
-            .my_2()
-            .flex()
-            .gap_4()
-            .w_full()
-            .child(
-                Story::v_flex()
-                    .px_2()
-                    .w_1_2()
-                    .min_h_px()
-                    .child(Story::label(self.label, cx))
-                    .child(
-                        div()
-                            .rounded_sm()
-                            .bg(colors.background)
-                            .border_1()
-                            .border_color(colors.border)
-                            .py_1()
-                            .px_2()
-                            .overflow_hidden()
-                            .child(self.item),
-                    )
-                    .when_some(self.description, |this, description| {
-                        this.child(Story::description(description, cx))
-                    }),
-            )
-            .child(
-                Story::v_flex()
-                    .px_2()
-                    .flex_none()
-                    .w_1_2()
-                    .min_h_px()
-                    .when_some(self.usage, |this, usage| {
-                        this.child(Story::label("Example Usage", cx))
-                            .child(Story::code_block(usage, cx))
-                    }),
-            )
-    }
-}
-
-#[derive(IntoElement)]
-pub struct StorySection {
-    description: Option<SharedString>,
-    children: SmallVec<[AnyElement; 2]>,
-}
-
-impl Default for StorySection {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-impl StorySection {
-    pub fn new() -> Self {
-        Self {
-            description: None,
-            children: SmallVec::new(),
-        }
-    }
-
-    pub fn description(mut self, description: impl Into<SharedString>) -> Self {
-        self.description = Some(description.into());
-        self
-    }
-}
-
-impl RenderOnce for StorySection {
-    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
-        let children: SmallVec<[AnyElement; 2]> = SmallVec::from_iter(Itertools::intersperse_with(
-            self.children.into_iter(),
-            || Story::divider(cx).into_any_element(),
-        ));
-
-        Story::section(cx)
-            // Section title
-            .py_2()
-            // Section description
-            .when_some(self.description, |section, description| {
-                section.child(Story::description(description, cx))
-            })
-            .child(div().flex().flex_col().gap_2().children(children))
-            .child(Story::divider(cx))
-    }
-}
-
-impl ParentElement for StorySection {
-    fn extend(&mut self, elements: impl IntoIterator<Item = AnyElement>) {
-        self.children.extend(elements)
-    }
-}

crates/storybook/Cargo.toml 🔗

@@ -1,41 +0,0 @@
-[package]
-name = "storybook"
-version = "0.1.0"
-edition.workspace = true
-publish.workspace = true
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[[bin]]
-name = "storybook"
-path = "src/storybook.rs"
-
-[dependencies]
-anyhow.workspace = true
-clap = { workspace = true, features = ["derive", "string"] }
-ctrlc = "3.4"
-dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
-editor.workspace = true
-fuzzy.workspace = true
-gpui = { workspace = true, default-features = true }
-gpui_platform.workspace = true
-indoc.workspace = true
-language.workspace = true
-log.workspace = true
-menu.workspace = true
-picker.workspace = true
-reqwest_client.workspace = true
-rust-embed.workspace = true
-settings.workspace = true
-theme_settings.workspace = true
-simplelog.workspace = true
-story.workspace = true
-strum = { workspace = true, features = ["derive"] }
-theme.workspace = true
-title_bar = { workspace = true, features = ["stories"] }
-ui = { workspace = true, features = ["stories"] }
-
-[dev-dependencies]
-gpui = { workspace = true, features = ["test-support"] }

crates/storybook/build.rs 🔗

@@ -1,9 +0,0 @@
-fn main() {
-    #[cfg(target_os = "windows")]
-    {
-        #[cfg(target_env = "msvc")]
-        {
-            println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024);
-        }
-    }
-}

crates/storybook/docs/thoughts.md 🔗

@@ -1,57 +0,0 @@
-Much of element styling is now handled by an external engine.
-
-How do I make an element hover.
-
-There's a hover style.
-
-Hoverable needs to wrap another element. That element can be styled.
-
-```rs
-struct Hoverable<E: Element> {
-
-}
-
-impl<V> Element<V> for Hoverable {
-
-}
-```
-
-```rs
-#[derive(Styled, Interactive)]
-pub struct Div {
-    declared_style: StyleRefinement,
-    interactions: Interactions
-}
-
-pub trait Styled {
-    fn declared_style(&mut self) -> &mut StyleRefinement;
-    fn compute_style(&mut self) -> Style {
-        Style::default().refine(self.declared_style())
-    }
-
-    // All the tailwind classes, modifying self.declared_style()
-}
-
-impl Style {
-    pub fn paint_background<V>(layout: Layout, cx: &mut PaintContext<V>);
-    pub fn paint_foreground<V>(layout: Layout, cx: &mut PaintContext<V>);
-}
-
-pub trait Interactive<V> {
-    fn interactions(&mut self) -> &mut Interactions<V>;
-
-    fn on_click(self, )
-}
-
-struct Interactions<V> {
-    click: SmallVec<[<Rc<dyn Fn(&mut V, &dyn Any, )>; 1]>,
-}
-```
-
-```rs
-trait Stylable {
-    type Style;
-
-    fn with_style(self, style: Self::Style) -> Self;
-}
-```

crates/storybook/src/app_menus.rs 🔗

@@ -1,7 +0,0 @@
-use gpui::{Menu, MenuItem};
-
-pub fn app_menus() -> Vec<Menu> {
-    use crate::actions::Quit;
-
-    vec![Menu::new("Storybook").items([MenuItem::action("Quit", Quit)])]
-}

crates/storybook/src/assets.rs 🔗

@@ -1,32 +0,0 @@
-use std::borrow::Cow;
-
-use anyhow::{Context as _, Result};
-use gpui::{AssetSource, SharedString};
-use rust_embed::RustEmbed;
-
-#[derive(RustEmbed)]
-#[folder = "../../assets"]
-#[include = "fonts/**/*"]
-#[include = "icons/**/*"]
-#[include = "images/**/*"]
-#[include = "themes/**/*"]
-#[include = "sounds/**/*"]
-#[include = "*.md"]
-#[exclude = "*.DS_Store"]
-pub struct Assets;
-
-impl AssetSource for Assets {
-    fn load(&self, path: &str) -> Result<Option<Cow<'static, [u8]>>> {
-        Self::get(path)
-            .map(|f| f.data)
-            .with_context(|| format!("could not find asset at path {path:?}"))
-            .map(Some)
-    }
-
-    fn list(&self, path: &str) -> Result<Vec<SharedString>> {
-        Ok(Self::iter()
-            .filter(|p| p.starts_with(path))
-            .map(SharedString::from)
-            .collect())
-    }
-}

crates/storybook/src/stories.rs 🔗

@@ -1,23 +0,0 @@
-mod auto_height_editor;
-mod cursor;
-mod focus;
-mod indent_guides;
-mod kitchen_sink;
-mod overflow_scroll;
-mod picker;
-mod scroll;
-mod text;
-mod viewport_units;
-mod with_rem_size;
-
-pub use auto_height_editor::*;
-pub use cursor::*;
-pub use focus::*;
-pub use indent_guides::*;
-pub use kitchen_sink::*;
-pub use overflow_scroll::*;
-pub use picker::*;
-pub use scroll::*;
-pub use text::*;
-pub use viewport_units::*;
-pub use with_rem_size::*;

crates/storybook/src/stories/auto_height_editor.rs 🔗

@@ -1,36 +0,0 @@
-use editor::Editor;
-use gpui::{
-    App, AppContext as _, Context, Entity, IntoElement, KeyBinding, ParentElement, Render, Styled,
-    Window, div, white,
-};
-
-pub struct AutoHeightEditorStory {
-    editor: Entity<Editor>,
-}
-
-impl AutoHeightEditorStory {
-    pub fn new(window: &mut Window, cx: &mut App) -> gpui::Entity<Self> {
-        cx.bind_keys([KeyBinding::new(
-            "enter",
-            editor::actions::Newline,
-            Some("Editor"),
-        )]);
-        cx.new(|cx| Self {
-            editor: cx.new(|cx| {
-                let mut editor = Editor::auto_height(1, 3, window, cx);
-                editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
-                editor
-            }),
-        })
-    }
-}
-
-impl Render for AutoHeightEditorStory {
-    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
-        div()
-            .size_full()
-            .bg(white())
-            .text_sm()
-            .child(div().w_32().bg(gpui::black()).child(self.editor.clone()))
-    }
-}

crates/storybook/src/stories/cursor.rs 🔗

@@ -1,109 +0,0 @@
-use gpui::{Div, Render, Stateful};
-use story::Story;
-use ui::prelude::*;
-
-pub struct CursorStory;
-
-impl Render for CursorStory {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let all_cursors: [(&str, Box<dyn Fn(Stateful<Div>) -> Stateful<Div>>); 19] = [
-            (
-                "cursor_default",
-                Box::new(|el: Stateful<Div>| el.cursor_default()),
-            ),
-            (
-                "cursor_pointer",
-                Box::new(|el: Stateful<Div>| el.cursor_pointer()),
-            ),
-            (
-                "cursor_text",
-                Box::new(|el: Stateful<Div>| el.cursor_text()),
-            ),
-            (
-                "cursor_move",
-                Box::new(|el: Stateful<Div>| el.cursor_move()),
-            ),
-            (
-                "cursor_not_allowed",
-                Box::new(|el: Stateful<Div>| el.cursor_not_allowed()),
-            ),
-            (
-                "cursor_context_menu",
-                Box::new(|el: Stateful<Div>| el.cursor_context_menu()),
-            ),
-            (
-                "cursor_crosshair",
-                Box::new(|el: Stateful<Div>| el.cursor_crosshair()),
-            ),
-            (
-                "cursor_vertical_text",
-                Box::new(|el: Stateful<Div>| el.cursor_vertical_text()),
-            ),
-            (
-                "cursor_alias",
-                Box::new(|el: Stateful<Div>| el.cursor_alias()),
-            ),
-            (
-                "cursor_copy",
-                Box::new(|el: Stateful<Div>| el.cursor_copy()),
-            ),
-            (
-                "cursor_no_drop",
-                Box::new(|el: Stateful<Div>| el.cursor_no_drop()),
-            ),
-            (
-                "cursor_grab",
-                Box::new(|el: Stateful<Div>| el.cursor_grab()),
-            ),
-            (
-                "cursor_grabbing",
-                Box::new(|el: Stateful<Div>| el.cursor_grabbing()),
-            ),
-            (
-                "cursor_col_resize",
-                Box::new(|el: Stateful<Div>| el.cursor_col_resize()),
-            ),
-            (
-                "cursor_row_resize",
-                Box::new(|el: Stateful<Div>| el.cursor_row_resize()),
-            ),
-            (
-                "cursor_n_resize",
-                Box::new(|el: Stateful<Div>| el.cursor_n_resize()),
-            ),
-            (
-                "cursor_e_resize",
-                Box::new(|el: Stateful<Div>| el.cursor_e_resize()),
-            ),
-            (
-                "cursor_s_resize",
-                Box::new(|el: Stateful<Div>| el.cursor_s_resize()),
-            ),
-            (
-                "cursor_w_resize",
-                Box::new(|el: Stateful<Div>| el.cursor_w_resize()),
-            ),
-        ];
-
-        Story::container(cx)
-            .flex()
-            .gap_1()
-            .child(Story::title("cursor", cx))
-            .children(all_cursors.map(|(name, apply_cursor)| {
-                div().gap_1().flex().text_color(gpui::white()).child(
-                    div()
-                        .flex()
-                        .items_center()
-                        .justify_center()
-                        .id(name)
-                        .map(apply_cursor)
-                        .w_64()
-                        .h_8()
-                        .bg(gpui::red())
-                        .active(|style| style.bg(gpui::green()))
-                        .text_sm()
-                        .child(Story::label(name, cx)),
-                )
-            }))
-    }
-}

crates/storybook/src/stories/focus.rs 🔗

@@ -1,123 +0,0 @@
-use gpui::{
-    App, Entity, FocusHandle, KeyBinding, Render, Subscription, Window, actions, div, prelude::*,
-};
-use ui::prelude::*;
-
-actions!(focus, [ActionA, ActionB, ActionC]);
-
-pub struct FocusStory {
-    parent_focus: FocusHandle,
-    child_1_focus: FocusHandle,
-    child_2_focus: FocusHandle,
-    _focus_subscriptions: Vec<Subscription>,
-}
-
-impl FocusStory {
-    pub fn model(window: &mut Window, cx: &mut App) -> Entity<Self> {
-        cx.bind_keys([
-            KeyBinding::new("cmd-a", ActionA, Some("parent")),
-            KeyBinding::new("cmd-a", ActionB, Some("child-1")),
-            KeyBinding::new("cmd-c", ActionC, None),
-        ]);
-
-        cx.new(|cx| {
-            let parent_focus = cx.focus_handle();
-            let child_1_focus = cx.focus_handle();
-            let child_2_focus = cx.focus_handle();
-            let _focus_subscriptions = vec![
-                cx.on_focus(&parent_focus, window, |_, _, _| {
-                    println!("Parent focused");
-                }),
-                cx.on_blur(&parent_focus, window, |_, _, _| {
-                    println!("Parent blurred");
-                }),
-                cx.on_focus(&child_1_focus, window, |_, _, _| {
-                    println!("Child 1 focused");
-                }),
-                cx.on_blur(&child_1_focus, window, |_, _, _| {
-                    println!("Child 1 blurred");
-                }),
-                cx.on_focus(&child_2_focus, window, |_, _, _| {
-                    println!("Child 2 focused");
-                }),
-                cx.on_blur(&child_2_focus, window, |_, _, _| {
-                    println!("Child 2 blurred");
-                }),
-            ];
-
-            Self {
-                parent_focus,
-                child_1_focus,
-                child_2_focus,
-                _focus_subscriptions,
-            }
-        })
-    }
-}
-
-impl Render for FocusStory {
-    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let theme = cx.theme();
-        let color_1 = theme.status().created;
-        let color_2 = theme.status().modified;
-        let color_4 = theme.status().conflict;
-        let color_5 = theme.status().ignored;
-        let color_6 = theme.status().renamed;
-        let color_7 = theme.status().hint;
-
-        div()
-            .id("parent")
-            .active(|style| style.bg(color_7))
-            .track_focus(&self.parent_focus)
-            .key_context("parent")
-            .on_action(cx.listener(|_, _action: &ActionA, _window, _cx| {
-                println!("Action A dispatched on parent");
-            }))
-            .on_action(cx.listener(|_, _action: &ActionB, _window, _cx| {
-                println!("Action B dispatched on parent");
-            }))
-            .on_key_down(cx.listener(|_, event, _, _| println!("Key down on parent {:?}", event)))
-            .on_key_up(cx.listener(|_, event, _, _| println!("Key up on parent {:?}", event)))
-            .size_full()
-            .bg(color_1)
-            .focus(|style| style.bg(color_2))
-            .child(
-                div()
-                    .track_focus(&self.child_1_focus)
-                    .key_context("child-1")
-                    .on_action(cx.listener(|_, _action: &ActionB, _window, _cx| {
-                        println!("Action B dispatched on child 1 during");
-                    }))
-                    .w_full()
-                    .h_6()
-                    .bg(color_4)
-                    .focus(|style| style.bg(color_5))
-                    .in_focus(|style| style.bg(color_6))
-                    .on_key_down(
-                        cx.listener(|_, event, _, _| println!("Key down on child 1 {:?}", event)),
-                    )
-                    .on_key_up(
-                        cx.listener(|_, event, _, _| println!("Key up on child 1 {:?}", event)),
-                    )
-                    .child("Child 1"),
-            )
-            .child(
-                div()
-                    .track_focus(&self.child_2_focus)
-                    .key_context("child-2")
-                    .on_action(cx.listener(|_, _action: &ActionC, _window, _cx| {
-                        println!("Action C dispatched on child 2");
-                    }))
-                    .w_full()
-                    .h_6()
-                    .bg(color_4)
-                    .on_key_down(
-                        cx.listener(|_, event, _, _| println!("Key down on child 2 {:?}", event)),
-                    )
-                    .on_key_up(
-                        cx.listener(|_, event, _, _| println!("Key up on child 2 {:?}", event)),
-                    )
-                    .child("Child 2"),
-            )
-    }
-}

crates/storybook/src/stories/indent_guides.rs 🔗

@@ -1,82 +0,0 @@
-use std::ops::Range;
-
-use gpui::{Entity, Render, div, uniform_list};
-use gpui::{prelude::*, *};
-use ui::{AbsoluteLength, Color, DefiniteLength, Label, LabelCommon, px, v_flex};
-
-use story::Story;
-
-const LENGTH: usize = 100;
-
-pub struct IndentGuidesStory {
-    depths: Vec<usize>,
-}
-
-impl IndentGuidesStory {
-    pub fn model(_window: &mut Window, cx: &mut App) -> Entity<Self> {
-        let mut depths = Vec::new();
-        depths.push(0);
-        depths.push(1);
-        depths.push(2);
-        for _ in 0..LENGTH - 6 {
-            depths.push(3);
-        }
-        depths.push(2);
-        depths.push(1);
-        depths.push(0);
-
-        cx.new(|_cx| Self { depths })
-    }
-}
-
-impl Render for IndentGuidesStory {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx)
-            .child(Story::title("Indent guides", cx))
-            .child(
-                v_flex().size_full().child(
-                    uniform_list(
-                        "some-list",
-                        self.depths.len(),
-                        cx.processor(move |this, range: Range<usize>, _window, _cx| {
-                            this.depths
-                                .iter()
-                                .enumerate()
-                                .skip(range.start)
-                                .take(range.end - range.start)
-                                .map(|(i, depth)| {
-                                    div()
-                                        .pl(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(
-                                            16. * (*depth as f32),
-                                        ))))
-                                        .child(Label::new(format!("Item {}", i)).color(Color::Info))
-                                })
-                                .collect()
-                        }),
-                    )
-                    .with_sizing_behavior(gpui::ListSizingBehavior::Infer)
-                    .with_decoration(
-                        ui::indent_guides(
-                            px(16.),
-                            ui::IndentGuideColors {
-                                default: Color::Info.color(cx),
-                                hover: Color::Accent.color(cx),
-                                active: Color::Accent.color(cx),
-                            },
-                        )
-                        .with_compute_indents_fn(
-                            cx.entity(),
-                            |this, range, _cx, _context| {
-                                this.depths
-                                    .iter()
-                                    .skip(range.start)
-                                    .take(range.end - range.start)
-                                    .cloned()
-                                    .collect()
-                            },
-                        ),
-                    ),
-                ),
-            )
-    }
-}

crates/storybook/src/stories/kitchen_sink.rs 🔗

@@ -1,32 +0,0 @@
-use gpui::{Entity, Render, prelude::*};
-use story::Story;
-use strum::IntoEnumIterator;
-use ui::prelude::*;
-
-use crate::story_selector::ComponentStory;
-
-pub struct KitchenSinkStory;
-
-impl KitchenSinkStory {
-    pub fn model(cx: &mut App) -> Entity<Self> {
-        cx.new(|_| Self)
-    }
-}
-
-impl Render for KitchenSinkStory {
-    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let component_stories = ComponentStory::iter()
-            .map(|selector| selector.story(window, cx))
-            .collect::<Vec<_>>();
-
-        Story::container(cx)
-            .id("kitchen-sink")
-            .overflow_y_scroll()
-            .child(Story::title("Kitchen Sink", cx))
-            .child(Story::label("Components", cx))
-            .child(div().flex().flex_col().children(component_stories))
-            // Add a bit of space at the bottom of the kitchen sink so elements
-            // don't end up squished right up against the bottom of the screen.
-            .child(div().p_4())
-    }
-}

crates/storybook/src/stories/overflow_scroll.rs 🔗

@@ -1,41 +0,0 @@
-use gpui::Render;
-use story::Story;
-
-use ui::prelude::*;
-
-pub struct OverflowScrollStory;
-
-impl Render for OverflowScrollStory {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx)
-            .child(Story::title("Overflow Scroll", cx))
-            .child(Story::label("`overflow_x_scroll`", cx))
-            .child(
-                h_flex()
-                    .id("overflow_x_scroll")
-                    .gap_2()
-                    .overflow_x_scroll()
-                    .children((0..100).map(|i| {
-                        div()
-                            .p_4()
-                            .debug_bg_cyan()
-                            .child(SharedString::from(format!("Child {}", i + 1)))
-                    })),
-            )
-            .child(Story::label("`overflow_y_scroll`", cx))
-            .child(
-                v_flex()
-                    .w_full()
-                    .flex_1()
-                    .id("overflow_y_scroll")
-                    .gap_2()
-                    .overflow_y_scroll()
-                    .children((0..100).map(|i| {
-                        div()
-                            .p_4()
-                            .debug_bg_green()
-                            .child(SharedString::from(format!("Child {}", i + 1)))
-                    })),
-            )
-    }
-}

crates/storybook/src/stories/picker.rs 🔗

@@ -1,206 +0,0 @@
-use fuzzy::StringMatchCandidate;
-use gpui::{App, Entity, KeyBinding, Render, SharedString, Styled, Task, Window, div, prelude::*};
-use picker::{Picker, PickerDelegate};
-use std::sync::Arc;
-use ui::{Label, ListItem};
-use ui::{ListItemSpacing, prelude::*};
-
-pub struct PickerStory {
-    picker: Entity<Picker<Delegate>>,
-}
-
-struct Delegate {
-    candidates: Arc<[StringMatchCandidate]>,
-    matches: Vec<usize>,
-    selected_ix: usize,
-}
-
-impl Delegate {
-    fn new(strings: &[&str]) -> Self {
-        Self {
-            candidates: strings
-                .iter()
-                .copied()
-                .enumerate()
-                .map(|(id, string)| StringMatchCandidate::new(id, string))
-                .collect(),
-            matches: vec![],
-            selected_ix: 0,
-        }
-    }
-}
-
-impl PickerDelegate for Delegate {
-    type ListItem = ListItem;
-
-    fn match_count(&self) -> usize {
-        self.candidates.len()
-    }
-
-    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
-        "Test".into()
-    }
-
-    fn render_match(
-        &self,
-        ix: usize,
-        selected: bool,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> Option<Self::ListItem> {
-        let candidate_ix = self.matches.get(ix)?;
-        // TASK: Make StringMatchCandidate::string a SharedString
-        let candidate = SharedString::from(self.candidates[*candidate_ix].string.clone());
-
-        Some(
-            ListItem::new(ix)
-                .inset(true)
-                .spacing(ListItemSpacing::Sparse)
-                .toggle_state(selected)
-                .child(Label::new(candidate)),
-        )
-    }
-
-    fn selected_index(&self) -> usize {
-        self.selected_ix
-    }
-
-    fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context<Picker<Self>>) {
-        self.selected_ix = ix;
-        cx.notify();
-    }
-
-    fn confirm(&mut self, secondary: bool, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {
-        let candidate_ix = self.matches[self.selected_ix];
-        let candidate = self.candidates[candidate_ix].string.clone();
-
-        if secondary {
-            eprintln!("Secondary confirmed {}", candidate)
-        } else {
-            eprintln!("Confirmed {}", candidate)
-        }
-    }
-
-    fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
-        cx.quit();
-    }
-
-    fn update_matches(
-        &mut self,
-        query: String,
-        _: &mut Window,
-        cx: &mut Context<Picker<Self>>,
-    ) -> Task<()> {
-        let candidates = self.candidates.clone();
-        self.matches = cx
-            .foreground_executor()
-            .block_on(fuzzy::match_strings(
-                &candidates,
-                &query,
-                true,
-                true,
-                100,
-                &Default::default(),
-                cx.background_executor().clone(),
-            ))
-            .into_iter()
-            .map(|r| r.candidate_id)
-            .collect();
-        self.selected_ix = 0;
-        Task::ready(())
-    }
-}
-
-impl PickerStory {
-    pub fn new(window: &mut Window, cx: &mut App) -> Entity<Self> {
-        cx.new(|cx| {
-            cx.bind_keys([
-                KeyBinding::new("up", menu::SelectPrevious, Some("picker")),
-                KeyBinding::new("pageup", menu::SelectFirst, Some("picker")),
-                KeyBinding::new("shift-pageup", menu::SelectFirst, Some("picker")),
-                KeyBinding::new("ctrl-p", menu::SelectPrevious, Some("picker")),
-                KeyBinding::new("down", menu::SelectNext, Some("picker")),
-                KeyBinding::new("pagedown", menu::SelectLast, Some("picker")),
-                KeyBinding::new("shift-pagedown", menu::SelectFirst, Some("picker")),
-                KeyBinding::new("ctrl-n", menu::SelectNext, Some("picker")),
-                KeyBinding::new("cmd-up", menu::SelectFirst, Some("picker")),
-                KeyBinding::new("cmd-down", menu::SelectLast, Some("picker")),
-                KeyBinding::new("enter", menu::Confirm, Some("picker")),
-                KeyBinding::new("ctrl-enter", menu::SecondaryConfirm, Some("picker")),
-                KeyBinding::new("cmd-enter", menu::SecondaryConfirm, Some("picker")),
-                KeyBinding::new("escape", menu::Cancel, Some("picker")),
-                KeyBinding::new("ctrl-c", menu::Cancel, Some("picker")),
-            ]);
-
-            PickerStory {
-                picker: cx.new(|cx| {
-                    let mut delegate = Delegate::new(&[
-                        "Baguette (France)",
-                        "Baklava (Turkey)",
-                        "Beef Wellington (UK)",
-                        "Biryani (India)",
-                        "Borscht (Ukraine)",
-                        "Bratwurst (Germany)",
-                        "Bulgogi (Korea)",
-                        "Burrito (USA)",
-                        "Ceviche (Peru)",
-                        "Chicken Tikka Masala (India)",
-                        "Churrasco (Brazil)",
-                        "Couscous (North Africa)",
-                        "Croissant (France)",
-                        "Dim Sum (China)",
-                        "Empanada (Argentina)",
-                        "Fajitas (Mexico)",
-                        "Falafel (Middle East)",
-                        "Feijoada (Brazil)",
-                        "Fish and Chips (UK)",
-                        "Fondue (Switzerland)",
-                        "Goulash (Hungary)",
-                        "Haggis (Scotland)",
-                        "Kebab (Middle East)",
-                        "Kimchi (Korea)",
-                        "Lasagna (Italy)",
-                        "Maple Syrup Pancakes (Canada)",
-                        "Moussaka (Greece)",
-                        "Pad Thai (Thailand)",
-                        "Paella (Spain)",
-                        "Pancakes (USA)",
-                        "Pasta Carbonara (Italy)",
-                        "Pavlova (Australia)",
-                        "Peking Duck (China)",
-                        "Pho (Vietnam)",
-                        "Pierogi (Poland)",
-                        "Pizza (Italy)",
-                        "Poutine (Canada)",
-                        "Pretzel (Germany)",
-                        "Ramen (Japan)",
-                        "Rendang (Indonesia)",
-                        "Sashimi (Japan)",
-                        "Satay (Indonesia)",
-                        "Shepherd's Pie (Ireland)",
-                        "Sushi (Japan)",
-                        "Tacos (Mexico)",
-                        "Tandoori Chicken (India)",
-                        "Tortilla (Spain)",
-                        "Tzatziki (Greece)",
-                        "Wiener Schnitzel (Austria)",
-                    ]);
-                    delegate.update_matches("".into(), window, cx).detach();
-
-                    let picker = Picker::uniform_list(delegate, window, cx);
-                    picker.focus(window, cx);
-                    picker
-                }),
-            }
-        })
-    }
-}
-
-impl Render for PickerStory {
-    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        div()
-            .bg(cx.theme().styles.colors.background)
-            .size_full()
-            .child(self.picker.clone())
-    }
-}

crates/storybook/src/stories/scroll.rs 🔗

@@ -1,52 +0,0 @@
-use gpui::{App, Entity, Render, SharedString, Styled, Window, div, prelude::*, px};
-use ui::Tooltip;
-use ui::prelude::*;
-
-pub struct ScrollStory;
-
-impl ScrollStory {
-    pub fn model(cx: &mut App) -> Entity<ScrollStory> {
-        cx.new(|_| ScrollStory)
-    }
-}
-
-impl Render for ScrollStory {
-    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let theme = cx.theme();
-        let color_1 = theme.status().created;
-        let color_2 = theme.status().modified;
-
-        div()
-            .id("parent")
-            .bg(theme.colors().background)
-            .size_full()
-            .overflow_scroll()
-            .children((0..10).map(|row| {
-                div()
-                    .w(px(1000.))
-                    .h(px(100.))
-                    .flex()
-                    .flex_row()
-                    .children((0..10).map(|column| {
-                        let id = SharedString::from(format!("{}, {}", row, column));
-                        let bg = if row % 2 == column % 2 {
-                            color_1
-                        } else {
-                            color_2
-                        };
-                        div()
-                            .id(id.clone())
-                            .tooltip(Tooltip::text(id))
-                            .bg(bg)
-                            .size(px(100_f32))
-                            .when(row >= 5 && column >= 5, |d| {
-                                d.overflow_scroll()
-                                    .child(div().size(px(50.)).bg(color_1))
-                                    .child(div().size(px(50.)).bg(color_2))
-                                    .child(div().size(px(50.)).bg(color_1))
-                                    .child(div().size(px(50.)).bg(color_2))
-                            })
-                    }))
-            }))
-    }
-}

crates/storybook/src/stories/text.rs 🔗

@@ -1,120 +0,0 @@
-use gpui::{
-    App, AppContext as _, Context, Entity, HighlightStyle, InteractiveText, IntoElement,
-    ParentElement, Render, Styled, StyledText, Window, div, green, red,
-};
-use indoc::indoc;
-use story::*;
-
-pub struct TextStory;
-
-impl TextStory {
-    pub fn model(cx: &mut App) -> Entity<Self> {
-        cx.new(|_| Self)
-    }
-}
-
-impl Render for TextStory {
-    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx)
-            .child(Story::title("Text", cx))
-            .children(vec![
-                StorySection::new()
-                    .child(
-                        StoryItem::new("Default", div().bg(gpui::blue()).child("Hello World!"))
-                            .usage(indoc! {r##"
-                                div()
-                                    .child("Hello World!")
-                                "##
-                            }),
-                    )
-                    .child(
-                        StoryItem::new(
-                            "Wrapping Text",
-                            div().max_w_96().child(concat!(
-                                "The quick brown fox jumps over the lazy dog. ",
-                                "Meanwhile, the lazy dog decided it was time for a change. ",
-                                "He started daily workout routines, ate healthier and became the fastest dog in town.",
-                            )),
-                        )
-                        .description("Set a width or max-width to enable text wrapping.")
-                        .usage(indoc! {r##"
-                            div()
-                                .max_w_96()
-                                .child("Some text that you want to wrap.")
-                            "##
-                        }),
-                    )
-                    .child(
-                        StoryItem::new(
-                            "tbd",
-                            div().flex().w_96().child(
-                                div().overflow_hidden().child(concat!(
-                                    "flex-row. width 96. overflow-hidden. The quick brown fox jumps over the lazy dog. ",
-                                    "Meanwhile, the lazy dog decided it was time for a change. ",
-                                    "He started daily workout routines, ate healthier and became the fastest dog in town.",
-                                )),
-                            ),
-                        ),
-                    )
-                    .child(
-                        StoryItem::new(
-                            "Text in Horizontal Flex",
-                            div().flex().w_96().bg(red()).child(concat!(
-                                "flex-row. width 96. The quick brown fox jumps over the lazy dog. ",
-                                "Meanwhile, the lazy dog decided it was time for a change. ",
-                                "He started daily workout routines, ate healthier and became the fastest dog in town.",
-                            )),
-                        )
-                        .usage(indoc! {r##"
-                            // NOTE: When rendering text in a horizontal flex container,
-                            // Taffy will not pass width constraints down from the parent.
-                            // To fix this, render text in a parent with overflow: hidden
-
-                            div()
-                                .max_w_96()
-                                .child("Some text that you want to wrap.")
-                            "##
-                        }),
-                    )
-                    .child(
-                        StoryItem::new(
-                            "Interactive Text",
-                            InteractiveText::new(
-                                "interactive",
-                                StyledText::new("Hello world, how is it going?").with_default_highlights(
-                                    &window.text_style(),
-                                    [
-                                        (
-                                            6..11,
-                                            HighlightStyle {
-                                                background_color: Some(green()),
-                                                ..Default::default()
-                                            },
-                                        ),
-                                    ],
-                                ),
-                            )
-                            .on_click(vec![2..4, 1..3, 7..9], |range_ix, _, _cx| {
-                                println!("Clicked range {range_ix}");
-                            }),
-                        )
-                        .usage(indoc! {r##"
-                            InteractiveText::new(
-                                "interactive",
-                                StyledText::new("Hello world, how is it going?").with_highlights(&window.text_style(), [
-                                    (6..11, HighlightStyle {
-                                        background_color: Some(green()),
-                                        ..Default::default()
-                                    }),
-                                ]),
-                            )
-                            .on_click(vec![2..4, 1..3, 7..9], |range_ix, _cx| {
-                                println!("Clicked range {range_ix}");
-                            })
-                            "##
-                        }),
-                    ),
-            ])
-            .into_element()
-    }
-}

crates/storybook/src/stories/viewport_units.rs 🔗

@@ -1,32 +0,0 @@
-use gpui::Render;
-use story::Story;
-
-use ui::prelude::*;
-
-pub struct ViewportUnitsStory;
-
-impl Render for ViewportUnitsStory {
-    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx).child(
-            div()
-                .flex()
-                .flex_row()
-                .child(
-                    div()
-                        .w(vw(0.5, window))
-                        .h(vh(0.8, window))
-                        .bg(gpui::red())
-                        .text_color(gpui::white())
-                        .child("50vw, 80vh"),
-                )
-                .child(
-                    div()
-                        .w(vw(0.25, window))
-                        .h(vh(0.33, window))
-                        .bg(gpui::green())
-                        .text_color(gpui::white())
-                        .child("25vw, 33vh"),
-                ),
-        )
-    }
-}

crates/storybook/src/stories/with_rem_size.rs 🔗

@@ -1,61 +0,0 @@
-use gpui::{AnyElement, Hsla, Render};
-use story::Story;
-
-use ui::{prelude::*, utils::WithRemSize};
-
-pub struct WithRemSizeStory;
-
-impl Render for WithRemSizeStory {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx).child(
-            Example::new(16., gpui::red())
-                .child(
-                    Example::new(24., gpui::green())
-                        .child(Example::new(8., gpui::blue()))
-                        .child(Example::new(16., gpui::yellow())),
-                )
-                .child(
-                    Example::new(12., gpui::green())
-                        .child(Example::new(48., gpui::blue()))
-                        .child(Example::new(16., gpui::yellow())),
-                ),
-        )
-    }
-}
-
-#[derive(IntoElement)]
-struct Example {
-    rem_size: Pixels,
-    border_color: Hsla,
-    children: Vec<AnyElement>,
-}
-
-impl Example {
-    pub fn new(rem_size: impl Into<Pixels>, border_color: Hsla) -> Self {
-        Self {
-            rem_size: rem_size.into(),
-            border_color,
-            children: Vec::new(),
-        }
-    }
-}
-
-impl ParentElement for Example {
-    fn extend(&mut self, elements: impl IntoIterator<Item = AnyElement>) {
-        self.children.extend(elements);
-    }
-}
-
-impl RenderOnce for Example {
-    fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
-        WithRemSize::new(self.rem_size).child(
-            v_flex()
-                .gap_2()
-                .p_2()
-                .border_2()
-                .border_color(self.border_color)
-                .child(Label::new(format!("1rem = {}px", f32::from(self.rem_size))))
-                .children(self.children),
-        )
-    }
-}

crates/storybook/src/story_selector.rs 🔗

@@ -1,109 +0,0 @@
-use std::str::FromStr;
-use std::sync::OnceLock;
-
-use crate::stories::*;
-use clap::ValueEnum;
-use clap::builder::PossibleValue;
-use gpui::AnyView;
-use strum::{EnumIter, EnumString, IntoEnumIterator};
-use ui::prelude::*;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, strum::Display, EnumString, EnumIter)]
-#[strum(serialize_all = "snake_case")]
-pub enum ComponentStory {
-    ApplicationMenu,
-    AutoHeightEditor,
-    ContextMenu,
-    Cursor,
-    Focus,
-    OverflowScroll,
-    Picker,
-    Scroll,
-    Text,
-    ViewportUnits,
-    WithRemSize,
-    IndentGuides,
-}
-
-impl ComponentStory {
-    pub fn story(&self, window: &mut Window, cx: &mut App) -> AnyView {
-        match self {
-            Self::ApplicationMenu => cx
-                .new(|cx| title_bar::ApplicationMenuStory::new(window, cx))
-                .into(),
-            Self::AutoHeightEditor => AutoHeightEditorStory::new(window, cx).into(),
-            Self::ContextMenu => cx.new(|_| ui::ContextMenuStory).into(),
-            Self::Cursor => cx.new(|_| crate::stories::CursorStory).into(),
-            Self::Focus => FocusStory::model(window, cx).into(),
-            Self::OverflowScroll => cx.new(|_| crate::stories::OverflowScrollStory).into(),
-            Self::Picker => PickerStory::new(window, cx).into(),
-            Self::Scroll => ScrollStory::model(cx).into(),
-            Self::Text => TextStory::model(cx).into(),
-            Self::ViewportUnits => cx.new(|_| crate::stories::ViewportUnitsStory).into(),
-            Self::WithRemSize => cx.new(|_| crate::stories::WithRemSizeStory).into(),
-            Self::IndentGuides => crate::stories::IndentGuidesStory::model(window, cx).into(),
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum StorySelector {
-    Component(ComponentStory),
-    KitchenSink,
-}
-
-impl FromStr for StorySelector {
-    type Err = anyhow::Error;
-
-    fn from_str(raw_story_name: &str) -> std::result::Result<Self, Self::Err> {
-        use anyhow::Context as _;
-
-        let story = raw_story_name.to_ascii_lowercase();
-
-        if story == "kitchen_sink" {
-            return Ok(Self::KitchenSink);
-        }
-
-        if let Some((_, story)) = story.split_once("components/") {
-            let component_story = ComponentStory::from_str(story)
-                .with_context(|| format!("story not found for component '{story}'"))?;
-
-            return Ok(Self::Component(component_story));
-        }
-
-        anyhow::bail!("story not found for '{raw_story_name}'")
-    }
-}
-
-impl StorySelector {
-    pub fn story(&self, window: &mut Window, cx: &mut App) -> AnyView {
-        match self {
-            Self::Component(component_story) => component_story.story(window, cx),
-            Self::KitchenSink => KitchenSinkStory::model(cx).into(),
-        }
-    }
-}
-
-/// The list of all stories available in the storybook.
-static ALL_STORY_SELECTORS: OnceLock<Vec<StorySelector>> = OnceLock::new();
-
-impl ValueEnum for StorySelector {
-    fn value_variants<'a>() -> &'a [Self] {
-        (ALL_STORY_SELECTORS.get_or_init(|| {
-            let component_stories = ComponentStory::iter().map(StorySelector::Component);
-
-            component_stories
-                .chain(std::iter::once(StorySelector::KitchenSink))
-                .collect::<Vec<_>>()
-        })) as _
-    }
-
-    fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
-        let value = match self {
-            Self::Component(story) => format!("components/{story}"),
-            Self::KitchenSink => "kitchen_sink".to_string(),
-        };
-
-        Some(PossibleValue::new(value))
-    }
-}

crates/storybook/src/storybook.rs 🔗

@@ -1,162 +0,0 @@
-mod actions;
-mod app_menus;
-mod assets;
-mod stories;
-mod story_selector;
-
-use std::sync::Arc;
-
-use clap::Parser;
-use dialoguer::FuzzySelect;
-use gpui::{
-    AnyView, App, Bounds, Context, Render, Window, WindowBounds, WindowOptions,
-    colors::{Colors, GlobalColors},
-    div, px, size,
-};
-use log::LevelFilter;
-use reqwest_client::ReqwestClient;
-use settings::{KeymapFile, Settings as _};
-use simplelog::SimpleLogger;
-use strum::IntoEnumIterator;
-use theme_settings::ThemeSettings;
-use ui::prelude::*;
-
-use crate::app_menus::app_menus;
-use crate::assets::Assets;
-use crate::story_selector::{ComponentStory, StorySelector};
-use actions::Quit;
-pub use indoc::indoc;
-
-#[derive(Parser)]
-#[command(author, version, about, long_about = None)]
-struct Args {
-    #[arg(value_enum)]
-    story: Option<StorySelector>,
-
-    /// The name of the theme to use in the storybook.
-    ///
-    /// If not provided, the default theme will be used.
-    #[arg(long)]
-    theme: Option<String>,
-}
-
-fn main() {
-    SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
-
-    menu::init();
-    let args = Args::parse();
-
-    let story_selector = args.story.unwrap_or_else(|| {
-        let stories = ComponentStory::iter().collect::<Vec<_>>();
-
-        ctrlc::set_handler(move || {}).unwrap();
-
-        let result = FuzzySelect::new()
-            .with_prompt("Choose a story to run:")
-            .items(&stories)
-            .interact();
-
-        let Ok(selection) = result else {
-            dialoguer::console::Term::stderr().show_cursor().unwrap();
-            std::process::exit(0);
-        };
-
-        StorySelector::Component(stories[selection])
-    });
-    let theme_name = args.theme.unwrap_or("One Dark".to_string());
-
-    gpui_platform::application()
-        .with_assets(Assets)
-        .run(move |cx| {
-            load_embedded_fonts(cx).unwrap();
-
-            cx.set_global(GlobalColors(Arc::new(Colors::default())));
-
-            let http_client = ReqwestClient::user_agent("zed_storybook").unwrap();
-            cx.set_http_client(Arc::new(http_client));
-
-            settings::init(cx);
-            theme_settings::init(theme::LoadThemes::All(Box::new(Assets)), cx);
-
-            let selector = story_selector;
-
-            let mut theme_settings = ThemeSettings::get_global(cx).clone();
-            theme_settings.theme =
-                theme_settings::ThemeSelection::Static(settings::ThemeName(theme_name.into()));
-            ThemeSettings::override_global(theme_settings, cx);
-
-            editor::init(cx);
-            init(cx);
-            load_storybook_keymap(cx);
-            cx.set_menus(app_menus());
-
-            let size = size(px(1500.), px(780.));
-            let bounds = Bounds::centered(None, size, cx);
-            let _window = cx.open_window(
-                WindowOptions {
-                    window_bounds: Some(WindowBounds::Windowed(bounds)),
-                    ..Default::default()
-                },
-                move |window, cx| {
-                    theme_settings::setup_ui_font(window, cx);
-
-                    cx.new(|cx| StoryWrapper::new(selector.story(window, cx)))
-                },
-            );
-
-            cx.activate(true);
-        });
-}
-
-#[derive(Clone)]
-pub struct StoryWrapper {
-    story: AnyView,
-}
-
-impl StoryWrapper {
-    pub(crate) fn new(story: AnyView) -> Self {
-        Self { story }
-    }
-}
-
-impl Render for StoryWrapper {
-    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
-        div()
-            .flex()
-            .flex_col()
-            .size_full()
-            .font_family(".ZedMono")
-            .child(self.story.clone())
-    }
-}
-
-fn load_embedded_fonts(cx: &App) -> anyhow::Result<()> {
-    let font_paths = cx.asset_source().list("fonts")?;
-    let mut embedded_fonts = Vec::new();
-    for font_path in font_paths {
-        if font_path.ends_with(".ttf") {
-            let font_bytes = cx
-                .asset_source()
-                .load(&font_path)?
-                .expect("Should never be None in the storybook");
-            embedded_fonts.push(font_bytes);
-        }
-    }
-
-    cx.text_system().add_fonts(embedded_fonts)
-}
-
-fn load_storybook_keymap(cx: &mut App) {
-    cx.bind_keys(KeymapFile::load_asset("keymaps/storybook.json", None, cx).unwrap());
-}
-
-pub fn init(cx: &mut App) {
-    cx.on_action(quit);
-}
-
-fn quit(_: &Quit, cx: &mut App) {
-    cx.spawn(async move |cx| {
-        cx.update(|cx| cx.quit());
-    })
-    .detach();
-}

crates/terminal_view/src/terminal_view.rs 🔗

@@ -1356,7 +1356,9 @@ impl Item for TerminalView {
         h_flex()
             .gap_1()
             .group("term-tab-icon")
-            .track_focus(&self.focus_handle)
+            .when(!params.selected, |this| {
+                this.track_focus(&self.focus_handle)
+            })
             .on_action(move |action: &RenameTerminal, window, cx| {
                 self_handle
                     .update(cx, |this, cx| this.rename_terminal(action, window, cx))

crates/theme/Cargo.toml 🔗

@@ -19,7 +19,6 @@ doctest = false
 [dependencies]
 anyhow.workspace = true
 collections.workspace = true
-derive_more.workspace = true
 gpui.workspace = true
 syntax_theme.workspace = true
 palette = { workspace = true, default-features = false, features = ["std"] }

crates/theme/src/registry.rs 🔗

@@ -3,7 +3,6 @@ use std::{fmt::Debug, path::Path};
 
 use anyhow::Result;
 use collections::HashMap;
-use derive_more::{Deref, DerefMut};
 use gpui::{App, AssetSource, Global, SharedString};
 use parking_lot::RwLock;
 use thiserror::Error;
@@ -38,9 +37,23 @@ pub struct IconThemeNotFoundError(pub SharedString);
 /// inserting the [`ThemeRegistry`] into the context as a global.
 ///
 /// This should not be exposed outside of this module.
-#[derive(Default, Deref, DerefMut)]
+#[derive(Default)]
 struct GlobalThemeRegistry(Arc<ThemeRegistry>);
 
+impl std::ops::DerefMut for GlobalThemeRegistry {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl std::ops::Deref for GlobalThemeRegistry {
+    type Target = Arc<ThemeRegistry>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
 impl Global for GlobalThemeRegistry {}
 
 struct ThemeRegistryState {

crates/theme/src/theme.rs 🔗

@@ -22,7 +22,6 @@ mod ui_density;
 
 use std::sync::Arc;
 
-use derive_more::{Deref, DerefMut};
 use gpui::BorrowAppContext;
 use gpui::Global;
 use gpui::{
@@ -129,18 +128,40 @@ impl ActiveTheme for App {
 }
 
 /// The appearance of the system.
-#[derive(Debug, Clone, Copy, Deref)]
+#[derive(Debug, Clone, Copy)]
 pub struct SystemAppearance(pub Appearance);
 
+impl std::ops::Deref for SystemAppearance {
+    type Target = Appearance;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
 impl Default for SystemAppearance {
     fn default() -> Self {
         Self(Appearance::Dark)
     }
 }
 
-#[derive(Deref, DerefMut, Default)]
+#[derive(Default)]
 struct GlobalSystemAppearance(SystemAppearance);
 
+impl std::ops::DerefMut for GlobalSystemAppearance {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl std::ops::Deref for GlobalSystemAppearance {
+    type Target = SystemAppearance;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
 impl Global for GlobalSystemAppearance {}
 
 impl SystemAppearance {

crates/theme_settings/src/settings.rs 🔗

@@ -490,7 +490,8 @@ pub fn adjusted_font_size(size: Pixels, cx: &App) -> Pixels {
     clamp_font_size(adjusted_font_size)
 }
 
-/// Adjusts the buffer font size.
+/// Adjusts the buffer font size, without persisting the result in the settings.
+/// This will be effective until the app is restarted.
 pub fn adjust_buffer_font_size(cx: &mut App, f: impl FnOnce(Pixels) -> Pixels) {
     let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size;
     let adjusted_size = cx

crates/theme_settings/src/theme_settings.rs 🔗

@@ -12,7 +12,7 @@ use std::sync::Arc;
 
 use ::settings::{IntoGpui, Settings, SettingsStore};
 use anyhow::{Context as _, Result};
-use gpui::{App, Font, HighlightStyle, Pixels, Refineable};
+use gpui::{App, Font, HighlightStyle, Pixels, Refineable, px};
 use gpui_util::ResultExt;
 use theme::{
     AccentColors, Appearance, AppearanceContent, DEFAULT_DARK_THEME, DEFAULT_ICON_THEME_NAME,
@@ -26,11 +26,12 @@ pub use crate::schema::{
     ThemeColorsContent, ThemeContent, ThemeFamilyContent, ThemeStyleContent,
     WindowBackgroundContent, status_colors_refinement, syntax_overrides, theme_colors_refinement,
 };
+use crate::settings::adjust_buffer_font_size;
 pub use crate::settings::{
     AgentFontSize, BufferLineHeight, FontFamilyName, IconThemeName, IconThemeSelection,
     ThemeAppearanceMode, ThemeName, ThemeSelection, ThemeSettings, adjust_agent_buffer_font_size,
-    adjust_agent_ui_font_size, adjust_buffer_font_size, adjust_ui_font_size, adjusted_font_size,
-    appearance_to_mode, clamp_font_size, default_theme, observe_buffer_font_size_adjustment,
+    adjust_agent_ui_font_size, adjust_ui_font_size, adjusted_font_size, appearance_to_mode,
+    clamp_font_size, default_theme, observe_buffer_font_size_adjustment,
     reset_agent_buffer_font_size, reset_agent_ui_font_size, reset_buffer_font_size,
     reset_ui_font_size, set_icon_theme, set_mode, set_theme, setup_ui_font,
 };
@@ -410,3 +411,15 @@ pub fn merge_accent_colors(
         accent_colors.0 = Arc::from(colors);
     }
 }
+
+/// Increases the buffer font size by 1 pixel, without persisting the result in the settings.
+/// This will be effective until the app is restarted.
+pub fn increase_buffer_font_size(cx: &mut App) {
+    adjust_buffer_font_size(cx, |size| size + px(1.0));
+}
+
+/// Decreases the buffer font size by 1 pixel, without persisting the result in the settings.
+/// This will be effective until the app is restarted.
+pub fn decrease_buffer_font_size(cx: &mut App) {
+    adjust_buffer_font_size(cx, |size| size - px(1.0));
+}

crates/title_bar/Cargo.toml 🔗

@@ -14,7 +14,7 @@ doctest = false
 
 [features]
 default = []
-stories = ["dep:story"]
+
 test-support = [
     "call/test-support",
     "client/test-support",
@@ -53,7 +53,6 @@ schemars.workspace = true
 serde.workspace = true
 settings.workspace = true
 smallvec.workspace = true
-story = { workspace = true, optional = true }
 telemetry.workspace = true
 theme.workspace = true
 ui.workspace = true

crates/title_bar/src/stories/application_menu.rs 🔗

@@ -1,29 +0,0 @@
-use gpui::{Entity, Render};
-use story::{Story, StoryItem, StorySection};
-
-use ui::prelude::*;
-
-use crate::application_menu::ApplicationMenu;
-
-pub struct ApplicationMenuStory {
-    menu: Entity<ApplicationMenu>,
-}
-
-impl ApplicationMenuStory {
-    pub fn new(window: &mut Window, cx: &mut App) -> Self {
-        Self {
-            menu: cx.new(|cx| ApplicationMenu::new(window, cx)),
-        }
-    }
-}
-
-impl Render for ApplicationMenuStory {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx)
-            .child(Story::title_for::<ApplicationMenu>(cx))
-            .child(StorySection::new().child(StoryItem::new(
-                "Application Menu",
-                h_flex().child(self.menu.clone()),
-            )))
-    }
-}

crates/title_bar/src/title_bar.rs 🔗

@@ -5,9 +5,6 @@ mod plan_chip;
 mod title_bar_settings;
 mod update_version;
 
-#[cfg(feature = "stories")]
-mod stories;
-
 use crate::application_menu::{ApplicationMenu, show_menus};
 use crate::plan_chip::PlanChip;
 pub use platform_title_bar::{
@@ -56,9 +53,6 @@ use zed_actions::OpenRemote;
 
 pub use onboarding_banner::restore_banner;
 
-#[cfg(feature = "stories")]
-pub use stories::*;
-
 const MAX_PROJECT_NAME_LENGTH: usize = 40;
 const MAX_BRANCH_NAME_LENGTH: usize = 40;
 const MAX_SHORT_SHA_LENGTH: usize = 8;

crates/title_bar/src/update_version.rs 🔗

@@ -84,11 +84,11 @@ impl Render for UpdateVersion {
             AutoUpdateStatus::Checking if self.update_check_type.is_manual() => {
                 UpdateButton::checking().into_any_element()
             }
-            AutoUpdateStatus::Downloading { version } if self.update_check_type.is_manual() => {
+            AutoUpdateStatus::Downloading { version } => {
                 let tooltip = Self::version_tooltip_message(&version);
                 UpdateButton::downloading(tooltip).into_any_element()
             }
-            AutoUpdateStatus::Installing { version } if self.update_check_type.is_manual() => {
+            AutoUpdateStatus::Installing { version } => {
                 let tooltip = Self::version_tooltip_message(&version);
                 UpdateButton::installing(tooltip).into_any_element()
             }
@@ -116,10 +116,7 @@ impl Render for UpdateVersion {
                     }))
                     .into_any_element()
             }
-            AutoUpdateStatus::Idle
-            | AutoUpdateStatus::Checking { .. }
-            | AutoUpdateStatus::Downloading { .. }
-            | AutoUpdateStatus::Installing { .. } => Empty.into_any_element(),
+            AutoUpdateStatus::Idle | AutoUpdateStatus::Checking { .. } => Empty.into_any_element(),
         }
     }
 }

crates/ui/Cargo.toml 🔗

@@ -24,7 +24,6 @@ menu.workspace = true
 schemars.workspace = true
 serde.workspace = true
 smallvec.workspace = true
-story = { workspace = true, optional = true }
 strum.workspace = true
 theme.workspace = true
 ui_macros.workspace = true
@@ -38,4 +37,3 @@ gpui = { workspace = true, features = ["test-support"] }
 
 [features]
 default = []
-stories = ["dep:story"]

crates/ui/src/components.rs 🔗

@@ -40,9 +40,6 @@ mod toggle;
 mod tooltip;
 mod tree_view_item;
 
-#[cfg(feature = "stories")]
-mod stories;
-
 pub use ai::*;
 pub use avatar::*;
 pub use banner::*;
@@ -84,6 +81,3 @@ pub use tab_bar::*;
 pub use toggle::*;
 pub use tooltip::*;
 pub use tree_view_item::*;
-
-#[cfg(feature = "stories")]
-pub use stories::*;

crates/ui/src/components/stories/context_menu.rs 🔗

@@ -1,81 +0,0 @@
-use gpui::{Corner, Entity, Render, actions};
-use story::Story;
-
-use crate::prelude::*;
-use crate::{ContextMenu, Label, right_click_menu};
-
-actions!(stories, [PrintCurrentDate, PrintBestFood]);
-
-fn build_menu(
-    window: &mut Window,
-    cx: &mut App,
-    header: impl Into<SharedString>,
-) -> Entity<ContextMenu> {
-    ContextMenu::build(window, cx, |menu, _, _| {
-        menu.header(header)
-            .separator()
-            .action("Print current time", Box::new(PrintCurrentDate))
-            .entry(
-                "Print best food",
-                Some(Box::new(PrintBestFood)),
-                |window, cx| window.dispatch_action(Box::new(PrintBestFood), cx),
-            )
-    })
-}
-
-pub struct ContextMenuStory;
-
-impl Render for ContextMenuStory {
-    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        Story::container(cx)
-            .on_action(|_: &PrintCurrentDate, _, _| {
-                println!("printing unix time!");
-                if let Ok(unix_time) = std::time::UNIX_EPOCH.elapsed() {
-                    println!("Current Unix time is {:?}", unix_time.as_secs());
-                }
-            })
-            .on_action(|_: &PrintBestFood, _, _| {
-                println!("burrito");
-            })
-            .flex()
-            .flex_row()
-            .justify_between()
-            .child(
-                div()
-                    .flex()
-                    .flex_col()
-                    .justify_between()
-                    .child(
-                        right_click_menu("test2")
-                            .trigger(|_, _, _| Label::new("TOP LEFT"))
-                            .menu(move |window, cx| build_menu(window, cx, "top left")),
-                    )
-                    .child(
-                        right_click_menu("test1")
-                            .trigger(|_, _, _| Label::new("BOTTOM LEFT"))
-                            .anchor(Corner::BottomLeft)
-                            .attach(Corner::TopLeft)
-                            .menu(move |window, cx| build_menu(window, cx, "bottom left")),
-                    ),
-            )
-            .child(
-                div()
-                    .flex()
-                    .flex_col()
-                    .justify_between()
-                    .child(
-                        right_click_menu("test3")
-                            .trigger(|_, _, _| Label::new("TOP RIGHT"))
-                            .anchor(Corner::TopRight)
-                            .menu(move |window, cx| build_menu(window, cx, "top right")),
-                    )
-                    .child(
-                        right_click_menu("test4")
-                            .trigger(|_, _, _| Label::new("BOTTOM RIGHT"))
-                            .anchor(Corner::BottomRight)
-                            .attach(Corner::TopRight)
-                            .menu(move |window, cx| build_menu(window, cx, "bottom right")),
-                    ),
-            )
-    }
-}

crates/util/src/disambiguate.rs 🔗

@@ -0,0 +1,202 @@
+use std::collections::HashMap;
+use std::hash::Hash;
+
+/// Computes the minimum detail level needed for each item so that no two items
+/// share the same description. Items whose descriptions are unique at level 0
+/// stay at 0; items that collide get their detail level incremented until either
+/// the collision is resolved or increasing the level no longer changes the
+/// description (preventing infinite loops for truly identical items).
+///
+/// The `get_description` closure must return a sequence that eventually reaches
+/// a "fixed point" where increasing `detail` no longer changes the output. If
+/// an item reaches its fixed point, it is assumed it will no longer change and
+/// will no longer be checked for collisions.
+pub fn compute_disambiguation_details<T, D>(
+    items: &[T],
+    get_description: impl Fn(&T, usize) -> D,
+) -> Vec<usize>
+where
+    D: Eq + Hash + Clone,
+{
+    let mut details = vec![0usize; items.len()];
+    let mut descriptions: HashMap<D, Vec<usize>> = HashMap::default();
+    let mut current_descriptions: Vec<D> =
+        items.iter().map(|item| get_description(item, 0)).collect();
+
+    loop {
+        let mut any_collisions = false;
+
+        for (index, (item, &detail)) in items.iter().zip(&details).enumerate() {
+            if detail > 0 {
+                let new_description = get_description(item, detail);
+                if new_description == current_descriptions[index] {
+                    continue;
+                }
+                current_descriptions[index] = new_description;
+            }
+            descriptions
+                .entry(current_descriptions[index].clone())
+                .or_insert_with(Vec::new)
+                .push(index);
+        }
+
+        for (_, indices) in descriptions.drain() {
+            if indices.len() > 1 {
+                any_collisions = true;
+                for index in indices {
+                    details[index] += 1;
+                }
+            }
+        }
+
+        if !any_collisions {
+            break;
+        }
+    }
+
+    details
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_no_conflicts() {
+        let items = vec!["alpha", "beta", "gamma"];
+        let details = compute_disambiguation_details(&items, |item, _detail| item.to_string());
+        assert_eq!(details, vec![0, 0, 0]);
+    }
+
+    #[test]
+    fn test_simple_two_way_conflict() {
+        // Two items with the same base name but different parents.
+        let items = vec![("src/foo.rs", "foo.rs"), ("lib/foo.rs", "foo.rs")];
+        let details = compute_disambiguation_details(&items, |item, detail| match detail {
+            0 => item.1.to_string(),
+            _ => item.0.to_string(),
+        });
+        assert_eq!(details, vec![1, 1]);
+    }
+
+    #[test]
+    fn test_three_way_conflict() {
+        let items = vec![
+            ("foo.rs", "a/foo.rs"),
+            ("foo.rs", "b/foo.rs"),
+            ("foo.rs", "c/foo.rs"),
+        ];
+        let details = compute_disambiguation_details(&items, |item, detail| match detail {
+            0 => item.0.to_string(),
+            _ => item.1.to_string(),
+        });
+        assert_eq!(details, vec![1, 1, 1]);
+    }
+
+    #[test]
+    fn test_deeper_conflict() {
+        // At detail 0, all three show "file.rs".
+        // At detail 1, items 0 and 1 both show "src/file.rs", item 2 shows "lib/file.rs".
+        // At detail 2, item 0 shows "a/src/file.rs", item 1 shows "b/src/file.rs".
+        let items = vec![
+            vec!["file.rs", "src/file.rs", "a/src/file.rs"],
+            vec!["file.rs", "src/file.rs", "b/src/file.rs"],
+            vec!["file.rs", "lib/file.rs", "x/lib/file.rs"],
+        ];
+        let details = compute_disambiguation_details(&items, |item, detail| {
+            let clamped = detail.min(item.len() - 1);
+            item[clamped].to_string()
+        });
+        assert_eq!(details, vec![2, 2, 1]);
+    }
+
+    #[test]
+    fn test_mixed_conflicting_and_unique() {
+        let items = vec![
+            ("src/foo.rs", "foo.rs"),
+            ("lib/foo.rs", "foo.rs"),
+            ("src/bar.rs", "bar.rs"),
+        ];
+        let details = compute_disambiguation_details(&items, |item, detail| match detail {
+            0 => item.1.to_string(),
+            _ => item.0.to_string(),
+        });
+        assert_eq!(details, vec![1, 1, 0]);
+    }
+
+    #[test]
+    fn test_identical_items_terminates() {
+        // All items return the same description at every detail level.
+        // The algorithm must terminate rather than looping forever.
+        let items = vec!["same", "same", "same"];
+        let details = compute_disambiguation_details(&items, |item, _detail| item.to_string());
+        // After bumping to 1, the description doesn't change from level 0,
+        // so the items are skipped and the loop terminates.
+        assert_eq!(details, vec![1, 1, 1]);
+    }
+
+    #[test]
+    fn test_single_item() {
+        let items = vec!["only"];
+        let details = compute_disambiguation_details(&items, |item, _detail| item.to_string());
+        assert_eq!(details, vec![0]);
+    }
+
+    #[test]
+    fn test_empty_input() {
+        let items: Vec<&str> = vec![];
+        let details = compute_disambiguation_details(&items, |item, _detail| item.to_string());
+        let expected: Vec<usize> = vec![];
+        assert_eq!(details, expected);
+    }
+
+    #[test]
+    fn test_duplicate_paths_from_multiple_groups() {
+        use std::path::Path;
+
+        // Simulates the sidebar scenario: a path like /Users/rtfeldman/code/zed
+        // appears in two project groups (e.g. "zed" alone and "zed, roc").
+        // After deduplication, only unique paths should be disambiguated.
+        //
+        // Paths:
+        //   /Users/rtfeldman/code/worktrees/zed/focal-arrow/zed  (group 1)
+        //   /Users/rtfeldman/code/zed                             (group 2)
+        //   /Users/rtfeldman/code/zed                             (group 3, same path as group 2)
+        //   /Users/rtfeldman/code/roc                             (group 3)
+        //
+        // A naive flat_map collects duplicates. The duplicate /code/zed entries
+        // collide with each other and drive the detail to the full path.
+        // The fix is to deduplicate before disambiguating.
+
+        fn path_suffix(path: &Path, detail: usize) -> String {
+            let mut components: Vec<_> = path
+                .components()
+                .rev()
+                .filter_map(|c| match c {
+                    std::path::Component::Normal(s) => Some(s.to_string_lossy()),
+                    _ => None,
+                })
+                .take(detail + 1)
+                .collect();
+            components.reverse();
+            components.join("/")
+        }
+
+        let all_paths: Vec<&Path> = vec![
+            Path::new("/Users/rtfeldman/code/worktrees/zed/focal-arrow/zed"),
+            Path::new("/Users/rtfeldman/code/zed"),
+            Path::new("/Users/rtfeldman/code/roc"),
+        ];
+
+        let details =
+            compute_disambiguation_details(&all_paths, |path, detail| path_suffix(path, detail));
+
+        // focal-arrow/zed and code/zed both end in "zed", so they need detail 1.
+        // "roc" is unique at detail 0.
+        assert_eq!(details, vec![1, 1, 0]);
+
+        assert_eq!(path_suffix(all_paths[0], details[0]), "focal-arrow/zed");
+        assert_eq!(path_suffix(all_paths[1], details[1]), "code/zed");
+        assert_eq!(path_suffix(all_paths[2], details[2]), "roc");
+    }
+}

crates/util/src/markdown.rs 🔗

@@ -1,5 +1,62 @@
 use std::fmt::{Display, Formatter};
 
+/// Generates a URL-friendly slug from heading text (e.g. "Hello World" → "hello-world").
+pub fn generate_heading_slug(text: &str) -> String {
+    text.trim()
+        .chars()
+        .filter_map(|c| {
+            if c.is_alphanumeric() || c == '-' || c == '_' {
+                Some(c.to_lowercase().next().unwrap_or(c))
+            } else if c == ' ' {
+                Some('-')
+            } else {
+                None
+            }
+        })
+        .collect()
+}
+
+/// Returns true if the URL starts with a URI scheme (RFC 3986 §3.1).
+fn has_uri_scheme(url: &str) -> bool {
+    let mut chars = url.chars();
+    match chars.next() {
+        Some(c) if c.is_ascii_alphabetic() => {}
+        _ => return false,
+    }
+    for c in chars {
+        if c == ':' {
+            return true;
+        }
+        if !(c.is_ascii_alphanumeric() || c == '+' || c == '-' || c == '.') {
+            return false;
+        }
+    }
+    false
+}
+
+/// Splits a relative URL into its path and `#fragment` parts.
+/// Absolute URLs are returned as-is with no fragment.
+pub fn split_local_url_fragment(url: &str) -> (&str, Option<&str>) {
+    if has_uri_scheme(url) {
+        return (url, None);
+    }
+    match url.find('#') {
+        Some(pos) => {
+            let path = &url[..pos];
+            let fragment = &url[pos + 1..];
+            (
+                path,
+                if fragment.is_empty() {
+                    None
+                } else {
+                    Some(fragment)
+                },
+            )
+        }
+        None => (url, None),
+    }
+}
+
 /// Indicates that the wrapped `String` is markdown text.
 #[derive(Debug, Clone)]
 pub struct MarkdownString(pub String);
@@ -265,4 +322,55 @@ mod tests {
             "it can't be downgraded later"
         );
     }
+
+    #[test]
+    fn test_split_local_url_fragment() {
+        assert_eq!(split_local_url_fragment("#heading"), ("", Some("heading")));
+        assert_eq!(
+            split_local_url_fragment("./file.md#heading"),
+            ("./file.md", Some("heading"))
+        );
+        assert_eq!(split_local_url_fragment("./file.md"), ("./file.md", None));
+        assert_eq!(
+            split_local_url_fragment("https://example.com#frag"),
+            ("https://example.com#frag", None)
+        );
+        assert_eq!(
+            split_local_url_fragment("mailto:user@example.com"),
+            ("mailto:user@example.com", None)
+        );
+        assert_eq!(split_local_url_fragment("#"), ("", None));
+        assert_eq!(
+            split_local_url_fragment("../other.md#section"),
+            ("../other.md", Some("section"))
+        );
+        assert_eq!(
+            split_local_url_fragment("123:not-a-scheme#frag"),
+            ("123:not-a-scheme", Some("frag"))
+        );
+    }
+
+    #[test]
+    fn test_generate_heading_slug() {
+        assert_eq!(generate_heading_slug("Hello World"), "hello-world");
+        assert_eq!(generate_heading_slug("Hello  World"), "hello--world");
+        assert_eq!(generate_heading_slug("Hello-World"), "hello-world");
+        assert_eq!(
+            generate_heading_slug("Some **bold** text"),
+            "some-bold-text"
+        );
+        assert_eq!(generate_heading_slug("Let's try with Ü"), "lets-try-with-ü");
+        assert_eq!(
+            generate_heading_slug("heading with 123 numbers"),
+            "heading-with-123-numbers"
+        );
+        assert_eq!(
+            generate_heading_slug("What about (parens)?"),
+            "what-about-parens"
+        );
+        assert_eq!(
+            generate_heading_slug("  leading spaces  "),
+            "leading-spaces"
+        );
+    }
 }

crates/util/src/util.rs 🔗

@@ -1,5 +1,6 @@
 pub mod archive;
 pub mod command;
+pub mod disambiguate;
 pub mod fs;
 pub mod markdown;
 pub mod path_list;

crates/vercel/src/vercel.rs 🔗

@@ -13,7 +13,7 @@ pub enum Model {
     #[serde(rename = "custom")]
     Custom {
         name: String,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         max_tokens: u64,
         max_output_tokens: Option<u64>,

crates/workspace/src/multi_workspace.rs 🔗

@@ -101,6 +101,14 @@ pub enum MultiWorkspaceEvent {
     ActiveWorkspaceChanged,
     WorkspaceAdded(Entity<Workspace>),
     WorkspaceRemoved(EntityId),
+    WorktreePathAdded {
+        old_main_paths: PathList,
+        added_path: PathBuf,
+    },
+    WorktreePathRemoved {
+        old_main_paths: PathList,
+        removed_path: PathBuf,
+    },
 }
 
 pub enum SidebarEvent {
@@ -302,7 +310,7 @@ pub struct MultiWorkspace {
     workspaces: Vec<Entity<Workspace>>,
     active_workspace: ActiveWorkspace,
     project_group_keys: Vec<ProjectGroupKey>,
-    provisional_project_group_keys: HashMap<EntityId, ProjectGroupKey>,
+    workspace_group_keys: HashMap<EntityId, ProjectGroupKey>,
     sidebar: Option<Box<dyn SidebarHandle>>,
     sidebar_open: bool,
     sidebar_overlay: Option<AnyView>,
@@ -355,7 +363,7 @@ impl MultiWorkspace {
         Self {
             window_id: window.window_handle().window_id(),
             project_group_keys: Vec::new(),
-            provisional_project_group_keys: HashMap::default(),
+            workspace_group_keys: HashMap::default(),
             workspaces: Vec::new(),
             active_workspace: ActiveWorkspace::Transient(workspace),
             sidebar: None,
@@ -559,19 +567,11 @@ impl MultiWorkspace {
         cx.subscribe_in(&project, window, {
             let workspace = workspace.downgrade();
             move |this, _project, event, _window, cx| match event {
-                project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => {
+                project::Event::WorktreeAdded(_)
+                | project::Event::WorktreeRemoved(_)
+                | project::Event::WorktreeUpdatedRootRepoCommonDir(_) => {
                     if let Some(workspace) = workspace.upgrade() {
-                        this.add_project_group_key(workspace.read(cx).project_group_key(cx));
-                    }
-                }
-                project::Event::WorktreeUpdatedRootRepoCommonDir(_) => {
-                    if let Some(workspace) = workspace.upgrade() {
-                        this.maybe_clear_provisional_project_group_key(&workspace, cx);
-                        this.add_project_group_key(
-                            this.project_group_key_for_workspace(&workspace, cx),
-                        );
-                        this.remove_stale_project_group_keys(cx);
-                        cx.notify();
+                        this.handle_workspace_key_change(&workspace, cx);
                     }
                 }
                 _ => {}
@@ -587,7 +587,124 @@ impl MultiWorkspace {
         .detach();
     }
 
-    pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) {
+    fn handle_workspace_key_change(
+        &mut self,
+        workspace: &Entity<Workspace>,
+        cx: &mut Context<Self>,
+    ) {
+        let workspace_id = workspace.entity_id();
+        let old_key = self.project_group_key_for_workspace(workspace, cx);
+        let new_key = workspace.read(cx).project_group_key(cx);
+
+        if new_key.path_list().paths().is_empty() || old_key == new_key {
+            return;
+        }
+
+        let active_workspace = self.workspace().clone();
+
+        self.set_workspace_group_key(workspace, new_key.clone());
+
+        let changed_root_paths = workspace.read(cx).root_paths(cx);
+        let old_paths = old_key.path_list().paths();
+        let new_paths = new_key.path_list().paths();
+
+        // Remove workspaces that already had the new key and have the same
+        // root paths (true duplicates that this workspace is replacing).
+        //
+        // NOTE: These are dropped without prompting for unsaved changes because
+        // the user explicitly added a folder that makes this workspace
+        // identical to the duplicate — they are intentionally overwriting it.
+        let duplicate_workspaces: Vec<Entity<Workspace>> = self
+            .workspaces
+            .iter()
+            .filter(|ws| {
+                ws.entity_id() != workspace_id
+                    && self.project_group_key_for_workspace(ws, cx) == new_key
+                    && ws.read(cx).root_paths(cx) == changed_root_paths
+            })
+            .cloned()
+            .collect();
+
+        if duplicate_workspaces.contains(&active_workspace) {
+            // The active workspace is among the duplicates — drop the
+            // incoming workspace instead so the user stays where they are.
+            self.detach_workspace(workspace, cx);
+            self.workspaces.retain(|w| w != workspace);
+        } else {
+            for ws in &duplicate_workspaces {
+                self.detach_workspace(ws, cx);
+                self.workspaces.retain(|w| w != ws);
+            }
+        }
+
+        // Propagate folder adds/removes to linked worktree siblings
+        // (different root paths, same old key) so they stay in the group.
+        let group_workspaces: Vec<Entity<Workspace>> = self
+            .workspaces
+            .iter()
+            .filter(|ws| {
+                ws.entity_id() != workspace_id
+                    && self.project_group_key_for_workspace(ws, cx) == old_key
+            })
+            .cloned()
+            .collect();
+
+        for workspace in &group_workspaces {
+            // Pre-set the new key so later WorktreeAdded events don't re-trigger this handler.
+            self.set_workspace_group_key(&workspace, new_key.clone());
+
+            let project = workspace.read(cx).project().clone();
+
+            for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) {
+                project
+                    .update(cx, |project, cx| {
+                        project.find_or_create_worktree(added_path, true, cx)
+                    })
+                    .detach_and_log_err(cx);
+            }
+
+            for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) {
+                project.update(cx, |project, cx| {
+                    project.remove_worktree_for_main_worktree_path(removed_path, cx);
+                });
+            }
+        }
+
+        // Restore the active workspace after removals may have shifted
+        // the index. If the previously active workspace was removed,
+        // fall back to the workspace whose key just changed.
+        if let ActiveWorkspace::Persistent(_) = &self.active_workspace {
+            let target = if self.workspaces.contains(&active_workspace) {
+                &active_workspace
+            } else {
+                workspace
+            };
+            if let Some(new_index) = self.workspaces.iter().position(|ws| ws == target) {
+                self.active_workspace = ActiveWorkspace::Persistent(new_index);
+            }
+        }
+
+        self.remove_stale_project_group_keys(cx);
+
+        let old_main_paths = old_key.path_list().clone();
+        for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) {
+            cx.emit(MultiWorkspaceEvent::WorktreePathAdded {
+                old_main_paths: old_main_paths.clone(),
+                added_path: added_path.clone(),
+            });
+        }
+        for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) {
+            cx.emit(MultiWorkspaceEvent::WorktreePathRemoved {
+                old_main_paths: old_main_paths.clone(),
+                removed_path: removed_path.clone(),
+            });
+        }
+
+        self.serialize(cx);
+        cx.notify();
+    }
+
+    fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) {
         if project_group_key.path_list().paths().is_empty() {
             return;
         }
@@ -598,12 +715,12 @@ impl MultiWorkspace {
         self.project_group_keys.insert(0, project_group_key);
     }
 
-    pub fn set_provisional_project_group_key(
+    pub(crate) fn set_workspace_group_key(
         &mut self,
         workspace: &Entity<Workspace>,
         project_group_key: ProjectGroupKey,
     ) {
-        self.provisional_project_group_keys
+        self.workspace_group_keys
             .insert(workspace.entity_id(), project_group_key.clone());
         self.add_project_group_key(project_group_key);
     }
@@ -613,28 +730,12 @@ impl MultiWorkspace {
         workspace: &Entity<Workspace>,
         cx: &App,
     ) -> ProjectGroupKey {
-        self.provisional_project_group_keys
+        self.workspace_group_keys
             .get(&workspace.entity_id())
             .cloned()
             .unwrap_or_else(|| workspace.read(cx).project_group_key(cx))
     }
 
-    fn maybe_clear_provisional_project_group_key(
-        &mut self,
-        workspace: &Entity<Workspace>,
-        cx: &App,
-    ) {
-        let live_key = workspace.read(cx).project_group_key(cx);
-        if self
-            .provisional_project_group_keys
-            .get(&workspace.entity_id())
-            .is_some_and(|key| *key == live_key)
-        {
-            self.provisional_project_group_keys
-                .remove(&workspace.entity_id());
-        }
-    }
-
     fn remove_stale_project_group_keys(&mut self, cx: &App) {
         let workspace_keys: HashSet<ProjectGroupKey> = self
             .workspaces
@@ -1045,7 +1146,6 @@ impl MultiWorkspace {
                     self.promote_transient(old, cx);
                 } else {
                     self.detach_workspace(&old, cx);
-                    cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id()));
                 }
             }
         } else {
@@ -1056,7 +1156,6 @@ impl MultiWorkspace {
             });
             if let Some(old) = self.active_workspace.set_transient(workspace) {
                 self.detach_workspace(&old, cx);
-                cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id()));
             }
         }
 
@@ -1083,7 +1182,7 @@ impl MultiWorkspace {
     /// Returns the index of the newly inserted workspace.
     fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
         let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
-        self.add_project_group_key(project_group_key);
+        self.set_workspace_group_key(&workspace, project_group_key);
         self.workspaces.push(workspace.clone());
         cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
         self.workspaces.len() - 1
@@ -1099,10 +1198,10 @@ impl MultiWorkspace {
         for workspace in std::mem::take(&mut self.workspaces) {
             if workspace != active {
                 self.detach_workspace(&workspace, cx);
-                cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
             }
         }
         self.project_group_keys.clear();
+        self.workspace_group_keys.clear();
         self.active_workspace = ActiveWorkspace::Transient(active);
         cx.notify();
     }
@@ -1128,7 +1227,7 @@ impl MultiWorkspace {
                 workspace.set_multi_workspace(weak_self, cx);
             });
 
-            self.add_project_group_key(project_group_key);
+            self.set_workspace_group_key(&workspace, project_group_key);
             self.workspaces.push(workspace.clone());
             cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
             cx.notify();
@@ -1136,10 +1235,12 @@ impl MultiWorkspace {
         }
     }
 
-    /// Clears session state and DB binding for a workspace that is being
-    /// removed or replaced. The DB row is preserved so the workspace still
-    /// appears in the recent-projects list.
+    /// Detaches a workspace: clears session state, DB binding, cached
+    /// group key, and emits `WorkspaceRemoved`. The DB row is preserved
+    /// so the workspace still appears in the recent-projects list.
     fn detach_workspace(&mut self, workspace: &Entity<Workspace>, cx: &mut Context<Self>) {
+        self.workspace_group_keys.remove(&workspace.entity_id());
+        cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
         workspace.update(cx, |workspace, _cx| {
             workspace.session_id.take();
             workspace._schedule_serialize_workspace.take();
@@ -1313,6 +1414,46 @@ impl MultiWorkspace {
         tasks
     }
 
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn assert_project_group_key_integrity(&self, cx: &App) -> anyhow::Result<()> {
+        let stored_keys: HashSet<&ProjectGroupKey> = self.project_group_keys().collect();
+
+        let workspace_group_keys: HashSet<&ProjectGroupKey> =
+            self.workspace_group_keys.values().collect();
+        let extra_keys = &workspace_group_keys - &stored_keys;
+        anyhow::ensure!(
+            extra_keys.is_empty(),
+            "workspace_group_keys values not in project_group_keys: {:?}",
+            extra_keys,
+        );
+
+        let cached_ids: HashSet<EntityId> = self.workspace_group_keys.keys().copied().collect();
+        let workspace_ids: HashSet<EntityId> =
+            self.workspaces.iter().map(|ws| ws.entity_id()).collect();
+        anyhow::ensure!(
+            cached_ids == workspace_ids,
+            "workspace_group_keys entity IDs don't match workspaces.\n\
+             only in cache: {:?}\n\
+             only in workspaces: {:?}",
+            &cached_ids - &workspace_ids,
+            &workspace_ids - &cached_ids,
+        );
+
+        for workspace in self.workspaces() {
+            let live_key = workspace.read(cx).project_group_key(cx);
+            let cached_key = &self.workspace_group_keys[&workspace.entity_id()];
+            anyhow::ensure!(
+                *cached_key == live_key,
+                "workspace {:?} has live key {:?} but cached key {:?}",
+                workspace.entity_id(),
+                live_key,
+                cached_key,
+            );
+        }
+
+        Ok(())
+    }
+
     #[cfg(any(test, feature = "test-support"))]
     pub fn set_random_database_id(&mut self, cx: &mut Context<Self>) {
         self.workspace().update(cx, |workspace, _cx| {
@@ -1471,7 +1612,6 @@ impl MultiWorkspace {
 
                 for workspace in &removed_workspaces {
                     this.detach_workspace(workspace, cx);
-                    cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
                 }
 
                 let removed_any = !removed_workspaces.is_empty();

crates/workspace/src/multi_workspace_tests.rs 🔗

@@ -185,157 +185,3 @@ async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) {
         );
     });
 }
-
-#[gpui::test]
-async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) {
-    init_test(cx);
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
-    fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
-    let project = Project::test(fs, ["/root_a".as_ref()], cx).await;
-
-    let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
-
-    let (multi_workspace, cx) =
-        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-
-    multi_workspace.update(cx, |mw, cx| {
-        mw.open_sidebar(cx);
-    });
-
-    // Add a second worktree to the same project.
-    let (worktree, _) = project
-        .update(cx, |project, cx| {
-            project.find_or_create_worktree("/root_b", true, cx)
-        })
-        .await
-        .unwrap();
-    worktree
-        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
-        .await;
-    cx.run_until_parked();
-
-    let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
-    assert_ne!(
-        initial_key, updated_key,
-        "key should change after adding a worktree"
-    );
-
-    multi_workspace.read_with(cx, |mw, _cx| {
-        let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
-        assert_eq!(
-            keys.len(),
-            2,
-            "should have both the original and updated key"
-        );
-        assert_eq!(*keys[0], updated_key);
-        assert_eq!(*keys[1], initial_key);
-    });
-}
-
-#[gpui::test]
-async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) {
-    init_test(cx);
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
-    fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
-    let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await;
-
-    let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
-
-    let (multi_workspace, cx) =
-        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-
-    multi_workspace.update(cx, |mw, cx| {
-        mw.open_sidebar(cx);
-    });
-
-    // Remove one worktree.
-    let worktree_b_id = project.read_with(cx, |project, cx| {
-        project
-            .worktrees(cx)
-            .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b")
-            .unwrap()
-            .read(cx)
-            .id()
-    });
-    project.update(cx, |project, cx| {
-        project.remove_worktree(worktree_b_id, cx);
-    });
-    cx.run_until_parked();
-
-    let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
-    assert_ne!(
-        initial_key, updated_key,
-        "key should change after removing a worktree"
-    );
-
-    multi_workspace.read_with(cx, |mw, _cx| {
-        let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
-        assert_eq!(
-            keys.len(),
-            2,
-            "should accumulate both the original and post-removal key"
-        );
-        assert_eq!(*keys[0], updated_key);
-        assert_eq!(*keys[1], initial_key);
-    });
-}
-
-#[gpui::test]
-async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes(
-    cx: &mut TestAppContext,
-) {
-    init_test(cx);
-    let fs = FakeFs::new(cx.executor());
-    fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
-    fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
-    fs.insert_tree("/root_c", json!({ "file.txt": "" })).await;
-    let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await;
-    let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await;
-
-    let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx));
-    let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx));
-
-    let (multi_workspace, cx) =
-        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
-
-    multi_workspace.update(cx, |mw, cx| {
-        mw.open_sidebar(cx);
-    });
-
-    multi_workspace.update_in(cx, |mw, window, cx| {
-        mw.test_add_workspace(project_b, window, cx);
-    });
-
-    multi_workspace.read_with(cx, |mw, _cx| {
-        assert_eq!(mw.project_group_keys().count(), 2);
-    });
-
-    // Now add a worktree to project_a. This should produce a third key.
-    let (worktree, _) = project_a
-        .update(cx, |project, cx| {
-            project.find_or_create_worktree("/root_c", true, cx)
-        })
-        .await
-        .unwrap();
-    worktree
-        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
-        .await;
-    cx.run_until_parked();
-
-    let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx));
-    assert_ne!(key_a, key_a_updated);
-
-    multi_workspace.read_with(cx, |mw, _cx| {
-        let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
-        assert_eq!(
-            keys.len(),
-            3,
-            "should have key_a, key_b, and the updated key_a with root_c"
-        );
-        assert_eq!(*keys[0], key_a_updated);
-        assert_eq!(*keys[1], key_b);
-        assert_eq!(*keys[2], key_a);
-    });
-}

crates/workspace/src/pane.rs 🔗

@@ -4897,36 +4897,9 @@ fn dirty_message_for(buffer_path: Option<ProjectPath>, path_style: PathStyle) ->
 }
 
 pub fn tab_details(items: &[Box<dyn ItemHandle>], _window: &Window, cx: &App) -> Vec<usize> {
-    let mut tab_details = items.iter().map(|_| 0).collect::<Vec<_>>();
-    let mut tab_descriptions = HashMap::default();
-    let mut done = false;
-    while !done {
-        done = true;
-
-        // Store item indices by their tab description.
-        for (ix, (item, detail)) in items.iter().zip(&tab_details).enumerate() {
-            let description = item.tab_content_text(*detail, cx);
-            if *detail == 0 || description != item.tab_content_text(detail - 1, cx) {
-                tab_descriptions
-                    .entry(description)
-                    .or_insert(Vec::new())
-                    .push(ix);
-            }
-        }
-
-        // If two or more items have the same tab description, increase their level
-        // of detail and try again.
-        for (_, item_ixs) in tab_descriptions.drain() {
-            if item_ixs.len() > 1 {
-                done = false;
-                for ix in item_ixs {
-                    tab_details[ix] += 1;
-                }
-            }
-        }
-    }
-
-    tab_details
+    util::disambiguate::compute_disambiguation_details(items, |item, detail| {
+        item.tab_content_text(detail, cx)
+    })
 }
 
 pub fn render_item_indicator(item: Box<dyn ItemHandle>, cx: &App) -> Option<Indicator> {

crates/workspace/src/workspace.rs 🔗

@@ -9886,7 +9886,7 @@ async fn open_remote_project_inner(
         });
 
         if let Some(project_group_key) = provisional_project_group_key.clone() {
-            multi_workspace.set_provisional_project_group_key(&new_workspace, project_group_key);
+            multi_workspace.set_workspace_group_key(&new_workspace, project_group_key);
         }
         multi_workspace.activate(new_workspace.clone(), window, cx);
         new_workspace

crates/worktree/src/worktree.rs 🔗

@@ -8,7 +8,8 @@ use clock::ReplicaId;
 use collections::{HashMap, HashSet, VecDeque};
 use encoding_rs::Encoding;
 use fs::{
-    Fs, MTime, PathEvent, PathEventKind, RemoveOptions, Watcher, copy_recursive, read_dir_items,
+    Fs, MTime, PathEvent, PathEventKind, RemoveOptions, TrashedEntry, Watcher, copy_recursive,
+    read_dir_items,
 };
 use futures::{
     FutureExt as _, Stream, StreamExt,
@@ -70,7 +71,7 @@ use text::{LineEnding, Rope};
 use util::{
     ResultExt, maybe,
     paths::{PathMatcher, PathStyle, SanitizedPath, home_dir},
-    rel_path::RelPath,
+    rel_path::{RelPath, RelPathBuf},
 };
 pub use worktree_settings::WorktreeSettings;
 
@@ -848,7 +849,7 @@ impl Worktree {
         entry_id: ProjectEntryId,
         trash: bool,
         cx: &mut Context<Worktree>,
-    ) -> Option<Task<Result<()>>> {
+    ) -> Option<Task<Result<Option<TrashedEntry>>>> {
         let task = match self {
             Worktree::Local(this) => this.delete_entry(entry_id, trash, cx),
             Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx),
@@ -870,6 +871,20 @@ impl Worktree {
         Some(task)
     }
 
+    pub async fn restore_entry(
+        trash_entry: TrashedEntry,
+        worktree: Entity<Self>,
+        cx: &mut AsyncApp,
+    ) -> Result<RelPathBuf> {
+        let is_local = worktree.read_with(cx, |this, _| this.is_local());
+        if is_local {
+            LocalWorktree::restore_entry(trash_entry, worktree, cx).await
+        } else {
+            // TODO(dino): Add support for restoring entries in remote worktrees.
+            Err(anyhow!("Unsupported"))
+        }
+    }
+
     fn get_children_ids_recursive(&self, path: &RelPath, ids: &mut Vec<ProjectEntryId>) {
         let children_iter = self.child_entries(path);
         for child in children_iter {
@@ -1685,42 +1700,46 @@ impl LocalWorktree {
         entry_id: ProjectEntryId,
         trash: bool,
         cx: &Context<Worktree>,
-    ) -> Option<Task<Result<()>>> {
+    ) -> Option<Task<Result<Option<TrashedEntry>>>> {
         let entry = self.entry_for_id(entry_id)?.clone();
         let abs_path = self.absolutize(&entry.path);
         let fs = self.fs.clone();
 
         let delete = cx.background_spawn(async move {
-            if entry.is_file() {
-                if trash {
-                    fs.trash_file(&abs_path, Default::default()).await?;
-                } else {
+            let trashed_entry = match (entry.is_file(), trash) {
+                (true, true) => Some(fs.trash(&abs_path, Default::default()).await?),
+                (false, true) => Some(
+                    fs.trash(
+                        &abs_path,
+                        RemoveOptions {
+                            recursive: true,
+                            ignore_if_not_exists: false,
+                        },
+                    )
+                    .await?,
+                ),
+                (true, false) => {
                     fs.remove_file(&abs_path, Default::default()).await?;
+                    None
                 }
-            } else if trash {
-                fs.trash_dir(
-                    &abs_path,
-                    RemoveOptions {
-                        recursive: true,
-                        ignore_if_not_exists: false,
-                    },
-                )
-                .await?;
-            } else {
-                fs.remove_dir(
-                    &abs_path,
-                    RemoveOptions {
-                        recursive: true,
-                        ignore_if_not_exists: false,
-                    },
-                )
-                .await?;
-            }
-            anyhow::Ok(entry.path)
+                (false, false) => {
+                    fs.remove_dir(
+                        &abs_path,
+                        RemoveOptions {
+                            recursive: true,
+                            ignore_if_not_exists: false,
+                        },
+                    )
+                    .await?;
+                    None
+                }
+            };
+
+            anyhow::Ok((trashed_entry, entry.path))
         });
 
         Some(cx.spawn(async move |this, cx| {
-            let path = delete.await?;
+            let (trashed_entry, path) = delete.await?;
             this.update(cx, |this, _| {
                 this.as_local_mut()
                     .unwrap()
@@ -1728,10 +1747,39 @@ impl LocalWorktree {
             })?
             .recv()
             .await;
-            Ok(())
+
+            Ok(trashed_entry)
         }))
     }
 
+    pub async fn restore_entry(
+        trash_entry: TrashedEntry,
+        this: Entity<Worktree>,
+        cx: &mut AsyncApp,
+    ) -> Result<RelPathBuf> {
+        let Some((fs, worktree_abs_path, path_style)) = this.read_with(cx, |this, _cx| {
+            let local_worktree = match this {
+                Worktree::Local(local_worktree) => local_worktree,
+                Worktree::Remote(_) => return None,
+            };
+
+            let fs = local_worktree.fs.clone();
+            let path_style = local_worktree.path_style();
+            Some((fs, Arc::clone(local_worktree.abs_path()), path_style))
+        }) else {
+            return Err(anyhow!("Localworktree should not change into a remote one"));
+        };
+
+        let path_buf = fs.restore(trash_entry).await?;
+        let path = path_buf
+            .strip_prefix(worktree_abs_path)
+            .context("Could not strip prefix")?;
+        let path = RelPath::new(&path, path_style)?;
+        let path = path.into_owned();
+
+        Ok(path)
+    }
+
     pub fn copy_external_entries(
         &self,
         target_directory: Arc<RelPath>,
@@ -2099,7 +2147,7 @@ impl RemoteWorktree {
         entry_id: ProjectEntryId,
         trash: bool,
         cx: &Context<Worktree>,
-    ) -> Option<Task<Result<()>>> {
+    ) -> Option<Task<Result<Option<TrashedEntry>>>> {
         let response = self.client.request(proto::DeleteProjectEntry {
             project_id: self.project_id,
             entry_id: entry_id.to_proto(),
@@ -2119,6 +2167,12 @@ impl RemoteWorktree {
                 let snapshot = &mut this.background_snapshot.lock().0;
                 snapshot.delete_entry(entry_id);
                 this.snapshot = snapshot.clone();
+
+                // TODO: How can we actually track the deleted entry when
+                // working with a remote worktree? We likely only need to keep
+                // this information on the remote side in order to support
+                // restoring the trashed file.
+                None
             })
         }))
     }
@@ -2585,15 +2639,14 @@ impl Snapshot {
     }
 
     pub fn entry_for_path(&self, path: &RelPath) -> Option<&Entry> {
-        self.traverse_from_path(true, true, true, path)
-            .entry()
-            .and_then(|entry| {
-                if entry.path.as_ref() == path {
-                    Some(entry)
-                } else {
-                    None
-                }
-            })
+        let entry = self.traverse_from_path(true, true, true, path).entry();
+        entry.and_then(|entry| {
+            if entry.path.as_ref() == path {
+                Some(entry)
+            } else {
+                None
+            }
+        })
     }
 
     /// Resolves a path to an executable using the following heuristics:

crates/worktree/tests/integration/main.rs 🔗

@@ -2207,7 +2207,14 @@ fn randomly_mutate_worktree(
     match rng.random_range(0_u32..100) {
         0..=33 if entry.path.as_ref() != RelPath::empty() => {
             log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
-            worktree.delete_entry(entry.id, false, cx).unwrap()
+            let task = worktree
+                .delete_entry(entry.id, false, cx)
+                .unwrap_or_else(|| Task::ready(Ok(None)));
+
+            cx.background_spawn(async move {
+                task.await?;
+                Ok(())
+            })
         }
         _ => {
             if entry.is_dir() {

crates/x_ai/src/x_ai.rs 🔗

@@ -48,7 +48,7 @@ pub enum Model {
     #[serde(rename = "custom")]
     Custom {
         name: String,
-        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+        /// The name displayed in the UI, such as in the agent panel model dropdown menu.
         display_name: Option<String>,
         max_tokens: u64,
         max_output_tokens: Option<u64>,

crates/zed/src/visual_test_runner.rs 🔗

@@ -573,6 +573,27 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()>
         }
     }
 
+    // Run Test: Sidebar with duplicate project names
+    println!("\n--- Test: sidebar_duplicate_names ---");
+    match run_sidebar_duplicate_project_names_visual_tests(
+        app_state.clone(),
+        &mut cx,
+        update_baseline,
+    ) {
+        Ok(TestResult::Passed) => {
+            println!("✓ sidebar_duplicate_names: PASSED");
+            passed += 1;
+        }
+        Ok(TestResult::BaselineUpdated(_)) => {
+            println!("✓ sidebar_duplicate_names: Baselines updated");
+            updated += 1;
+        }
+        Err(e) => {
+            eprintln!("✗ sidebar_duplicate_names: FAILED - {}", e);
+            failed += 1;
+        }
+    }
+
     // Run Test 9: Tool Permissions Settings UI visual test
     println!("\n--- Test 9: tool_permissions_settings ---");
     match run_tool_permissions_visual_tests(app_state.clone(), &mut cx, update_baseline) {
@@ -3069,6 +3090,279 @@ fn run_git_command(args: &[&str], dir: &std::path::Path) -> Result<()> {
     Ok(())
 }
 
+#[cfg(target_os = "macos")]
+/// Helper to create a project, add a worktree at the given path, and return the project.
+fn create_project_with_worktree(
+    worktree_dir: &Path,
+    app_state: &Arc<AppState>,
+    cx: &mut VisualTestAppContext,
+) -> Result<Entity<Project>> {
+    let project = cx.update(|cx| {
+        project::Project::local(
+            app_state.client.clone(),
+            app_state.node_runtime.clone(),
+            app_state.user_store.clone(),
+            app_state.languages.clone(),
+            app_state.fs.clone(),
+            None,
+            project::LocalProjectFlags {
+                init_worktree_trust: false,
+                ..Default::default()
+            },
+            cx,
+        )
+    });
+
+    let add_task = cx.update(|cx| {
+        project.update(cx, |project, cx| {
+            project.find_or_create_worktree(worktree_dir, true, cx)
+        })
+    });
+
+    cx.background_executor.allow_parking();
+    cx.foreground_executor
+        .block_test(add_task)
+        .context("Failed to add worktree")?;
+    cx.background_executor.forbid_parking();
+
+    cx.run_until_parked();
+    Ok(project)
+}
+
+#[cfg(target_os = "macos")]
+fn open_sidebar_test_window(
+    projects: Vec<Entity<Project>>,
+    app_state: &Arc<AppState>,
+    cx: &mut VisualTestAppContext,
+) -> Result<WindowHandle<MultiWorkspace>> {
+    anyhow::ensure!(!projects.is_empty(), "need at least one project");
+
+    let window_size = size(px(400.0), px(600.0));
+    let bounds = Bounds {
+        origin: point(px(0.0), px(0.0)),
+        size: window_size,
+    };
+
+    let mut projects_iter = projects.into_iter();
+    let first_project = projects_iter
+        .next()
+        .ok_or_else(|| anyhow::anyhow!("need at least one project"))?;
+    let remaining: Vec<_> = projects_iter.collect();
+
+    let multi_workspace_window: WindowHandle<MultiWorkspace> = cx
+        .update(|cx| {
+            cx.open_window(
+                WindowOptions {
+                    window_bounds: Some(WindowBounds::Windowed(bounds)),
+                    focus: false,
+                    show: false,
+                    ..Default::default()
+                },
+                |window, cx| {
+                    let first_ws = cx.new(|cx| {
+                        Workspace::new(None, first_project.clone(), app_state.clone(), window, cx)
+                    });
+                    cx.new(|cx| {
+                        let mut mw = MultiWorkspace::new(first_ws, window, cx);
+                        for project in remaining {
+                            let ws = cx.new(|cx| {
+                                Workspace::new(None, project, app_state.clone(), window, cx)
+                            });
+                            mw.activate(ws, window, cx);
+                        }
+                        mw
+                    })
+                },
+            )
+        })
+        .context("Failed to open MultiWorkspace window")?;
+
+    cx.run_until_parked();
+
+    // Create the sidebar outside the MultiWorkspace update to avoid a
+    // re-entrant read panic (Sidebar::new reads the MultiWorkspace).
+    let sidebar = cx
+        .update_window(multi_workspace_window.into(), |root_view, window, cx| {
+            let mw_handle: Entity<MultiWorkspace> = root_view
+                .downcast()
+                .map_err(|_| anyhow::anyhow!("Failed to downcast root view to MultiWorkspace"))?;
+            Ok::<_, anyhow::Error>(cx.new(|cx| sidebar::Sidebar::new(mw_handle, window, cx)))
+        })
+        .context("Failed to create sidebar")??;
+
+    multi_workspace_window
+        .update(cx, |mw, _window, cx| {
+            mw.register_sidebar(sidebar.clone(), cx);
+        })
+        .context("Failed to register sidebar")?;
+
+    cx.run_until_parked();
+
+    // Open the sidebar
+    multi_workspace_window
+        .update(cx, |mw, window, cx| {
+            mw.toggle_sidebar(window, cx);
+        })
+        .context("Failed to toggle sidebar")?;
+
+    // Let rendering settle
+    for _ in 0..10 {
+        cx.advance_clock(Duration::from_millis(100));
+        cx.run_until_parked();
+    }
+
+    // Refresh the window
+    cx.update_window(multi_workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+
+    cx.run_until_parked();
+
+    Ok(multi_workspace_window)
+}
+
+#[cfg(target_os = "macos")]
+fn cleanup_sidebar_test_window(
+    window: WindowHandle<MultiWorkspace>,
+    cx: &mut VisualTestAppContext,
+) -> Result<()> {
+    window.update(cx, |mw, _window, cx| {
+        for workspace in mw.workspaces() {
+            let project = workspace.read(cx).project().clone();
+            project.update(cx, |project, cx| {
+                let ids: Vec<_> = project.worktrees(cx).map(|wt| wt.read(cx).id()).collect();
+                for id in ids {
+                    project.remove_worktree(id, cx);
+                }
+            });
+        }
+    })?;
+
+    cx.run_until_parked();
+
+    cx.update_window(window.into(), |_, window, _cx| {
+        window.remove_window();
+    })?;
+
+    cx.run_until_parked();
+
+    for _ in 0..15 {
+        cx.advance_clock(Duration::from_millis(100));
+        cx.run_until_parked();
+    }
+
+    Ok(())
+}
+
+#[cfg(target_os = "macos")]
+fn run_sidebar_duplicate_project_names_visual_tests(
+    app_state: Arc<AppState>,
+    cx: &mut VisualTestAppContext,
+    update_baseline: bool,
+) -> Result<TestResult> {
+    let temp_dir = tempfile::tempdir()?;
+    let temp_path = temp_dir.keep();
+    let canonical_temp = temp_path.canonicalize()?;
+
+    // Create directory structure where every leaf directory is named "zed" but
+    // lives at a distinct path. This lets us test that the sidebar correctly
+    // disambiguates projects whose names would otherwise collide.
+    //
+    //   code/zed/       — project1 (single worktree)
+    //   code/foo/zed/   — project2 (single worktree)
+    //   code/bar/zed/   — project3, first worktree
+    //   code/baz/zed/   — project3, second worktree
+    //
+    // No two projects share a worktree path, so ProjectGroupBuilder will
+    // place each in its own group.
+    let code_zed = canonical_temp.join("code").join("zed");
+    let foo_zed = canonical_temp.join("code").join("foo").join("zed");
+    let bar_zed = canonical_temp.join("code").join("bar").join("zed");
+    let baz_zed = canonical_temp.join("code").join("baz").join("zed");
+    std::fs::create_dir_all(&code_zed)?;
+    std::fs::create_dir_all(&foo_zed)?;
+    std::fs::create_dir_all(&bar_zed)?;
+    std::fs::create_dir_all(&baz_zed)?;
+
+    cx.update(|cx| {
+        cx.update_flags(true, vec!["agent-v2".to_string()]);
+    });
+
+    let mut has_baseline_update = None;
+
+    // Two single-worktree projects whose leaf name is "zed"
+    {
+        let project1 = create_project_with_worktree(&code_zed, &app_state, cx)?;
+        let project2 = create_project_with_worktree(&foo_zed, &app_state, cx)?;
+
+        let window = open_sidebar_test_window(vec![project1, project2], &app_state, cx)?;
+
+        let result = run_visual_test(
+            "sidebar_two_projects_same_leaf_name",
+            window.into(),
+            cx,
+            update_baseline,
+        );
+
+        cleanup_sidebar_test_window(window, cx)?;
+        match result? {
+            TestResult::Passed => {}
+            TestResult::BaselineUpdated(path) => {
+                has_baseline_update = Some(path);
+            }
+        }
+    }
+
+    // Three projects, third has two worktrees (all leaf names "zed")
+    //
+    // project1: code/zed
+    // project2: code/foo/zed
+    // project3: code/bar/zed + code/baz/zed
+    //
+    // Each project has a unique set of worktree paths, so they form
+    // separate groups. The sidebar must disambiguate all three.
+    {
+        let project1 = create_project_with_worktree(&code_zed, &app_state, cx)?;
+        let project2 = create_project_with_worktree(&foo_zed, &app_state, cx)?;
+
+        let project3 = create_project_with_worktree(&bar_zed, &app_state, cx)?;
+        let add_second_worktree = cx.update(|cx| {
+            project3.update(cx, |project, cx| {
+                project.find_or_create_worktree(&baz_zed, true, cx)
+            })
+        });
+        cx.background_executor.allow_parking();
+        cx.foreground_executor
+            .block_test(add_second_worktree)
+            .context("Failed to add second worktree to project 3")?;
+        cx.background_executor.forbid_parking();
+        cx.run_until_parked();
+
+        let window = open_sidebar_test_window(vec![project1, project2, project3], &app_state, cx)?;
+
+        let result = run_visual_test(
+            "sidebar_three_projects_with_multi_worktree",
+            window.into(),
+            cx,
+            update_baseline,
+        );
+
+        cleanup_sidebar_test_window(window, cx)?;
+        match result? {
+            TestResult::Passed => {}
+            TestResult::BaselineUpdated(path) => {
+                has_baseline_update = Some(path);
+            }
+        }
+    }
+
+    if let Some(path) = has_baseline_update {
+        Ok(TestResult::BaselineUpdated(path))
+    } else {
+        Ok(TestResult::Passed)
+    }
+}
+
 #[cfg(all(target_os = "macos", feature = "visual-tests"))]
 fn run_start_thread_in_selector_visual_tests(
     app_state: Arc<AppState>,

crates/zed/src/zed.rs 🔗

@@ -928,7 +928,7 @@ fn register_actions(
                             .insert(f32::from(theme_settings::clamp_font_size(buffer_font_size)).into());
                     });
                 } else {
-                    theme_settings::adjust_buffer_font_size(cx, |size| size + px(1.0));
+                    theme_settings::increase_buffer_font_size(cx);
                 }
             }
         })
@@ -945,7 +945,7 @@ fn register_actions(
                             .insert(f32::from(theme_settings::clamp_font_size(buffer_font_size)).into());
                     });
                 } else {
-                    theme_settings::adjust_buffer_font_size(cx, |size| size - px(1.0));
+                    theme_settings::decrease_buffer_font_size(cx);
                 }
             }
         })
@@ -4144,6 +4144,7 @@ mod tests {
             window.draw(cx).clear();
         });
 
+        // mouse_wheel_zoom is disabled by default — zoom should not work.
         let initial_font_size =
             cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
 
@@ -4154,6 +4155,34 @@ mod tests {
             ..Default::default()
         });
 
+        let font_size_after_disabled_zoom =
+            cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
+
+        assert_eq!(
+            initial_font_size, font_size_after_disabled_zoom,
+            "Editor buffer font-size should not change when mouse_wheel_zoom is disabled"
+        );
+
+        // Enable mouse_wheel_zoom and verify zoom works.
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.editor.mouse_wheel_zoom = Some(true);
+                });
+            });
+        });
+
+        cx.update(|window, cx| {
+            window.draw(cx).clear();
+        });
+
+        cx.simulate_event(gpui::ScrollWheelEvent {
+            position: mouse_position,
+            delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))),
+            modifiers: event_modifiers,
+            ..Default::default()
+        });
+
         let increased_font_size =
             cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
 
@@ -4180,6 +4209,37 @@ mod tests {
             decreased_font_size < increased_font_size,
             "Editor buffer font-size should have decreased from scroll-zoom"
         );
+
+        // Disable mouse_wheel_zoom again and verify zoom stops working.
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.editor.mouse_wheel_zoom = Some(false);
+                });
+            });
+        });
+
+        let font_size_before =
+            cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
+
+        cx.update(|window, cx| {
+            window.draw(cx).clear();
+        });
+
+        cx.simulate_event(gpui::ScrollWheelEvent {
+            position: mouse_position,
+            delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))),
+            modifiers: event_modifiers,
+            ..Default::default()
+        });
+
+        let font_size_after =
+            cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
+
+        assert_eq!(
+            font_size_before, font_size_after,
+            "Editor buffer font-size should not change when mouse_wheel_zoom is re-disabled"
+        );
     }
 
     #[gpui::test]

docs/.doc-examples/reference.md 🔗

@@ -68,7 +68,7 @@ Reads the content of a specified file in the project, allowing access to file co
 
 Allows the Agent to work through problems, brainstorm ideas, or plan without executing actions, useful for complex problem-solving.
 
-### `web_search` {#web-search}
+### `search_web` {#search-web}
 
 Searches the web for information, providing results with snippets and links from relevant web pages, useful for accessing real-time information.
 

docs/src/ai/tool-permissions.md 🔗

@@ -54,7 +54,7 @@ The `tool_permissions` setting lets you customize tool permissions by specifying
 | `restore_file_from_disk` | The file paths               |
 | `save_file`              | The file paths               |
 | `fetch`                  | The URL                      |
-| `web_search`             | The search query             |
+| `search_web`             | The search query             |
 
 For MCP tools, use the format `mcp:<server>:<tool_name>`.
 For example, a tool called `create_issue` on a server called `github` would be `mcp:github:create_issue`.

docs/src/ai/tools.md 🔗

@@ -57,7 +57,7 @@ Reads the content of a specified file in the project, allowing access to file co
 
 Allows the Agent to work through problems, brainstorm ideas, or plan without executing actions, useful for complex problem-solving.
 
-### `web_search`
+### `search_web`
 
 Searches the web for information, providing results with snippets and links from relevant web pages, useful for accessing real-time information.
 

docs/src/migrate/intellij.md 🔗

@@ -119,7 +119,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou
 
 | Action            | Shortcut                   | Notes                          |
 | ----------------- | -------------------------- | ------------------------------ |
-| Toggle Right Dock | `Cmd + R`                  | Assistant panel, notifications |
+| Toggle Right Dock | `Cmd + R`                  | Agent panel, notifications     |
 | Split Panes       | `Cmd + K`, then arrow keys | Create splits in any direction |
 
 ### How to Customize Keybindings

docs/src/migrate/pycharm.md 🔗

@@ -119,7 +119,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou
 
 | Action            | Shortcut                   | Notes                          |
 | ----------------- | -------------------------- | ------------------------------ |
-| Toggle Right Dock | `Cmd + R`                  | Assistant panel, notifications |
+| Toggle Right Dock | `Cmd + R`                  | Agent panel, notifications     |
 | Split Panes       | `Cmd + K`, then arrow keys | Create splits in any direction |
 
 ### How to Customize Keybindings

docs/src/migrate/rustrover.md 🔗

@@ -132,7 +132,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou
 
 | Action            | Shortcut                   | Notes                          |
 | ----------------- | -------------------------- | ------------------------------ |
-| Toggle Right Dock | `Cmd + R`                  | Assistant panel, notifications |
+| Toggle Right Dock | `Cmd + R`                  | Agent panel, notifications     |
 | Split Panes       | `Cmd + K`, then arrow keys | Create splits in any direction |
 
 ### How to Customize Keybindings

docs/src/migrate/webstorm.md 🔗

@@ -112,7 +112,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou
 
 | Action            | Keybinding                       | Notes                                                         |
 | ----------------- | -------------------------------- | ------------------------------------------------------------- |
-| Toggle Right Dock | {#kb workspace::ToggleRightDock} | Assistant panel, notifications                                |
+| Toggle Right Dock | {#kb workspace::ToggleRightDock} | Agent panel, notifications                                    |
 | Split Pane Right  | {#kb pane::SplitRight}           | Use other arrow keys to create splits in different directions |
 
 ### How to Customize Keybindings

docs/src/reference/all-settings.md 🔗

@@ -3396,6 +3396,16 @@ List of strings containing any combination of:
 
 Positive `float` values
 
+### Mouse Wheel Zoom
+
+- Description: Whether to zoom the editor font size with the mouse wheel while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms).
+- Setting: `mouse_wheel_zoom`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
 ### Fast Scroll Sensitivity
 
 - Description: Scroll sensitivity multiplier for fast scrolling. This multiplier is applied to both the horizontal and vertical delta values while scrolling. Fast scrolling happens when a user holds the alt or option key while scrolling.

docs/src/vim.md 🔗

@@ -448,7 +448,7 @@ Here's a template with useful vim mode contexts to help you customize your vim m
 
 By default, you can navigate between the different files open in the editor with shortcuts like `ctrl+w` followed by one of `hjkl` to move to the left, down, up, or right, respectively.
 
-But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap.
+But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, agent panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap.
 
 ```json [keymap]
 {

tooling/xtask/src/tasks/workflows/autofix_pr.rs 🔗

@@ -62,12 +62,11 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo
 
     fn install_cargo_machete() -> Step<Use> {
         named::uses(
-            "clechasseur",
-            "rs-cargo",
-            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
+            "taiki-e",
+            "install-action",
+            "02cc5f8ca9f2301050c0c099055816a41ee05507",
         )
-        .add_with(("command", "install"))
-        .add_with(("args", "cargo-machete@0.7.0"))
+        .add_with(("tool", "cargo-machete@0.7.0"))
     }
 
     fn run_cargo_fmt() -> Step<Run> {

tooling/xtask/src/tasks/workflows/cherry_pick.rs 🔗

@@ -39,8 +39,16 @@ fn run_cherry_pick(
             .add_env(("BRANCH", branch.to_string()))
             .add_env(("COMMIT", commit.to_string()))
             .add_env(("CHANNEL", channel.to_string()))
-            .add_env(("GIT_COMMITTER_NAME", "Zed Zippy"))
-            .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev"))
+            .add_env(("GIT_AUTHOR_NAME", "zed-zippy[bot]"))
+            .add_env((
+                "GIT_AUTHOR_EMAIL",
+                "<234243425+zed-zippy[bot]@users.noreply.github.com>",
+            ))
+            .add_env(("GIT_COMMITTER_NAME", "zed-zippy[bot]"))
+            .add_env((
+                "GIT_COMMITTER_EMAIL",
+                "<234243425+zed-zippy[bot]@users.noreply.github.com>",
+            ))
             .add_env(("GITHUB_TOKEN", token))
     }
 

tooling/xtask/src/tasks/workflows/compliance_check.rs 🔗

@@ -1,14 +1,10 @@
-use gh_workflow::{Event, Job, Run, Schedule, Step, Workflow, WorkflowDispatch};
-use indoc::formatdoc;
+use gh_workflow::{Event, Job, Schedule, Workflow, WorkflowDispatch};
 
 use crate::tasks::workflows::{
-    release::{
-        COMPLIANCE_REPORT_PATH, COMPLIANCE_STEP_ID, ComplianceContext,
-        add_compliance_notification_steps,
-    },
+    release::{ComplianceContext, add_compliance_steps},
     runners,
     steps::{self, CommonJobConditions, named},
-    vars::{self, StepOutput},
+    vars::StepOutput,
 };
 
 pub fn compliance_check() -> Workflow {
@@ -37,31 +33,20 @@ fn scheduled_compliance_check() -> steps::NamedJob {
 
     let tag_output = StepOutput::new(&determine_version_step, "tag");
 
-    fn run_compliance_check(tag: &StepOutput) -> Step<Run> {
-        named::bash(
-            formatdoc! {r#"
-                cargo xtask compliance "$LATEST_TAG" --branch main --report-path "{COMPLIANCE_REPORT_PATH}"
-                "#,
-            }
-        )
-        .id(COMPLIANCE_STEP_ID)
-        .add_env(("LATEST_TAG", tag.to_string()))
-        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
-        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
-    }
-
     let job = Job::default()
         .with_repository_owner_guard()
         .runs_on(runners::LINUX_SMALL)
         .add_step(steps::checkout_repo().with_full_history())
         .add_step(steps::cache_rust_dependencies_namespace())
-        .add_step(determine_version_step)
-        .add_step(run_compliance_check(&tag_output));
-
-    named::job(add_compliance_notification_steps(
-        job,
-        ComplianceContext::Scheduled {
-            tag_source: tag_output,
-        },
-    ))
+        .add_step(determine_version_step);
+
+    named::job(
+        add_compliance_steps(
+            job,
+            ComplianceContext::Scheduled {
+                tag_source: tag_output,
+            },
+        )
+        .0,
+    )
 }

tooling/xtask/src/tasks/workflows/release.rs 🔗

@@ -6,7 +6,7 @@ use crate::tasks::workflows::{
     run_tests,
     runners::{self, Arch, Platform},
     steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job},
-    vars::{self, StepOutput, assets},
+    vars::{self, JobOutput, StepOutput, assets},
 };
 
 const CURRENT_ACTION_RUN_URL: &str =
@@ -22,7 +22,7 @@ pub(crate) fn release() -> Workflow {
     let check_scripts = run_tests::check_scripts();
 
     let create_draft_release = create_draft_release();
-    let compliance = compliance_check();
+    let (non_blocking_compliance_run, job_output) = compliance_check();
 
     let bundle = ReleaseBundleJobs {
         linux_aarch64: bundle_linux(
@@ -58,7 +58,10 @@ pub(crate) fn release() -> Workflow {
     };
 
     let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle);
-    let validate_release_assets = validate_release_assets(&[&upload_release_assets]);
+    let validate_release_assets = validate_release_assets(
+        &[&upload_release_assets, &non_blocking_compliance_run],
+        job_output,
+    );
 
     let auto_release_preview = auto_release_preview(&[&validate_release_assets]);
 
@@ -93,7 +96,10 @@ pub(crate) fn release() -> Workflow {
         .add_job(windows_clippy.name, windows_clippy.job)
         .add_job(check_scripts.name, check_scripts.job)
         .add_job(create_draft_release.name, create_draft_release.job)
-        .add_job(compliance.name, compliance.job)
+        .add_job(
+            non_blocking_compliance_run.name,
+            non_blocking_compliance_run.job,
+        )
         .map(|mut workflow| {
             for job in bundle.into_jobs() {
                 workflow = workflow.add_job(job.name, job.job);
@@ -158,25 +164,65 @@ pub(crate) const COMPLIANCE_STEP_ID: &str = "run-compliance-check";
 const NEEDS_REVIEW_PULLS_URL: &str = "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22";
 
 pub(crate) enum ComplianceContext {
-    Release,
+    Release { non_blocking_outcome: JobOutput },
     ReleaseNonBlocking,
     Scheduled { tag_source: StepOutput },
 }
 
-pub(crate) fn add_compliance_notification_steps(
+impl ComplianceContext {
+    fn tag_source(&self) -> Option<&StepOutput> {
+        match self {
+            ComplianceContext::Scheduled { tag_source } => Some(tag_source),
+            _ => None,
+        }
+    }
+}
+
+pub(crate) fn add_compliance_steps(
     job: gh_workflow::Job,
     context: ComplianceContext,
-) -> gh_workflow::Job {
+) -> (gh_workflow::Job, StepOutput) {
+    fn run_compliance_check(context: &ComplianceContext) -> (Step<Run>, StepOutput) {
+        let job = named::bash(
+            formatdoc! {r#"
+                cargo xtask compliance {target} --report-path "{COMPLIANCE_REPORT_PATH}"
+                "#,
+                target = if context.tag_source().is_some() { r#""$LATEST_TAG" --branch main"# } else { r#""$GITHUB_REF_NAME""# },
+            }
+        )
+        .id(COMPLIANCE_STEP_ID)
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+        .when_some(context.tag_source(), |step, tag_source| {
+            step.add_env(("LATEST_TAG", tag_source.to_string()))
+        })
+        .when(
+            matches!(
+                context,
+                ComplianceContext::Scheduled { .. } | ComplianceContext::ReleaseNonBlocking
+            ),
+            |step| step.continue_on_error(true),
+        );
+
+        let result = StepOutput::new_unchecked(&job, "outcome");
+        (job, result)
+    }
+
     let upload_step = upload_artifact(COMPLIANCE_REPORT_ARTIFACT_PATH)
         .if_condition(Expression::new("always()"))
-        .when(matches!(context, ComplianceContext::Release), |step| {
-            step.add_with(("overwrite", true))
-        });
+        .when(
+            matches!(context, ComplianceContext::Release { .. }),
+            |step| step.add_with(("overwrite", true)),
+        );
 
     let (success_prefix, failure_prefix) = match context {
-        ComplianceContext::Release | ComplianceContext::ReleaseNonBlocking => {
+        ComplianceContext::Release { .. } => {
             ("✅ Compliance check passed", "❌ Compliance check failed")
         }
+        ComplianceContext::ReleaseNonBlocking => (
+            "✅ Compliance check passed",
+            "❌ Preliminary compliance check failed (but this can still be fixed while the builds are running!)",
+        ),
         ComplianceContext::Scheduled { .. } => (
             "✅ Scheduled compliance check passed",
             "⚠️ Scheduled compliance check failed",
@@ -200,7 +246,17 @@ pub(crate) fn add_compliance_notification_steps(
 
     let notification_step = Step::new("send_compliance_slack_notification")
         .run(&script)
-        .if_condition(Expression::new("always()"))
+        .if_condition(match &context {
+            ComplianceContext::Release {
+                non_blocking_outcome,
+            } => Expression::new(format!(
+                "failure() || {prior_outcome} != 'success'",
+                prior_outcome = non_blocking_outcome.expr()
+            )),
+            ComplianceContext::Scheduled { .. } | ComplianceContext::ReleaseNonBlocking => {
+                Expression::new("always()")
+            }
+        })
         .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
         .add_env((
             "COMPLIANCE_OUTCOME",
@@ -208,8 +264,8 @@ pub(crate) fn add_compliance_notification_steps(
         ))
         .add_env((
             "COMPLIANCE_TAG",
-            match context {
-                ComplianceContext::Release | ComplianceContext::ReleaseNonBlocking => {
+            match &context {
+                ComplianceContext::Release { .. } | ComplianceContext::ReleaseNonBlocking => {
                     Context::github().ref_name().to_string()
                 }
                 ComplianceContext::Scheduled { tag_source } => tag_source.to_string(),
@@ -220,21 +276,21 @@ pub(crate) fn add_compliance_notification_steps(
             format!("{CURRENT_ACTION_RUN_URL}#artifacts"),
         ));
 
-    job.add_step(upload_step).add_step(notification_step)
-}
+    let (compliance_step, check_result) = run_compliance_check(&context);
 
-fn run_compliance_check() -> Step<Run> {
-    named::bash(formatdoc! {r#"
-        cargo xtask compliance "$GITHUB_REF_NAME" --report-path "{COMPLIANCE_REPORT_PATH}"
-        "#,
-    })
-    .id(COMPLIANCE_STEP_ID)
-    .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
-    .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
-    .continue_on_error(true)
+    (
+        job.add_step(compliance_step)
+            .add_step(upload_step)
+            .add_step(notification_step)
+            .when(
+                matches!(context, ComplianceContext::ReleaseNonBlocking),
+                |step| step.outputs([("outcome".to_string(), check_result.to_string())]),
+            ),
+        check_result,
+    )
 }
 
-fn compliance_check() -> NamedJob {
+fn compliance_check() -> (NamedJob, JobOutput) {
     let job = release_job(&[])
         .runs_on(runners::LINUX_SMALL)
         .add_step(
@@ -242,16 +298,17 @@ fn compliance_check() -> NamedJob {
                 .with_full_history()
                 .with_ref(Context::github().ref_()),
         )
-        .add_step(steps::cache_rust_dependencies_namespace())
-        .add_step(run_compliance_check());
+        .add_step(steps::cache_rust_dependencies_namespace());
+
+    let (compliance_job, check_result) =
+        add_compliance_steps(job, ComplianceContext::ReleaseNonBlocking);
+    let compliance_job = named::job(compliance_job);
+    let check_result = check_result.as_job_output(&compliance_job);
 
-    named::job(add_compliance_notification_steps(
-        job,
-        ComplianceContext::ReleaseNonBlocking,
-    ))
+    (compliance_job, check_result)
 }
 
-fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
+fn validate_release_assets(deps: &[&NamedJob], context_check_result: JobOutput) -> NamedJob {
     let expected_assets: Vec<String> = assets::all().iter().map(|a| format!("\"{a}\"")).collect();
     let expected_assets_json = format!("[{}]", expected_assets.join(", "));
 
@@ -281,13 +338,17 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
                 .with_full_history()
                 .with_ref(Context::github().ref_()),
         )
-        .add_step(steps::cache_rust_dependencies_namespace())
-        .add_step(run_compliance_check());
+        .add_step(steps::cache_rust_dependencies_namespace());
 
-    named::job(add_compliance_notification_steps(
-        job,
-        ComplianceContext::Release,
-    ))
+    named::job(
+        add_compliance_steps(
+            job,
+            ComplianceContext::Release {
+                non_blocking_outcome: context_check_result,
+            },
+        )
+        .0,
+    )
 }
 
 fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob {

tooling/xtask/src/tasks/workflows/run_tests.rs 🔗

@@ -203,7 +203,7 @@ fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> Name
 
           # If assets/ changed, add crates that depend on those assets
           if echo "$CHANGED_FILES" | grep -qP '^assets/'; then
-            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u)
+            FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u)
           fi
 
           # Combine all changed packages
@@ -408,21 +408,15 @@ fn check_style() -> NamedJob {
 fn check_dependencies() -> NamedJob {
     fn install_cargo_machete() -> Step<Use> {
         named::uses(
-            "clechasseur",
-            "rs-cargo",
-            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
+            "taiki-e",
+            "install-action",
+            "02cc5f8ca9f2301050c0c099055816a41ee05507",
         )
-        .add_with(("command", "install"))
-        .add_with(("args", "cargo-machete@0.7.0"))
+        .add_with(("tool", "cargo-machete@0.7.0"))
     }
 
-    fn run_cargo_machete() -> Step<Use> {
-        named::uses(
-            "clechasseur",
-            "rs-cargo",
-            "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
-        )
-        .add_with(("command", "machete"))
+    fn run_cargo_machete() -> Step<Run> {
+        named::bash("cargo machete")
     }
 
     fn check_cargo_lock() -> Step<Run> {

tooling/xtask/src/tasks/workflows/vars.rs 🔗

@@ -167,7 +167,7 @@ impl StepOutput {
                 .run
                 .as_ref()
                 .is_none_or(|run_command| run_command.contains(name)),
-            "Step Output name {name} must occur at least once in run command with ID {step_id}!"
+            "Step output with name '{name}' must occur at least once in run command with ID {step_id}!"
         );
 
         Self { name, step_id }