Merge remote-tracking branch 'origin/main' into persist-worktree-3-wiring

Created by: Richard Feldman

# Conflicts:
#	crates/agent_ui/src/agent_ui.rs
#	crates/agent_ui/src/thread_metadata_store.rs
#	crates/sidebar/src/sidebar.rs

Change summary

.github/workflows/compliance_check.yml                                |   33 
.github/workflows/release.yml                                         |   63 
Cargo.lock                                                            |  137 
Cargo.toml                                                            |    9 
assets/icons/folder_open_add.svg                                      |    5 
assets/icons/folder_plus.svg                                          |    5 
assets/icons/open_new_window.svg                                      |    7 
assets/images/ai_grid.svg                                             |  334 
assets/images/business_stamp.svg                                      |    0 
assets/images/pro_trial_stamp.svg                                     |    0 
assets/images/pro_user_stamp.svg                                      |    0 
assets/images/student_stamp.svg                                       |    0 
assets/keymaps/default-linux.json                                     |   14 
assets/keymaps/default-macos.json                                     |   14 
assets/keymaps/default-windows.json                                   |   13 
assets/keymaps/vim.json                                               |   16 
assets/settings/default.json                                          |   24 
crates/acp_thread/src/acp_thread.rs                                   |   11 
crates/agent/src/tool_permissions.rs                                  |    1 
crates/agent/src/tools/read_file_tool.rs                              |    2 
crates/agent_servers/src/acp.rs                                       |    2 
crates/agent_settings/src/agent_settings.rs                           |   36 
crates/agent_ui/src/agent_panel.rs                                    |  804 
crates/agent_ui/src/agent_registry_ui.rs                              |    2 
crates/agent_ui/src/agent_ui.rs                                       |  101 
crates/agent_ui/src/config_options.rs                                 |   17 
crates/agent_ui/src/conversation_view.rs                              |  245 
crates/agent_ui/src/conversation_view/thread_view.rs                  |  273 
crates/agent_ui/src/mention_set.rs                                    |    2 
crates/agent_ui/src/mode_selector.rs                                  |   21 
crates/agent_ui/src/model_selector.rs                                 |   19 
crates/agent_ui/src/profile_selector.rs                               |   16 
crates/agent_ui/src/thread_branch_picker.rs                           |  758 
crates/agent_ui/src/thread_metadata_store.rs                          |   55 
crates/agent_ui/src/thread_worktree_picker.rs                         |  621 
crates/agent_ui/src/threads_archive_view.rs                           |    1 
crates/agent_ui/src/ui.rs                                             |   13 
crates/ai_onboarding/src/agent_panel_onboarding_card.rs               |   96 
crates/ai_onboarding/src/agent_panel_onboarding_content.rs            |   25 
crates/ai_onboarding/src/ai_onboarding.rs                             |  225 
crates/ai_onboarding/src/ai_upsell_card.rs                            |  407 
crates/ai_onboarding/src/plan_definitions.rs                          |   14 
crates/anthropic/Cargo.toml                                           |    4 
crates/anthropic/src/anthropic.rs                                     |   84 
crates/anthropic/src/completion.rs                                    |  765 
crates/auto_update_ui/Cargo.toml                                      |    5 
crates/auto_update_ui/src/auto_update_ui.rs                           |   97 
crates/bedrock/src/models.rs                                          |   64 
crates/client/Cargo.toml                                              |    1 
crates/client/src/client.rs                                           |    2 
crates/client/src/llm_token.rs                                        |    2 
crates/cloud_api_client/Cargo.toml                                    |    1 
crates/cloud_api_client/src/cloud_api_client.rs                       |    3 
crates/cloud_api_client/src/llm_token.rs                              |   74 
crates/cloud_llm_client/Cargo.toml                                    |    3 
crates/cloud_llm_client/src/cloud_llm_client.rs                       |    1 
crates/collab/tests/integration/git_tests.rs                          |   12 
crates/collab/tests/integration/remote_editing_collaboration_tests.rs |    6 
crates/collab_ui/Cargo.toml                                           |    3 
crates/collab_ui/src/collab_panel.rs                                  |  374 
crates/collab_ui/src/collab_ui.rs                                     |    4 
crates/collab_ui/src/notification_panel.rs                            |  727 
crates/collab_ui/src/panel_settings.rs                                |   20 
crates/dev_container/src/devcontainer_json.rs                         |   65 
crates/dev_container/src/devcontainer_manifest.rs                     |   47 
crates/edit_prediction/Cargo.toml                                     |    3 
crates/edit_prediction/src/edit_prediction.rs                         |    2 
crates/edit_prediction/src/ollama.rs                                  |    2 
crates/edit_prediction/src/onboarding_modal.rs                        |   41 
crates/edit_prediction/src/zed_edit_prediction_delegate.rs            |    4 
crates/edit_prediction_cli/Cargo.toml                                 |    2 
crates/editor/src/display_map.rs                                      |   48 
crates/editor/src/display_map/block_map.rs                            |   14 
crates/editor/src/display_map/custom_highlights.rs                    |    9 
crates/editor/src/display_map/fold_map.rs                             |   33 
crates/editor/src/display_map/inlay_map.rs                            |   37 
crates/editor/src/display_map/tab_map.rs                              |   71 
crates/editor/src/display_map/wrap_map.rs                             |   19 
crates/editor/src/editor.rs                                           |   30 
crates/editor/src/element.rs                                          |   17 
crates/editor/src/semantic_tokens.rs                                  |  132 
crates/env_var/Cargo.toml                                             |    2 
crates/env_var/src/env_var.rs                                         |    2 
crates/feature_flags/src/flags.rs                                     |   10 
crates/file_finder/Cargo.toml                                         |    1 
crates/file_finder/src/file_finder.rs                                 |   69 
crates/file_finder/src/file_finder_tests.rs                           |  230 
crates/fs/src/fake_git_repo.rs                                        |  128 
crates/fs/src/fs.rs                                                   |    7 
crates/fs/tests/integration/fake_git_repo.rs                          |   12 
crates/fuzzy_nucleo/Cargo.toml                                        |   21 
crates/fuzzy_nucleo/LICENSE-GPL                                       |    1 
crates/fuzzy_nucleo/src/fuzzy_nucleo.rs                               |    5 
crates/fuzzy_nucleo/src/matcher.rs                                    |   39 
crates/fuzzy_nucleo/src/paths.rs                                      |  352 
crates/git/src/repository.rs                                          |  138 
crates/git_graph/src/git_graph.rs                                     |   71 
crates/git_ui/src/branch_picker.rs                                    |    2 
crates/git_ui/src/worktree_picker.rs                                  |    9 
crates/google_ai/Cargo.toml                                           |    4 
crates/google_ai/src/completion.rs                                    |  492 
crates/google_ai/src/google_ai.rs                                     |    3 
crates/gpui/Cargo.toml                                                |    1 
crates/gpui/src/gpui.rs                                               |    3 
crates/gpui/src/svg_renderer.rs                                       |  127 
crates/gpui/src/text_system/line.rs                                   |    2 
crates/gpui/src/window.rs                                             |    2 
crates/gpui_shared_string/Cargo.toml                                  |   17 
crates/gpui_shared_string/LICENSE-APACHE                              |    1 
crates/gpui_shared_string/gpui_shared_string.rs                       |    0 
crates/http_client/src/github_download.rs                             |   22 
crates/icons/src/icons.rs                                             |    3 
crates/language/src/buffer.rs                                         |   43 
crates/language/src/buffer_tests.rs                                   |    8 
crates/language_core/Cargo.toml                                       |    4 
crates/language_core/src/diagnostic.rs                                |    2 
crates/language_core/src/grammar.rs                                   |    2 
crates/language_core/src/language_config.rs                           |    2 
crates/language_core/src/language_name.rs                             |    2 
crates/language_core/src/lsp_adapter.rs                               |    2 
crates/language_core/src/manifest.rs                                  |    2 
crates/language_core/src/toolchain.rs                                 |    2 
crates/language_model/Cargo.toml                                      |    9 
crates/language_model/src/fake_provider.rs                            |    3 
crates/language_model/src/language_model.rs                           |  633 
crates/language_model/src/model/cloud_model.rs                        |   73 
crates/language_model/src/provider.rs                                 |   12 
crates/language_model/src/provider/anthropic.rs                       |   80 
crates/language_model/src/provider/google.rs                          |    5 
crates/language_model/src/provider/open_ai.rs                         |   28 
crates/language_model/src/provider/open_router.rs                     |   69 
crates/language_model/src/provider/x_ai.rs                            |    4 
crates/language_model/src/provider/zed.rs                             |    5 
crates/language_model/src/registry.rs                                 |    4 
crates/language_model/src/request.rs                                  |  626 
crates/language_model_core/Cargo.toml                                 |   27 
crates/language_model_core/LICENSE-GPL                                |    1 
crates/language_model_core/src/language_model_core.rs                 |  658 
crates/language_model_core/src/provider.rs                            |   21 
crates/language_model_core/src/rate_limiter.rs                        |    0 
crates/language_model_core/src/request.rs                             |  463 
crates/language_model_core/src/role.rs                                |    0 
crates/language_model_core/src/tool_schema.rs                         |   12 
crates/language_model_core/src/util.rs                                |   18 
crates/language_models/Cargo.toml                                     |    7 
crates/language_models/src/provider.rs                                |    2 
crates/language_models/src/provider/anthropic.rs                      |  779 
crates/language_models/src/provider/bedrock.rs                        |    2 
crates/language_models/src/provider/cloud.rs                          | 1007 
crates/language_models/src/provider/copilot_chat.rs                   |    8 
crates/language_models/src/provider/deepseek.rs                       |    2 
crates/language_models/src/provider/google.rs                         |  805 
crates/language_models/src/provider/lmstudio.rs                       |    2 
crates/language_models/src/provider/mistral.rs                        |    2 
crates/language_models/src/provider/open_ai.rs                        | 1756 
crates/language_models/src/provider/open_ai_compatible.rs             |    4 
crates/language_models/src/provider/open_router.rs                    |    2 
crates/language_models/src/provider/x_ai.rs                           |   40 
crates/language_models_cloud/Cargo.toml                               |   33 
crates/language_models_cloud/LICENSE-GPL                              |    1 
crates/language_models_cloud/src/language_models_cloud.rs             | 1059 
crates/markdown/src/html/html_parser.rs                               |  117 
crates/markdown/src/html/html_rendering.rs                            |   18 
crates/markdown/src/markdown.rs                                       |   69 
crates/multi_buffer/src/anchor.rs                                     |   77 
crates/multi_buffer/src/multi_buffer.rs                               |   77 
crates/multi_buffer/src/multi_buffer_tests.rs                         |   24 
crates/onboarding/Cargo.toml                                          |    2 
crates/onboarding/src/basics_page.rs                                  |  191 
crates/onboarding/src/onboarding.rs                                   |   18 
crates/onboarding/src/theme_preview.rs                                |   40 
crates/open_ai/Cargo.toml                                             |    7 
crates/open_ai/src/completion.rs                                      | 1693 
crates/open_ai/src/open_ai.rs                                         |   26 
crates/open_router/Cargo.toml                                         |    1 
crates/open_router/src/open_router.rs                                 |   68 
crates/outline_panel/src/outline_panel.rs                             |   13 
crates/picker/src/highlighted_match_with_paths.rs                     |   23 
crates/platform_title_bar/Cargo.toml                                  |    1 
crates/platform_title_bar/src/platform_title_bar.rs                   |    3 
crates/project/Cargo.toml                                             |    1 
crates/project/src/git_store.rs                                       |  102 
crates/project/src/lsp_store.rs                                       |   15 
crates/project/src/prettier_store.rs                                  |    2 
crates/project/src/project.rs                                         |   70 
crates/project/tests/integration/git_store.rs                         |   12 
crates/project/tests/integration/project_tests.rs                     |   15 
crates/project_panel/src/project_panel_tests.rs                       |  146 
crates/proto/proto/git.proto                                          |    1 
crates/recent_projects/src/recent_projects.rs                         |  127 
crates/recent_projects/src/sidebar_recent_projects.rs                 |   10 
crates/repl/src/kernels/ssh_kernel.rs                                 |    2 
crates/repl/src/kernels/wsl_kernel.rs                                 |    3 
crates/repl/src/notebook/notebook_ui.rs                               |  286 
crates/search/src/buffer_search.rs                                    |    1 
crates/search/src/project_search.rs                                   |  314 
crates/settings/src/vscode_import.rs                                  |    2 
crates/settings_content/Cargo.toml                                    |    1 
crates/settings_content/src/agent.rs                                  |    6 
crates/settings_content/src/language_model.rs                         |   34 
crates/settings_content/src/settings_content.rs                       |   26 
crates/settings_ui/Cargo.toml                                         |    1 
crates/settings_ui/src/components/input_field.rs                      |   59 
crates/settings_ui/src/page_data.rs                                   |  180 
crates/sidebar/Cargo.toml                                             |    2 
crates/sidebar/src/sidebar.rs                                         |  355 
crates/sidebar/src/sidebar_tests.rs                                   |   85 
crates/sidebar/src/thread_switcher.rs                                 |   64 
crates/tasks_ui/src/modal.rs                                          |    4 
crates/terminal_view/src/terminal_view.rs                             |    2 
crates/theme/src/registry.rs                                          |   17 
crates/theme_selector/Cargo.toml                                      |    4 
crates/theme_selector/src/icon_theme_selector.rs                      |  160 
crates/theme_selector/src/theme_selector.rs                           |  153 
crates/ui/src/components/ai.rs                                        |    4 
crates/ui/src/components/ai/agent_setup_button.rs                     |  110 
crates/ui/src/components/ai/parallel_agents_illustration.rs           |  149 
crates/ui/src/components/ai/thread_item.rs                            |   20 
crates/ui/src/components/collab/collab_notification.rs                |   56 
crates/ui/src/components/image.rs                                     |    3 
crates/ui/src/components/list/list_item.rs                            |    6 
crates/ui/src/components/notification/announcement_toast.rs           |   40 
crates/vim/src/command.rs                                             |    1 
crates/vim/src/motion.rs                                              |  261 
crates/vim/src/normal/search.rs                                       |   64 
crates/vim/src/state.rs                                               |   12 
crates/vim/src/vim.rs                                                 |    2 
crates/vim/test_data/test_matching_comments.json                      |   10 
crates/vim/test_data/test_matching_preprocessor_directives.json       |   18 
crates/web_search_providers/Cargo.toml                                |    1 
crates/web_search_providers/src/cloud.rs                              |    2 
crates/workspace/src/multi_workspace.rs                               |  160 
crates/workspace/src/multi_workspace_tests.rs                         |    2 
crates/workspace/src/pane.rs                                          |   96 
crates/workspace/src/persistence.rs                                   |    9 
crates/workspace/src/welcome.rs                                       |  181 
crates/workspace/src/workspace.rs                                     |  132 
crates/x_ai/Cargo.toml                                                |    2 
crates/x_ai/src/completion.rs                                         |   30 
crates/x_ai/src/x_ai.rs                                               |    2 
crates/zed/src/visual_test_runner.rs                                  |   28 
crates/zed/src/zed.rs                                                 |   68 
crates/zed/src/zed/app_menus.rs                                       |    2 
crates/zed/src/zed/open_listener.rs                                   |   15 
crates/zed_actions/src/lib.rs                                         |   32 
docs/src/ai/models.md                                                 |   67 
docs/src/vim.md                                                       |    2 
docs/src/visual-customization.md                                      |   14 
plan.md                                                               |  117 
tooling/xtask/src/tasks/workflows/compliance_check.rs                 |   50 
tooling/xtask/src/tasks/workflows/release.rs                          |  194 
251 files changed, 14,535 insertions(+), 9,533 deletions(-)

Detailed changes

.github/workflows/compliance_check.yml 🔗

@@ -6,6 +6,7 @@ env:
 on:
   schedule:
   - cron: 30 17 * * 2
+  workflow_dispatch: {}
 jobs:
   scheduled_compliance_check:
     if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
@@ -34,22 +35,44 @@ jobs:
         echo "tag=$TAG" >> "$GITHUB_OUTPUT"
     - id: run-compliance-check
       name: compliance_check::scheduled_compliance_check::run_compliance_check
-      run: cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report
+      run: |
+        echo "tag=$LATEST_TAG" >> "$GITHUB_OUTPUT"
+        cargo xtask compliance "$LATEST_TAG" --branch main --report-path compliance-report
       env:
         LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
-    - name: compliance_check::scheduled_compliance_check::send_failure_slack_notification
-      if: failure()
+    - name: '@actions/upload-artifact compliance-report.md'
+      if: always()
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: compliance-report.md
+        path: target/compliance-report.md
+        if-no-files-found: error
+    - name: send_compliance_slack_notification
+      if: always()
       run: |
-        MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews."
+        REPORT_CONTENT=""
+        if [ -f "target/compliance-report.md" ]; then
+            REPORT_CONTENT=$(cat "target/compliance-report.md")
+        fi
+
+        if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+            STATUS="✅ Scheduled compliance check passed for $COMPLIANCE_TAG"
+        else
+            STATUS="⚠️ Scheduled compliance check failed for $COMPLIANCE_TAG"
+        fi
+
+        MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s\n\n%s" "$STATUS" "$ARTIFACT_URL" "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22" "$REPORT_CONTENT")
 
         curl -X POST -H 'Content-type: application/json' \
             --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
             "$SLACK_WEBHOOK"
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
-        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+        COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
+        COMPLIANCE_TAG: ${{ steps.determine-version.outputs.tag }}
+        ARTIFACT_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts
 defaults:
   run:
     shell: bash -euxo pipefail {0}

.github/workflows/release.yml 🔗

@@ -295,9 +295,7 @@ jobs:
     timeout-minutes: 60
   compliance_check:
     if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
-    runs-on: namespace-profile-16x32-ubuntu-2204
-    env:
-      COMPLIANCE_FILE_PATH: compliance.md
+    runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
@@ -312,25 +310,33 @@ jobs:
         path: ~/.rustup
     - id: run-compliance-check
       name: release::compliance_check::run_compliance_check
-      run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_OUTPUT"
+      run: |
+        cargo xtask compliance "$GITHUB_REF_NAME" --report-path compliance-report
       env:
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
-    - name: release::compliance_check::send_compliance_slack_notification
+    - name: '@actions/upload-artifact compliance-report.md'
+      if: always()
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: compliance-report.md
+        path: target/compliance-report.md
+        if-no-files-found: error
+    - name: send_compliance_slack_notification
       if: always()
       run: |
-        if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
-            STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
-        else
-            STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
+        REPORT_CONTENT=""
+        if [ -f "target/compliance-report.md" ]; then
+            REPORT_CONTENT=$(cat "target/compliance-report.md")
         fi
 
-        REPORT_CONTENT=""
-        if [ -f "$COMPLIANCE_FILE_OUTPUT" ]; then
-            REPORT_CONTENT=$(cat "$REPORT_FILE")
+        if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+            STATUS="✅ Compliance check passed for $COMPLIANCE_TAG"
+        else
+            STATUS="❌ Compliance check failed for $COMPLIANCE_TAG"
         fi
 
-        MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+        MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s\n\n%s" "$STATUS" "$ARTIFACT_URL" "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22" "$REPORT_CONTENT")
 
         curl -X POST -H 'Content-type: application/json' \
             --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
@@ -338,6 +344,9 @@ jobs:
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
         COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
+        COMPLIANCE_TAG: ${{ github.ref_name }}
+        ARTIFACT_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts
+    timeout-minutes: 60
   bundle_linux_aarch64:
     needs:
     - run_tests_linux
@@ -671,32 +680,42 @@ jobs:
         path: ~/.rustup
     - id: run-post-upload-compliance-check
       name: release::validate_release_assets::run_post_upload_compliance_check
-      run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report
+      run: |
+        cargo xtask compliance "$GITHUB_REF_NAME" --report-path compliance-report
       env:
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
-    - name: release::validate_release_assets::send_post_upload_compliance_notification
+    - name: '@actions/upload-artifact compliance-report.md'
+      if: always()
+      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+      with:
+        name: compliance-report.md
+        path: target/compliance-report.md
+        if-no-files-found: error
+    - name: send_compliance_slack_notification
       if: always()
       run: |
-        if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
-            echo "Compliance check was skipped, not sending notification"
-            exit 0
+        REPORT_CONTENT=""
+        if [ -f "target/compliance-report.md" ]; then
+            REPORT_CONTENT=$(cat "target/compliance-report.md")
         fi
 
-        TAG="$GITHUB_REF_NAME"
-
         if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
-            MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
+            STATUS="✅ Compliance check passed for $COMPLIANCE_TAG"
         else
-            MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
+            STATUS="❌ Compliance check failed for $COMPLIANCE_TAG"
         fi
 
+        MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s\n\n%s" "$STATUS" "$ARTIFACT_URL" "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22" "$REPORT_CONTENT")
+
         curl -X POST -H 'Content-type: application/json' \
             --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
             "$SLACK_WEBHOOK"
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
         COMPLIANCE_OUTCOME: ${{ steps.run-post-upload-compliance-check.outcome }}
+        COMPLIANCE_TAG: ${{ github.ref_name }}
+        ARTIFACT_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts
   auto_release_preview:
     needs:
     - validate_release_assets

Cargo.lock 🔗

@@ -629,13 +629,17 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "chrono",
+ "collections",
  "futures 0.3.32",
  "http_client",
+ "language_model_core",
+ "log",
  "schemars",
  "serde",
  "serde_json",
  "strum 0.27.2",
  "thiserror 2.0.17",
+ "tiktoken-rs",
 ]
 
 [[package]]
@@ -1229,10 +1233,13 @@ dependencies = [
 name = "auto_update_ui"
 version = "0.1.0"
 dependencies = [
+ "agent_settings",
  "anyhow",
  "auto_update",
  "client",
+ "db",
  "editor",
+ "fs",
  "gpui",
  "markdown_preview",
  "release_channel",
@@ -1240,9 +1247,11 @@ dependencies = [
  "serde",
  "serde_json",
  "smol",
+ "telemetry",
  "ui",
  "util",
  "workspace",
+ "zed_actions",
 ]
 
 [[package]]
@@ -2903,7 +2912,6 @@ dependencies = [
  "http_client",
  "http_client_tls",
  "httparse",
- "language_model",
  "log",
  "objc2-foundation",
  "parking_lot",
@@ -2959,6 +2967,7 @@ dependencies = [
  "http_client",
  "parking_lot",
  "serde_json",
+ "smol",
  "thiserror 2.0.17",
  "yawc",
 ]
@@ -3204,7 +3213,6 @@ dependencies = [
  "anyhow",
  "call",
  "channel",
- "chrono",
  "client",
  "collections",
  "db",
@@ -3213,7 +3221,6 @@ dependencies = [
  "fuzzy",
  "gpui",
  "livekit_client",
- "log",
  "menu",
  "notifications",
  "picker",
@@ -3228,7 +3235,6 @@ dependencies = [
  "theme",
  "theme_settings",
  "time",
- "time_format",
  "title_bar",
  "ui",
  "util",
@@ -5162,6 +5168,7 @@ dependencies = [
  "buffer_diff",
  "client",
  "clock",
+ "cloud_api_client",
  "cloud_api_types",
  "cloud_llm_client",
  "collections",
@@ -5641,7 +5648,7 @@ dependencies = [
 name = "env_var"
 version = "0.1.0"
 dependencies = [
- "gpui",
+ "gpui_shared_string",
 ]
 
 [[package]]
@@ -6183,6 +6190,7 @@ dependencies = [
  "file_icons",
  "futures 0.3.32",
  "fuzzy",
+ "fuzzy_nucleo",
  "gpui",
  "menu",
  "open_path_prompt",
@@ -6740,6 +6748,15 @@ dependencies = [
  "thread_local",
 ]
 
+[[package]]
+name = "fuzzy_nucleo"
+version = "0.1.0"
+dependencies = [
+ "gpui",
+ "nucleo",
+ "util",
+]
+
 [[package]]
 name = "gaoya"
 version = "0.2.0"
@@ -7458,11 +7475,13 @@ dependencies = [
  "anyhow",
  "futures 0.3.32",
  "http_client",
+ "language_model_core",
+ "log",
  "schemars",
  "serde",
  "serde_json",
- "settings",
  "strum 0.27.2",
+ "tiktoken-rs",
 ]
 
 [[package]]
@@ -7531,6 +7550,7 @@ dependencies = [
  "getrandom 0.3.4",
  "gpui_macros",
  "gpui_platform",
+ "gpui_shared_string",
  "gpui_util",
  "gpui_web",
  "http_client",
@@ -7700,6 +7720,16 @@ dependencies = [
  "gpui_windows",
 ]
 
+[[package]]
+name = "gpui_shared_string"
+version = "0.1.0"
+dependencies = [
+ "derive_more",
+ "gpui_util",
+ "schemars",
+ "serde",
+]
+
 [[package]]
 name = "gpui_tokio"
 version = "0.1.0"
@@ -9348,7 +9378,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "collections",
- "gpui",
+ "gpui_shared_string",
  "log",
  "lsp",
  "parking_lot",
@@ -9387,12 +9417,8 @@ dependencies = [
 name = "language_model"
 version = "0.1.0"
 dependencies = [
- "anthropic",
  "anyhow",
  "base64 0.22.1",
- "cloud_api_client",
- "cloud_api_types",
- "cloud_llm_client",
  "collections",
  "credentials_provider",
  "env_var",
@@ -9401,16 +9427,31 @@ dependencies = [
  "http_client",
  "icons",
  "image",
+ "language_model_core",
  "log",
- "open_ai",
- "open_router",
  "parking_lot",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+ "util",
+]
+
+[[package]]
+name = "language_model_core"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "cloud_llm_client",
+ "futures 0.3.32",
+ "gpui_shared_string",
+ "http_client",
+ "partial-json-fixer",
  "schemars",
  "serde",
  "serde_json",
  "smol",
+ "strum 0.27.2",
  "thiserror 2.0.17",
- "util",
 ]
 
 [[package]]
@@ -9426,8 +9467,8 @@ dependencies = [
  "base64 0.22.1",
  "bedrock",
  "client",
+ "cloud_api_client",
  "cloud_api_types",
- "cloud_llm_client",
  "collections",
  "component",
  "convert_case 0.8.0",
@@ -9446,6 +9487,7 @@ dependencies = [
  "http_client",
  "language",
  "language_model",
+ "language_models_cloud",
  "lmstudio",
  "log",
  "menu",
@@ -9454,17 +9496,14 @@ dependencies = [
  "open_ai",
  "open_router",
  "opencode",
- "partial-json-fixer",
  "pretty_assertions",
  "release_channel",
  "schemars",
- "semver",
  "serde",
  "serde_json",
  "settings",
  "smol",
  "strum 0.27.2",
- "thiserror 2.0.17",
  "tiktoken-rs",
  "tokio",
  "ui",
@@ -9474,6 +9513,28 @@ dependencies = [
  "x_ai",
 ]
 
+[[package]]
+name = "language_models_cloud"
+version = "0.1.0"
+dependencies = [
+ "anthropic",
+ "anyhow",
+ "cloud_llm_client",
+ "futures 0.3.32",
+ "google_ai",
+ "gpui",
+ "http_client",
+ "language_model",
+ "open_ai",
+ "schemars",
+ "semver",
+ "serde",
+ "serde_json",
+ "smol",
+ "thiserror 2.0.17",
+ "x_ai",
+]
+
 [[package]]
 name = "language_onboarding"
 version = "0.1.0"
@@ -11063,6 +11124,27 @@ dependencies = [
  "windows-sys 0.61.2",
 ]
 
+[[package]]
+name = "nucleo"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5262af4c94921c2646c5ac6ff7900c2af9cbb08dc26a797e18130a7019c039d4"
+dependencies = [
+ "nucleo-matcher",
+ "parking_lot",
+ "rayon",
+]
+
+[[package]]
+name = "nucleo-matcher"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
+dependencies = [
+ "memchr",
+ "unicode-segmentation",
+]
+
 [[package]]
 name = "num"
 version = "0.4.3"
@@ -11507,6 +11589,8 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "client",
+ "cloud_api_types",
+ "collections",
  "component",
  "db",
  "documented",
@@ -11600,16 +11684,19 @@ name = "open_ai"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "collections",
  "futures 0.3.32",
  "http_client",
+ "language_model_core",
  "log",
+ "pretty_assertions",
  "rand 0.9.2",
  "schemars",
  "serde",
  "serde_json",
- "settings",
  "strum 0.27.2",
  "thiserror 2.0.17",
+ "tiktoken-rs",
 ]
 
 [[package]]
@@ -11641,6 +11728,7 @@ dependencies = [
  "anyhow",
  "futures 0.3.32",
  "http_client",
+ "language_model_core",
  "schemars",
  "serde",
  "serde_json",
@@ -12809,7 +12897,6 @@ checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
 name = "platform_title_bar"
 version = "0.1.0"
 dependencies = [
- "feature_flags",
  "gpui",
  "project",
  "settings",
@@ -13203,6 +13290,7 @@ dependencies = [
  "fs",
  "futures 0.3.32",
  "fuzzy",
+ "fuzzy_nucleo",
  "git",
  "git2",
  "git_hosting_providers",
@@ -15769,6 +15857,7 @@ dependencies = [
  "collections",
  "derive_more",
  "gpui",
+ "language_model_core",
  "log",
  "schemars",
  "serde",
@@ -15840,7 +15929,6 @@ dependencies = [
  "edit_prediction",
  "edit_prediction_ui",
  "editor",
- "feature_flags",
  "fs",
  "futures 0.3.32",
  "fuzzy",
@@ -15990,7 +16078,6 @@ dependencies = [
  "anyhow",
  "chrono",
  "editor",
- "feature_flags",
  "fs",
  "futures 0.3.32",
  "git",
@@ -17651,12 +17738,15 @@ dependencies = [
 name = "theme_selector"
 version = "0.1.0"
 dependencies = [
+ "editor",
  "fs",
  "fuzzy",
  "gpui",
  "log",
  "picker",
+ "project",
  "serde",
+ "serde_json",
  "settings",
  "telemetry",
  "theme",
@@ -20151,6 +20241,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "client",
+ "cloud_api_client",
  "cloud_api_types",
  "cloud_llm_client",
  "futures 0.3.32",
@@ -21754,9 +21845,11 @@ name = "x_ai"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "language_model_core",
  "schemars",
  "serde",
  "strum 0.27.2",
+ "tiktoken-rs",
 ]
 
 [[package]]

Cargo.toml 🔗

@@ -78,6 +78,7 @@ members = [
     "crates/fs",
     "crates/fs_benchmarks",
     "crates/fuzzy",
+    "crates/fuzzy_nucleo",
     "crates/git",
     "crates/git_graph",
     "crates/git_hosting_providers",
@@ -86,6 +87,7 @@ members = [
     "crates/google_ai",
     "crates/grammars",
     "crates/gpui",
+    "crates/gpui_shared_string",
     "crates/gpui_linux",
     "crates/gpui_macos",
     "crates/gpui_macros",
@@ -109,7 +111,9 @@ members = [
     "crates/language_core",
     "crates/language_extension",
     "crates/language_model",
+    "crates/language_model_core",
     "crates/language_models",
+    "crates/language_models_cloud",
     "crates/language_onboarding",
     "crates/language_selector",
     "crates/language_tools",
@@ -325,6 +329,7 @@ file_finder = { path = "crates/file_finder" }
 file_icons = { path = "crates/file_icons" }
 fs = { path = "crates/fs" }
 fuzzy = { path = "crates/fuzzy" }
+fuzzy_nucleo = { path = "crates/fuzzy_nucleo" }
 git = { path = "crates/git" }
 git_graph = { path = "crates/git_graph" }
 git_hosting_providers = { path = "crates/git_hosting_providers" }
@@ -333,6 +338,7 @@ go_to_line = { path = "crates/go_to_line" }
 google_ai = { path = "crates/google_ai" }
 grammars = { path = "crates/grammars" }
 gpui = { path = "crates/gpui", default-features = false }
+gpui_shared_string = { path = "crates/gpui_shared_string" }
 gpui_linux = { path = "crates/gpui_linux", default-features = false }
 gpui_macos = { path = "crates/gpui_macos", default-features = false }
 gpui_macros = { path = "crates/gpui_macros" }
@@ -359,7 +365,9 @@ language = { path = "crates/language" }
 language_core = { path = "crates/language_core" }
 language_extension = { path = "crates/language_extension" }
 language_model = { path = "crates/language_model" }
+language_model_core = { path = "crates/language_model_core" }
 language_models = { path = "crates/language_models" }
+language_models_cloud = { path = "crates/language_models_cloud" }
 language_onboarding = { path = "crates/language_onboarding" }
 language_selector = { path = "crates/language_selector" }
 language_tools = { path = "crates/language_tools" }
@@ -609,6 +617,7 @@ naga = { version = "29.0", features = ["wgsl-in"] }
 nanoid = "0.4"
 nbformat = "1.2.0"
 nix = "0.29"
+nucleo = "0.5"
 num-format = "0.4.4"
 objc = "0.2"
 objc2-app-kit = { version = "0.3", default-features = false, features = [ "NSGraphics" ] }

assets/icons/folder_open_add.svg 🔗

@@ -0,0 +1,5 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M4.24135 9.23279L5.18103 7.41608C5.28319 7.21319 5.43858 7.0419 5.63058 6.92052C5.82258 6.79914 6.04397 6.73224 6.27106 6.72698H13.0117M13.0117 6.72698C13.2031 6.72664 13.392 6.77016 13.564 6.8542C13.736 6.93824 13.8864 7.06056 14.0037 7.21177C14.1211 7.36298 14.2022 7.53907 14.2409 7.72652C14.2796 7.91397 14.2749 8.10779 14.227 8.29311L13.9858 9.23279M13.0117 6.72698V5.47407C13.0117 5.14178 12.8797 4.8231 12.6447 4.58813C12.4098 4.35317 12.0911 4.22116 11.7588 4.22116H8.04392C7.8365 4.22113 7.63233 4.1696 7.44973 4.07119C7.26714 3.97279 7.11183 3.83059 6.99774 3.65736L6.49032 2.90561C6.37507 2.7306 6.21778 2.58728 6.03282 2.48878C5.84786 2.39028 5.64115 2.33975 5.43161 2.3418H2.98844C2.65615 2.3418 2.33747 2.47381 2.1025 2.70877C1.86754 2.94374 1.73553 3.26242 1.73553 3.59471V11.7386C1.73553 12.0709 1.86754 12.3896 2.1025 12.6245C2.33747 12.8595 2.65615 12.9915 2.98844 12.9915H7.3118" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M11.6 9.88724L11.6 14.1318" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M9.47626 12.5234L11.6 14.6471L13.7237 12.5234" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/icons/folder_plus.svg 🔗

@@ -1,5 +0,0 @@
-<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M8 7.29524V10.6536" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M6.3208 8.97442H9.67917" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M12.8 13C13.1183 13 13.4235 12.8761 13.6486 12.6554C13.8735 12.4349 14 12.1356 14 11.8236V5.94118C14 5.62916 13.8735 5.32992 13.6486 5.10929C13.4235 4.88866 13.1183 4.76471 12.8 4.76471H8.06C7.8593 4.76664 7.66133 4.71919 7.48418 4.6267C7.30703 4.53421 7.15637 4.39964 7.046 4.2353L6.56 3.52941C6.45073 3.36675 6.30199 3.23322 6.1271 3.14082C5.95221 3.04842 5.75666 3.00004 5.558 3H3.2C2.88174 3 2.57651 3.12395 2.35148 3.34458C2.12643 3.56521 2 3.86445 2 4.17647V11.8236C2 12.1356 2.12643 12.4349 2.35148 12.6554C2.57651 12.8761 2.88174 13 3.2 13H12.8Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-</svg>

assets/icons/open_new_window.svg 🔗

@@ -0,0 +1,7 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M14.4381 11.5973V4.40274C14.4381 3.7405 13.8616 3.20366 13.1505 3.20366H2.84956C2.13843 3.20366 1.56195 3.7405 1.56195 4.40274V11.5973C1.56195 12.2595 2.13843 12.7963 2.84956 12.7963H5.69262" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M6.71237 3.20366V5.75366" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M1.56195 5.75365H14.4381" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M4.13715 3.20366V5.75366" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M9.01288 13.0158H10.8129M10.8129 13.0158H12.6129M10.8129 13.0158V11.2158M10.8129 13.0158V14.8158" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/images/ai_grid.svg 🔗

@@ -1,334 +0,0 @@
-<svg width="400" height="92" viewBox="0 0 400 92" fill="none" xmlns="http://www.w3.org/2000/svg">
-<g clip-path="url(#clip0_2501_1466)">
-<path d="M73.6743 -4.41071L75.5416 -1.32632L73.4126 1.58358" stroke="black" stroke-width="1.5"/>
-<path d="M71.6108 -2.99939L72.5445 -1.4572L71.48 -0.00224495" stroke="black" stroke-width="1.5"/>
-<path d="M69.0085 -0.710689L68.9169 1.38731C66.8498 1.29706 65.9887 1.25947 63.9216 1.16922L63.9478 0.569787L69.1524 -4.00755L69.1786 -4.60698L64.1833 -4.82507L64.0917 -2.72707" stroke="black" stroke-width="1.5"/>
-<path d="M91.1332 -5.23706C90.8807 -5.51263 90.529 -5.67673 90.1555 -5.69303C89.7821 -5.70934 89.4174 -5.57672 89.1418 -5.3242C88.8661 -5.07169 88.7021 -4.72001 88.6858 -4.34656C88.6694 -3.97312 88.8021 -3.60849 89.0546 -3.33281L95.4319 3.62501C95.5424 3.74604 95.681 3.83803 95.8356 3.89288L97.9807 4.64767C98.0234 4.66247 98.0692 4.66544 98.1133 4.65629C98.1576 4.64713 98.1984 4.62618 98.2317 4.59567C98.2649 4.56516 98.2894 4.52621 98.3023 4.48296C98.3152 4.43971 98.3161 4.39377 98.305 4.35003L97.7405 2.14679C97.6998 1.98827 97.6207 1.84218 97.5104 1.72135L91.1332 -5.23706Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M88.9944 0.874369L87.9954 0.83075" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M91.8171 5.0014L91.8608 4.00236" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M89.9062 2.91609L88.8635 3.87152" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<g clip-path="url(#clip1_2501_1466)">
-<mask id="mask0_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="109" y="-8" width="17" height="17">
-<path d="M125.843 -7.13772L109.858 -7.83563L109.16 8.14914L125.145 8.84705L125.843 -7.13772Z" fill="white"/>
-</mask>
-<g mask="url(#mask0_2501_1466)">
-<path d="M120.459 1.53575L120.368 3.63375C117.887 3.52545 116.854 3.48034 114.374 3.37204L114.4 2.77261L120.603 -1.76111L120.63 -2.36054L114.635 -2.62225L114.544 -0.524252" stroke="black" stroke-width="1.5"/>
-<path d="M110.899 2.71985L110.724 6.71604L114.221 6.86871" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M123.886 3.28688L123.712 7.28308L120.215 7.13041" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M141.74 -4.44171L140.298 -0.625665C140.224 -0.430368 140.105 -0.255126 139.951 -0.114073C139.798 0.026993 139.613 0.129928 139.412 0.186449L135.484 1.29086L139.3 2.73335C139.496 2.80717 139.671 2.92583 139.812 3.07978C139.953 3.23371 140.056 3.41863 140.113 3.61962L141.217 7.54686L142.659 3.73082C142.733 3.53552 142.852 3.36028 143.006 3.21922C143.16 3.07816 143.345 2.97522 143.546 2.9187L147.473 1.81429L143.657 0.371802C143.462 0.297979 143.286 0.179319 143.145 0.0253739C143.004 -0.12856 142.901 -0.313477 142.845 -0.514465L141.74 -4.44171Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M145.995 5.08642L145.879 7.75054M144.605 6.36028L147.269 6.4766" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M169.583 -0.223267L171.45 2.86112L169.321 5.77102" stroke="black" stroke-width="1.5"/>
-<path d="M167.52 1.18805L168.453 2.73024L167.389 4.18519" stroke="black" stroke-width="1.5"/>
-<path d="M164.917 3.47675L164.826 5.57475C162.759 5.4845 161.897 5.4469 159.83 5.35666L159.856 4.75723L165.061 0.179892L165.087 -0.419537L160.092 -0.637634L160 1.46037" stroke="black" stroke-width="1.5"/>
-<g clip-path="url(#clip2_2501_1466)">
-<mask id="mask1_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="181" y="-5" width="17" height="17">
-<path d="M197.774 -3.99716L181.79 -4.69507L181.092 11.2897L197.076 11.9876L197.774 -3.99716Z" fill="white"/>
-</mask>
-<g mask="url(#mask1_2501_1466)">
-<path d="M192.391 4.67631L192.299 6.77432C189.819 6.66602 188.785 6.6209 186.305 6.5126L186.331 5.91317L192.535 1.37946L192.561 0.780027L186.567 0.518311L186.475 2.61631" stroke="black" stroke-width="1.5"/>
-<path d="M183.048 0.86515L183.223 -3.13104L186.719 -2.97837" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M196.036 1.43219L196.21 -2.56401L192.714 -2.71667" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M182.83 5.86041L182.656 9.85661L186.152 10.0093" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M195.818 6.42745L195.643 10.4236L192.147 10.271" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M211.019 -0.00268162C210.766 -0.278258 210.415 -0.442354 210.041 -0.458659C209.668 -0.474965 209.303 -0.342345 209.028 -0.0898278C208.752 0.162689 208.588 0.514369 208.571 0.887811C208.555 1.26125 208.688 1.62589 208.94 1.90157L215.318 8.85938C215.428 8.98042 215.567 9.0724 215.721 9.12725L217.866 9.88204C217.909 9.89685 217.955 9.89982 217.999 9.89067C218.043 9.88151 218.084 9.86056 218.117 9.83004C218.151 9.79953 218.175 9.76058 218.188 9.71733C218.201 9.67408 218.202 9.62815 218.191 9.5844L217.626 7.38117C217.586 7.22265 217.506 7.07656 217.396 6.95572L211.019 -0.00268162Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M209.501 2.52063L211.587 0.609772" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M208.88 6.10874L207.881 6.06512" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M218.174 3.32983L219.173 3.37345" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M211.703 10.2358L211.747 9.23673" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M215.351 -0.797205L215.307 0.201843" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M209.792 8.15047L208.749 9.1059" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M217.262 1.28815L218.305 0.332718" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M241.514 2.91736L243.382 6.00174L241.253 8.91165" stroke="black" stroke-width="1.5"/>
-<path d="M239.451 4.32867L240.385 5.87087L239.32 7.32582" stroke="black" stroke-width="1.5"/>
-<path d="M236.849 6.61738L236.757 8.71538C234.69 8.62513 233.829 8.58753 231.762 8.49728L231.788 7.89785L236.993 3.32052L237.019 2.72109L232.023 2.50299L231.932 4.60099" stroke="black" stroke-width="1.5"/>
-<path d="M258.973 2.09101C258.721 1.81543 258.369 1.65134 257.996 1.63503C257.622 1.61872 257.258 1.75134 256.982 2.00386C256.706 2.25638 256.542 2.60806 256.526 2.9815C256.509 3.35494 256.642 3.71958 256.895 3.99525L263.272 10.9531C263.382 11.0741 263.521 11.1661 263.676 11.2209L265.821 11.9757C265.863 11.9905 265.909 11.9935 265.953 11.9844C265.998 11.9752 266.039 11.9542 266.072 11.9237C266.105 11.8932 266.129 11.8543 266.142 11.811C266.155 11.7678 266.156 11.7218 266.145 11.6781L265.581 9.47486C265.54 9.31634 265.461 9.17025 265.35 9.04941L258.973 2.09101Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M257.456 4.61432L259.541 2.70346" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M256.834 8.20243L255.835 8.15881" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M266.128 5.42352L267.127 5.46714" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M259.657 12.3295L259.701 11.3304" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M263.305 1.29648L263.262 2.29553" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M257.746 10.2442L256.704 11.1996" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M265.216 3.38184L266.259 2.42641" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<g clip-path="url(#clip3_2501_1466)">
-<mask id="mask2_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="277" y="-1" width="17" height="18">
-<path d="M293.683 0.190342L277.698 -0.507568L277 15.4772L292.985 16.1751L293.683 0.190342Z" fill="white"/>
-</mask>
-<g mask="url(#mask2_2501_1466)">
-<path d="M288.3 8.86381L288.208 10.9618C285.727 10.8535 284.694 10.8084 282.214 10.7001L282.24 10.1007L288.443 5.56696L288.47 4.96753L282.475 4.70581L282.384 6.80381" stroke="black" stroke-width="1.5"/>
-<path d="M278.957 5.05265L279.131 1.05646L282.628 1.20913" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M291.945 5.61969L292.119 1.62349L288.622 1.47083" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M278.739 10.0479L278.564 14.0441L282.061 14.1968" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M291.726 10.6149L291.552 14.6111L288.055 14.4585" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<g clip-path="url(#clip4_2501_1466)">
-<path d="M309.58 2.88629L308.138 6.70234C308.064 6.89763 307.945 7.07288 307.792 7.21393C307.638 7.355 307.453 7.45793 307.252 7.51445L303.324 8.61886L307.141 10.0614C307.336 10.1352 307.511 10.2538 307.652 10.4078C307.793 10.5617 307.896 10.7466 307.953 10.9476L309.057 14.8749L310.5 11.0588C310.573 10.8635 310.692 10.6883 310.846 10.5472C311 10.4062 311.185 10.3032 311.386 10.2467L315.313 9.14229L311.497 7.6998C311.302 7.62598 311.126 7.50732 310.985 7.35338C310.844 7.19944 310.741 7.01453 310.685 6.81354L309.58 2.88629Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M304.918 2.68273L304.802 5.34686M303.528 3.95664L306.192 4.07296" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M313.836 12.4144L313.719 15.0785M312.446 13.6882L315.11 13.8045" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-</g>
-<path d="M337.423 7.1048L339.29 10.1892L337.161 13.0991" stroke="black" stroke-width="1.5"/>
-<path d="M335.36 8.51611L336.293 10.0583L335.229 11.5133" stroke="black" stroke-width="1.5"/>
-<path d="M332.757 10.8048L332.666 12.9028C330.599 12.8126 329.737 12.775 327.67 12.6847L327.697 12.0853L332.901 7.50796L332.927 6.90853L327.932 6.69043L327.841 8.78843" stroke="black" stroke-width="1.5"/>
-<g clip-path="url(#clip5_2501_1466)">
-<mask id="mask3_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="348" y="2" width="18" height="18">
-<path d="M365.614 3.33091L349.63 2.633L348.932 18.6178L364.917 19.3157L365.614 3.33091Z" fill="white"/>
-</mask>
-<g mask="url(#mask3_2501_1466)">
-<path d="M360.231 12.0044L360.139 14.1024C357.659 13.9941 356.625 13.949 354.145 13.8407L354.171 13.2412L360.375 8.70752L360.401 8.10809L354.407 7.84637L354.315 9.94438" stroke="black" stroke-width="1.5"/>
-<path d="M350.888 8.19321L351.063 4.19702L354.559 4.34969" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M363.876 8.76025L364.05 4.76406L360.554 4.61139" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M350.67 13.1885L350.496 17.1847L353.992 17.3373" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M363.658 13.7555L363.483 17.7517L359.987 17.599" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M378.859 7.32532C378.606 7.04974 378.255 6.88565 377.881 6.86934C377.508 6.85304 377.143 6.98566 376.868 7.23818C376.592 7.49069 376.428 7.84237 376.412 8.21581C376.395 8.58926 376.528 8.95389 376.78 9.22957L383.158 16.1874C383.268 16.3084 383.407 16.4004 383.561 16.4553L385.707 17.21C385.749 17.2249 385.795 17.2278 385.839 17.2187C385.883 17.2095 385.924 17.1886 385.958 17.158C385.991 17.1275 386.015 17.0886 386.028 17.0453C386.041 17.0021 386.042 16.9562 386.031 16.9124L385.466 14.7092C385.426 14.5507 385.347 14.4046 385.236 14.2837L378.859 7.32532Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M377.341 9.84863L379.427 7.93778" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M376.72 13.4367L375.721 13.3931" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M386.014 10.6578L387.013 10.7015" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M379.543 17.5638L379.587 16.5647" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M383.191 6.5308L383.147 7.52985" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M377.632 15.4785L376.589 16.4339" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M385.102 8.61615L386.145 7.66072" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<g clip-path="url(#clip6_2501_1466)">
-<mask id="mask4_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="58" y="46" width="18" height="17">
-<path d="M75.446 46.7152L59.4612 46.0173L58.7633 62.0021L74.748 62.7L75.446 46.7152Z" fill="white"/>
-</mask>
-<g mask="url(#mask4_2501_1466)">
-<path d="M70.0625 55.3887L69.9709 57.4867C67.4904 57.3784 66.457 57.3333 63.9766 57.225L64.0027 56.6256L70.2064 52.0919L70.2326 51.4924L64.2383 51.2307L64.1467 53.3287" stroke="black" stroke-width="1.5"/>
-<path d="M60.7198 51.5776L60.8943 47.5814L64.391 47.734" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M73.7074 52.1446L73.8819 48.1484L70.3853 47.9957" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M60.5017 56.5728L60.3272 60.569L63.8239 60.7217" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M73.4893 57.1399L73.3149 61.136L69.8182 60.9834" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M91.3434 49.4113L89.9009 53.2273C89.8271 53.4226 89.7084 53.5978 89.5545 53.7389C89.4006 53.88 89.2156 53.9829 89.0147 54.0394L85.0874 55.1438L88.9035 56.5863C89.0988 56.6601 89.274 56.7788 89.415 56.9327C89.5561 57.0867 89.659 57.2716 89.7156 57.4726L90.82 61.3998L92.2625 57.5838C92.3363 57.3885 92.4549 57.2132 92.6089 57.0722C92.7628 56.9311 92.9477 56.8282 93.1487 56.7717L97.076 55.6673L93.2599 54.2248C93.0646 54.1509 92.8894 54.0323 92.7483 53.8783C92.6073 53.7244 92.5043 53.5395 92.4478 53.3385L91.3434 49.4113Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M86.6812 49.2077L86.5649 51.8718M85.291 50.4816L87.9551 50.5979" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M95.5984 58.9394L95.4821 61.6035M94.2082 60.2132L96.8723 60.3296" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M112.668 51.7565C112.415 51.481 112.063 51.3169 111.69 51.3006C111.316 51.2843 110.952 51.4169 110.676 51.6694C110.401 51.9219 110.237 52.2736 110.22 52.647C110.204 53.0205 110.337 53.3851 110.589 53.6608L116.966 60.6186C117.077 60.7396 117.215 60.8316 117.37 60.8865L119.515 61.6413C119.558 61.6561 119.604 61.659 119.648 61.6499C119.692 61.6407 119.733 61.6198 119.766 61.5893C119.799 61.5587 119.824 61.5198 119.837 61.4765C119.85 61.4333 119.85 61.3874 119.839 61.3436L119.275 59.1404C119.234 58.9819 119.155 58.8358 119.045 58.7149L112.668 51.7565Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M111.15 54.2798L113.235 52.369" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M110.529 57.868L109.53 57.8243" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M119.822 55.0891L120.821 55.1327" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M113.352 61.995L113.395 60.9959" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M117 50.962L116.956 51.9611" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M111.441 59.9097L110.398 60.8651" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M118.911 53.0474L119.953 52.0919" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M143.163 54.6766L145.03 57.761L142.901 60.6709" stroke="black" stroke-width="1.5"/>
-<path d="M141.1 56.088L142.033 57.6301L140.969 59.0851" stroke="black" stroke-width="1.5"/>
-<path d="M138.497 58.3767L138.406 60.4747C136.339 60.3844 135.477 60.3468 133.41 60.2566L133.437 59.6571L138.641 55.0798L138.667 54.4804L133.672 54.2623L133.581 56.3603" stroke="black" stroke-width="1.5"/>
-<g clip-path="url(#clip7_2501_1466)">
-<mask id="mask5_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="154" y="50" width="18" height="17">
-<path d="M171.355 50.9027L155.37 50.2048L154.672 66.1895L170.657 66.8875L171.355 50.9027Z" fill="white"/>
-</mask>
-<g mask="url(#mask5_2501_1466)">
-<path d="M165.971 59.5762L165.88 61.6742C163.399 61.5659 162.366 61.5207 159.885 61.4124L159.911 60.813L166.115 56.2793L166.141 55.6799L160.147 55.4182L160.055 57.5162" stroke="black" stroke-width="1.5"/>
-<path d="M156.629 55.765L156.803 51.7688L160.3 51.9215" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M169.616 56.332L169.791 52.3358L166.294 52.1832" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M156.41 60.7603L156.236 64.7564L159.733 64.9091" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M169.398 61.3273L169.224 65.3235L165.727 65.1708" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M187.252 53.5987L185.81 57.4147C185.736 57.61 185.617 57.7853 185.463 57.9263C185.309 58.0674 185.124 58.1703 184.923 58.2269L180.996 59.3313L184.812 60.7738C185.007 60.8476 185.183 60.9662 185.324 61.1202C185.465 61.2741 185.568 61.459 185.624 61.66L186.729 65.5873L188.171 61.7712C188.245 61.5759 188.364 61.4007 188.518 61.2596C188.672 61.1186 188.856 61.0156 189.057 60.9591L192.985 59.8547L189.169 58.4122C188.973 58.3384 188.798 58.2197 188.657 58.0658C188.516 57.9118 188.413 57.7269 188.357 57.5259L187.252 53.5987Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M182.59 53.3951L182.474 56.0593M181.2 54.669L183.864 54.7854" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M191.507 63.1268L191.391 65.7909M190.117 64.4007L192.781 64.517" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M208.576 55.944C208.324 55.6685 207.972 55.5044 207.599 55.4881C207.225 55.4718 206.861 55.6044 206.585 55.8569C206.309 56.1094 206.145 56.4611 206.129 56.8345C206.113 57.208 206.245 57.5726 206.498 57.8483L212.875 64.8061C212.986 64.9271 213.124 65.0191 213.279 65.074L215.424 65.8288C215.466 65.8436 215.512 65.8465 215.556 65.8374C215.601 65.8282 215.642 65.8073 215.675 65.7768C215.708 65.7462 215.732 65.7073 215.745 65.664C215.758 65.6208 215.759 65.5749 215.748 65.5311L215.184 63.3279C215.143 63.1694 215.064 63.0233 214.953 62.9024L208.576 55.944Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M207.059 58.4673L209.144 56.5565" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M206.438 62.0555L205.438 62.0118" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M215.731 59.2766L216.73 59.3202" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M209.26 66.1825L209.304 65.1834" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M212.908 55.1495L212.865 56.1486" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M207.349 64.0972L206.307 65.0526" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M214.819 57.2349L215.862 56.2794" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M239.072 58.8641L240.939 61.9485L238.81 64.8584" stroke="black" stroke-width="1.5"/>
-<path d="M237.008 60.2754L237.942 61.8176L236.877 63.2725" stroke="black" stroke-width="1.5"/>
-<path d="M234.406 62.5641L234.314 64.6621C232.247 64.5718 231.386 64.5342 229.319 64.444L229.345 63.8446L234.55 59.2672L234.576 58.6678L229.581 58.4497L229.489 60.5477" stroke="black" stroke-width="1.5"/>
-<g clip-path="url(#clip8_2501_1466)">
-<mask id="mask6_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="250" y="54" width="18" height="18">
-<path d="M267.263 55.0902L251.278 54.3923L250.58 70.377L266.565 71.075L267.263 55.0902Z" fill="white"/>
-</mask>
-<g mask="url(#mask6_2501_1466)">
-<path d="M261.88 63.7637L261.788 65.8617C259.308 65.7534 258.274 65.7082 255.794 65.5999L255.82 65.0005L262.024 60.4668L262.05 59.8674L256.055 59.6057L255.964 61.7037" stroke="black" stroke-width="1.5"/>
-<path d="M252.537 59.9525L252.711 55.9563L256.208 56.109" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M265.525 60.5195L265.699 56.5233L262.202 56.3707" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M252.319 64.9478L252.144 68.9439L255.641 69.0966" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M265.306 65.5148L265.132 69.511L261.635 69.3583" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<g clip-path="url(#clip9_2501_1466)">
-<path d="M283.161 57.7862L281.718 61.6022C281.644 61.7975 281.526 61.9728 281.372 62.1138C281.218 62.2549 281.033 62.3578 280.832 62.4144L276.905 63.5188L280.721 64.9613C280.916 65.0351 281.091 65.1537 281.232 65.3077C281.373 65.4616 281.476 65.6465 281.533 65.8475L282.637 69.7748L284.08 65.9587C284.154 65.7634 284.272 65.5882 284.426 65.4471C284.58 65.3061 284.765 65.2031 284.966 65.1466L288.893 64.0422L285.077 62.5997C284.882 62.5259 284.707 62.4072 284.566 62.2533C284.425 62.0993 284.322 61.9144 284.265 61.7134L283.161 57.7862Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M278.499 57.5826L278.382 60.2468M277.108 58.8565L279.772 58.9729" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M287.416 67.3143L287.3 69.9784M286.026 68.5881L288.69 68.7044" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-</g>
-<path d="M304.485 60.1315C304.232 59.8559 303.881 59.6918 303.507 59.6755C303.134 59.6592 302.769 59.7918 302.494 60.0443C302.218 60.2968 302.054 60.6485 302.038 61.022C302.021 61.3954 302.154 61.76 302.406 62.0357L308.784 68.9935C308.894 69.1146 309.033 69.2066 309.187 69.2614L311.332 70.0162C311.375 70.031 311.421 70.034 311.465 70.0248C311.509 70.0157 311.55 69.9947 311.583 69.9642C311.617 69.9337 311.641 69.8947 311.654 69.8515C311.667 69.8082 311.668 69.7623 311.657 69.7186L311.092 67.5153C311.052 67.3568 310.973 67.2107 310.862 67.0899L304.485 60.1315Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M302.967 62.6548L305.053 60.7439" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M302.346 66.2429L301.347 66.1993" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M311.64 63.464L312.639 63.5076" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M305.169 70.3699L305.213 69.3709" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M308.817 59.337L308.773 60.336" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M303.258 68.2846L302.215 69.2401" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M310.728 61.4223L311.771 60.4669" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<g clip-path="url(#clip10_2501_1466)">
-<path d="M331.115 59.8799L329.673 63.6959C329.599 63.8912 329.48 64.0665 329.326 64.2075C329.172 64.3486 328.987 64.4515 328.786 64.508L324.859 65.6125L328.675 67.0549C328.87 67.1288 329.046 67.2474 329.187 67.4014C329.328 67.5553 329.431 67.7402 329.487 67.9412L330.592 71.8685L332.034 68.0524C332.108 67.8571 332.227 67.6819 332.381 67.5408C332.535 67.3998 332.719 67.2968 332.92 67.2403L336.848 66.1359L333.032 64.6934C332.836 64.6196 332.661 64.5009 332.52 64.347C332.379 64.193 332.276 64.0081 332.22 63.8071L331.115 59.8799Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M326.453 59.6763L326.337 62.3404M325.063 60.9502L327.727 61.0666" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M335.37 69.4079L335.254 72.0721M333.98 70.6818L336.644 70.7981" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-</g>
-<path d="M358.958 64.0984L360.825 67.1828L358.696 70.0927" stroke="black" stroke-width="1.5"/>
-<path d="M356.894 65.5097L357.828 67.0519L356.763 68.5068" stroke="black" stroke-width="1.5"/>
-<path d="M354.292 67.7984L354.2 69.8964C352.133 69.8062 351.272 69.7686 349.205 69.6783L349.231 69.0789L354.436 64.5015L354.462 63.9021L349.467 63.684L349.375 65.782" stroke="black" stroke-width="1.5"/>
-<g clip-path="url(#clip11_2501_1466)">
-<mask id="mask7_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="370" y="59" width="18" height="18">
-<path d="M387.149 60.3245L371.164 59.6266L370.466 75.6114L386.451 76.3093L387.149 60.3245Z" fill="white"/>
-</mask>
-<g mask="url(#mask7_2501_1466)">
-<path d="M381.766 68.998L381.674 71.096C379.194 70.9877 378.16 70.9426 375.68 70.8343L375.706 70.2348L381.91 65.7011L381.936 65.1017L375.941 64.84L375.85 66.938" stroke="black" stroke-width="1.5"/>
-<path d="M372.423 65.1868L372.597 61.1906L376.094 61.3433" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M385.411 65.7538L385.585 61.7576L382.088 61.605" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M372.205 70.1821L372.03 74.1783L375.527 74.3309" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M385.192 70.7491L385.018 74.7453L381.521 74.5926" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M402.246 69.8922L402.154 71.9902C400.087 71.8999 399.226 71.8623 397.159 71.7721L397.185 71.1726L402.39 66.5953L402.416 65.9959L397.421 65.7778L397.329 67.8758" stroke="black" stroke-width="1.5"/>
-<path d="M53.9459 21.166C53.6934 20.8904 53.3417 20.7263 52.9683 20.71C52.5948 20.6937 52.2302 20.8263 51.9545 21.0788C51.6789 21.3313 51.5149 21.683 51.4985 22.0565C51.4821 22.4299 51.6148 22.7945 51.8673 23.0702L58.2446 30.028C58.3551 30.1491 58.4938 30.241 58.6483 30.2959L60.7934 31.0507C60.8361 31.0655 60.8819 31.0685 60.9261 31.0593C60.9703 31.0501 61.0112 31.0292 61.0444 30.9987C61.0777 30.9682 61.1021 30.9292 61.115 30.886C61.1279 30.8427 61.1288 30.7968 61.1177 30.753L60.5533 28.5498C60.5126 28.3913 60.4335 28.2452 60.3231 28.1244L53.9459 21.166Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M52.4282 23.6893L54.5136 21.7784" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M51.8072 27.2774L50.8081 27.2338" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M61.1006 24.4985L62.0996 24.5421" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M54.6299 31.4044L54.6735 30.4054" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M58.278 20.3714L58.2344 21.3705" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M52.7189 29.3191L51.6763 30.2745" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M60.189 22.4568L61.2316 21.5014" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M84.4414 24.0861L86.3086 27.1704L84.1797 30.0803" stroke="black" stroke-width="1.5"/>
-<path d="M82.3779 25.4974L83.3115 27.0396L82.2471 28.4945" stroke="black" stroke-width="1.5"/>
-<path d="M79.7756 27.7861L79.684 29.8841C77.6169 29.7938 76.7558 29.7562 74.6887 29.666L74.7149 29.0666L79.9195 24.4892L79.9457 23.8898L74.9504 23.6717L74.8588 25.7697" stroke="black" stroke-width="1.5"/>
-<path d="M104.553 21.9613L103.111 25.7773C103.037 25.9726 102.918 26.1479 102.764 26.2889C102.611 26.43 102.426 26.5329 102.225 26.5895L98.2974 27.6939L102.113 29.1364C102.309 29.2102 102.484 29.3288 102.625 29.4828C102.766 29.6367 102.869 29.8216 102.926 30.0226L104.03 33.9499L105.472 30.1338C105.546 29.9385 105.665 29.7633 105.819 29.6222C105.973 29.4812 106.158 29.3782 106.359 29.3217L110.286 28.2173L106.47 26.7748C106.275 26.701 106.099 26.5823 105.958 26.4284C105.817 26.2745 105.714 26.0895 105.658 25.8885L104.553 21.9613Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M99.8912 21.7577L99.7748 24.4219M98.5009 23.0317L101.165 23.148" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M108.808 31.4894L108.692 34.1536M107.418 32.7633L110.082 32.8796" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<g clip-path="url(#clip12_2501_1466)">
-<mask id="mask8_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="119" y="20" width="18" height="18">
-<path d="M136.61 21.359L120.625 20.6611L119.927 36.6459L135.912 37.3438L136.61 21.359Z" fill="white"/>
-</mask>
-<g mask="url(#mask8_2501_1466)">
-<path d="M131.227 30.0325L131.135 32.1305C128.654 32.0222 127.621 31.9771 125.141 31.8688L125.167 31.2694L131.37 26.7357L131.397 26.1362L125.402 25.8745L125.311 27.9725" stroke="black" stroke-width="1.5"/>
-<path d="M121.884 26.2214L122.058 22.2252L125.555 22.3778" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M134.872 26.7884L135.046 22.7922L131.549 22.6395" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M121.666 31.2166L121.491 35.2128L124.988 35.3655" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M134.653 31.7836L134.479 35.7798L130.982 35.6272" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M149.855 25.3535C149.602 25.0779 149.25 24.9138 148.877 24.8975C148.504 24.8812 148.139 25.0138 147.863 25.2663C147.588 25.5188 147.424 25.8705 147.407 26.244C147.391 26.6174 147.524 26.982 147.776 27.2577L154.153 34.2155C154.264 34.3366 154.402 34.4285 154.557 34.4834L156.702 35.2382C156.745 35.253 156.791 35.256 156.835 35.2468C156.879 35.2376 156.92 35.2167 156.953 35.1862C156.986 35.1557 157.011 35.1167 157.024 35.0735C157.037 35.0302 157.038 34.9843 157.026 34.9405L156.462 32.7373C156.421 32.5788 156.342 32.4327 156.232 32.3119L149.855 25.3535Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M148.337 27.8768L150.422 25.9659" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M147.716 31.4649L146.717 31.4213" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M157.009 28.686L158.008 28.7296" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M150.539 35.5919L150.582 34.5929" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M154.187 24.5589L154.143 25.558" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M148.628 33.5066L147.585 34.462" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M156.098 26.6443L157.14 25.6889" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M180.35 28.2735L182.217 31.3579L180.088 34.2678" stroke="black" stroke-width="1.5"/>
-<path d="M178.287 29.6848L179.22 31.227L178.156 32.682" stroke="black" stroke-width="1.5"/>
-<path d="M175.684 31.9735L175.593 34.0715C173.526 33.9813 172.664 33.9437 170.597 33.8534L170.624 33.254L175.828 28.6767L175.854 28.0772L170.859 27.8591L170.768 29.9571" stroke="black" stroke-width="1.5"/>
-<path d="M200.462 26.1487L199.02 29.9648C198.946 30.1601 198.827 30.3353 198.673 30.4764C198.519 30.6174 198.334 30.7204 198.133 30.7769L194.206 31.8813L198.022 33.3238C198.217 33.3976 198.393 33.5163 198.534 33.6702C198.675 33.8242 198.778 34.0091 198.834 34.2101L199.939 38.1373L201.381 34.3213C201.455 34.126 201.574 33.9507 201.728 33.8097C201.881 33.6686 202.066 33.5657 202.267 33.5092L206.195 32.4047L202.379 30.9623C202.183 30.8884 202.008 30.7698 201.867 30.6158C201.726 30.4619 201.623 30.277 201.566 30.076L200.462 26.1487Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M195.8 25.9452L195.684 28.6093M194.41 27.2191L197.074 27.3354" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M204.717 35.6769L204.601 38.341M203.327 36.9507L205.991 37.0671" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<g clip-path="url(#clip13_2501_1466)">
-<mask id="mask9_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="215" y="24" width="18" height="18">
-<path d="M232.519 25.5465L216.534 24.8486L215.836 40.8333L231.821 41.5313L232.519 25.5465Z" fill="white"/>
-</mask>
-<g mask="url(#mask9_2501_1466)">
-<path d="M227.135 34.22L227.044 36.318C224.563 36.2097 223.53 36.1645 221.049 36.0562L221.075 35.4568L227.279 30.9231L227.305 30.3237L221.311 30.062L221.219 32.16" stroke="black" stroke-width="1.5"/>
-<path d="M217.793 30.4088L217.967 26.4126L221.464 26.5653" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M230.78 30.9758L230.955 26.9796L227.458 26.827" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M217.574 35.4041L217.4 39.4002L220.897 39.5529" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M230.562 35.9711L230.388 39.9673L226.891 39.8146" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M245.763 29.5409C245.511 29.2653 245.159 29.1012 244.786 29.0849C244.412 29.0686 244.048 29.2012 243.772 29.4538C243.496 29.7063 243.332 30.0579 243.316 30.4314C243.3 30.8048 243.432 31.1695 243.685 31.4451L250.062 38.403C250.173 38.524 250.311 38.616 250.466 38.6708L252.611 39.4256C252.654 39.4404 252.699 39.4434 252.743 39.4342C252.788 39.4251 252.829 39.4041 252.862 39.3736C252.895 39.3431 252.919 39.3042 252.932 39.2609C252.945 39.2177 252.946 39.1717 252.935 39.128L252.371 36.9247C252.33 36.7662 252.251 36.6201 252.141 36.4993L245.763 29.5409Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M244.246 32.0642L246.331 30.1534" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M243.625 35.6523L242.625 35.6087" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M252.918 32.8734L253.917 32.917" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M246.447 39.7794L246.491 38.7803" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M250.095 28.7464L250.052 29.7454" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M244.536 37.694L243.494 38.6495" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M252.006 30.8317L253.049 29.8763" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M276.259 32.461L278.126 35.5454L275.997 38.4553" stroke="black" stroke-width="1.5"/>
-<path d="M274.195 33.8723L275.129 35.4145L274.064 36.8695" stroke="black" stroke-width="1.5"/>
-<path d="M271.593 36.161L271.501 38.259C269.434 38.1688 268.573 38.1312 266.506 38.0409L266.532 37.4415L271.737 32.8642L271.763 32.2647L266.768 32.0466L266.676 34.1446" stroke="black" stroke-width="1.5"/>
-<g clip-path="url(#clip14_2501_1466)">
-<mask id="mask10_2501_1466" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="287" y="27" width="18" height="18">
-<path d="M304.45 28.687L288.465 27.9891L287.767 43.9739L303.752 44.6718L304.45 28.687Z" fill="white"/>
-</mask>
-<g mask="url(#mask10_2501_1466)">
-<path d="M299.067 37.3605L298.975 39.4585C296.495 39.3502 295.461 39.3051 292.981 39.1968L293.007 38.5974L299.211 34.0637L299.237 33.4642L293.242 33.2025L293.151 35.3005" stroke="black" stroke-width="1.5"/>
-<path d="M289.724 33.5494L289.898 29.5532L293.395 29.7058" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M302.712 34.1164L302.886 30.1202L299.389 29.9675" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M289.506 38.5446L289.331 42.5408L292.828 42.6935" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-<path d="M302.493 39.1117L302.319 43.1078L298.822 42.9552" stroke="black" stroke-opacity="0.5" stroke-width="1.5"/>
-</g>
-</g>
-<path d="M317.695 32.6815C317.442 32.4059 317.091 32.2419 316.717 32.2255C316.344 32.2092 315.979 32.3419 315.703 32.5944C315.428 32.8469 315.264 33.1986 315.247 33.572C315.231 33.9455 315.364 34.3101 315.616 34.5858L321.993 41.5436C322.104 41.6646 322.243 41.7566 322.397 41.8115L324.542 42.5662C324.585 42.5811 324.631 42.584 324.675 42.5749C324.719 42.5657 324.76 42.5448 324.793 42.5142C324.826 42.4837 324.851 42.4448 324.864 42.4015C324.877 42.3583 324.878 42.3124 324.866 42.2686L324.302 40.0654C324.261 39.9069 324.182 39.7608 324.072 39.6399L317.695 32.6815Z" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M316.177 35.2048L318.262 33.294" stroke="black" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M315.556 38.7929L314.557 38.7493" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M324.849 36.014L325.848 36.0577" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M318.379 42.92L318.422 41.9209" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M322.027 31.887L321.983 32.886" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M316.468 40.8347L315.425 41.7901" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M323.938 33.9724L324.98 33.0169" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M344.325 32.4299L342.882 36.246C342.809 36.4413 342.69 36.6165 342.536 36.7576C342.382 36.8986 342.197 37.0016 341.996 37.0581L338.069 38.1625L341.885 39.605C342.08 39.6788 342.255 39.7975 342.396 39.9514C342.538 40.1054 342.64 40.2903 342.697 40.4913L343.801 44.4185L345.244 40.6025C345.318 40.4072 345.436 40.2319 345.59 40.0909C345.744 39.9498 345.929 39.8469 346.13 39.7903L350.057 38.6859L346.241 37.2434C346.046 37.1696 345.871 37.051 345.73 36.897C345.589 36.7431 345.486 36.5582 345.429 36.3572L344.325 32.4299Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M339.663 32.2264L339.546 34.8905M338.272 33.5003L340.937 33.6166" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M348.58 41.9579L348.464 44.6221M347.19 43.2318L349.854 43.3481" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M372.167 36.6484L374.035 39.7328L371.906 42.6427" stroke="black" stroke-width="1.5"/>
-<path d="M370.104 38.0598L371.038 39.6019L369.973 41.0569" stroke="black" stroke-width="1.5"/>
-<path d="M367.502 40.3485L367.41 42.4465C365.343 42.3562 364.482 42.3186 362.415 42.2284L362.441 41.6289L367.646 37.0516L367.672 36.4522L362.677 36.2341L362.585 38.3321" stroke="black" stroke-width="1.5"/>
-<path d="M392.279 34.5237L390.837 38.3397C390.763 38.535 390.644 38.7103 390.49 38.8513C390.336 38.9924 390.151 39.0953 389.95 39.1518L386.023 40.2563L389.839 41.6987C390.035 41.7726 390.21 41.8912 390.351 42.0452C390.492 42.1991 390.595 42.384 390.651 42.585L391.756 46.5123L393.198 42.6962C393.272 42.5009 393.391 42.3257 393.545 42.1846C393.699 42.0435 393.884 41.9406 394.085 41.8841L398.012 40.7797L394.196 39.3372C394 39.2634 393.825 39.1447 393.684 38.9908C393.543 38.8368 393.44 38.6519 393.384 38.4509L392.279 34.5237Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M387.617 34.3201L387.501 36.9842M386.227 35.594L388.891 35.7103" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M396.534 44.0517L396.418 46.7159M395.144 45.3256L397.808 45.4419" stroke="black" stroke-opacity="0.75" stroke-width="1.42857" stroke-linecap="round" stroke-linejoin="round"/>
-</g>
-<defs>
-<clipPath id="clip0_2501_1466">
-<rect width="400" height="92" fill="white"/>
-</clipPath>
-<clipPath id="clip1_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(109.858 -7.83563) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip2_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(181.79 -4.69507) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip3_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(277.698 -0.507568) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip4_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(301.675 0.539246) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip5_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(349.63 2.633) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip6_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(59.4612 46.0173) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip7_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(155.37 50.2048) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip8_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(251.278 54.3923) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip9_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(275.256 55.4391) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip10_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(323.21 57.5328) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip11_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(371.164 59.6266) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip12_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(120.625 20.6611) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip13_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(216.534 24.8486) rotate(2.5)"/>
-</clipPath>
-<clipPath id="clip14_2501_1466">
-<rect width="16" height="16" fill="white" transform="translate(288.465 27.9891) rotate(2.5)"/>
-</clipPath>
-</defs>
-</svg>

assets/keymaps/default-linux.json 🔗

@@ -1189,6 +1189,7 @@
       "ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
       "ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
       "ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
+      "ctrl-r": ["terminal::SendKeystroke", "ctrl-r"],
       "ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
       "ctrl-shift-a": "editor::SelectAll",
       "find": "buffer_search::Deploy",
@@ -1435,7 +1436,7 @@
   {
     "context": "NotebookEditor",
     "bindings": {
-      "shift-enter": "notebook::Run",
+      "shift-enter": "notebook::RunAndAdvance",
       "ctrl-enter": "notebook::Run",
       "ctrl-shift-enter": "notebook::RunAll",
       "alt-up": "notebook::MoveCellUp",
@@ -1446,11 +1447,19 @@
       "ctrl-c": "notebook::InterruptKernel",
     },
   },
+  {
+    "context": "NotebookEditor && notebook_mode == command",
+    "bindings": {
+      "enter": "notebook::EnterEditMode",
+      "down": "menu::SelectNext",
+      "up": "menu::SelectPrevious",
+    },
+  },
   {
     "context": "NotebookEditor > Editor",
     "bindings": {
       "enter": "editor::Newline",
-      "shift-enter": "notebook::Run",
+      "shift-enter": "notebook::RunAndAdvance",
       "ctrl-enter": "notebook::Run",
       "ctrl-shift-enter": "notebook::RunAll",
       "alt-up": "notebook::MoveCellUp",
@@ -1459,6 +1468,7 @@
       "ctrl-shift-m": "notebook::AddMarkdownBlock",
       "ctrl-shift-r": "notebook::RestartKernel",
       "ctrl-c": "notebook::InterruptKernel",
+      "escape": "notebook::EnterCommandMode",
     },
   },
   {

assets/keymaps/default-macos.json 🔗

@@ -1269,6 +1269,7 @@
       "escape": ["terminal::SendKeystroke", "escape"],
       "enter": ["terminal::SendKeystroke", "enter"],
       "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
+      "ctrl-r": ["terminal::SendKeystroke", "ctrl-r"],
       "ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
       "shift-pageup": "terminal::ScrollPageUp",
       "cmd-up": "terminal::ScrollPageUp",
@@ -1571,7 +1572,7 @@
   {
     "context": "NotebookEditor",
     "bindings": {
-      "shift-enter": "notebook::Run",
+      "shift-enter": "notebook::RunAndAdvance",
       "cmd-enter": "notebook::Run",
       "cmd-shift-enter": "notebook::RunAll",
       "alt-up": "notebook::MoveCellUp",
@@ -1582,11 +1583,19 @@
       "cmd-c": "notebook::InterruptKernel",
     },
   },
+  {
+    "context": "NotebookEditor && notebook_mode == command",
+    "bindings": {
+      "enter": "notebook::EnterEditMode",
+      "down": "menu::SelectNext",
+      "up": "menu::SelectPrevious",
+    },
+  },
   {
     "context": "NotebookEditor > Editor",
     "bindings": {
       "enter": "editor::Newline",
-      "shift-enter": "notebook::Run",
+      "shift-enter": "notebook::RunAndAdvance",
       "cmd-enter": "notebook::Run",
       "cmd-shift-enter": "notebook::RunAll",
       "alt-up": "notebook::MoveCellUp",
@@ -1595,6 +1604,7 @@
       "cmd-shift-m": "notebook::AddMarkdownBlock",
       "cmd-shift-r": "notebook::RestartKernel",
       "cmd-c": "notebook::InterruptKernel",
+      "escape": "notebook::EnterCommandMode",
     },
   },
 ]

assets/keymaps/default-windows.json 🔗

@@ -1488,7 +1488,7 @@
   {
     "context": "NotebookEditor",
     "bindings": {
-      "shift-enter": "notebook::Run",
+      "shift-enter": "notebook::RunAndAdvance",
       "ctrl-enter": "notebook::Run",
       "ctrl-shift-enter": "notebook::RunAll",
       "alt-up": "notebook::MoveCellUp",
@@ -1499,11 +1499,19 @@
       "ctrl-c": "notebook::InterruptKernel",
     },
   },
+  {
+    "context": "NotebookEditor && notebook_mode == command",
+    "bindings": {
+      "enter": "notebook::EnterEditMode",
+      "down": "menu::SelectNext",
+      "up": "menu::SelectPrevious",
+    },
+  },
   {
     "context": "NotebookEditor > Editor",
     "bindings": {
       "enter": "editor::Newline",
-      "shift-enter": "notebook::Run",
+      "shift-enter": "notebook::RunAndAdvance",
       "ctrl-enter": "notebook::Run",
       "ctrl-shift-enter": "notebook::RunAll",
       "alt-up": "notebook::MoveCellUp",
@@ -1512,6 +1520,7 @@
       "ctrl-shift-m": "notebook::AddMarkdownBlock",
       "ctrl-shift-r": "notebook::RestartKernel",
       "ctrl-c": "notebook::InterruptKernel",
+      "escape": "notebook::EnterCommandMode",
     },
   },
 ]

assets/keymaps/vim.json 🔗

@@ -1110,10 +1110,24 @@
   },
   {
     "context": "NotebookEditor > Editor && VimControl && vim_mode == normal",
-
     "bindings": {
       "j": "notebook::NotebookMoveDown",
       "k": "notebook::NotebookMoveUp",
+      "escape": "notebook::EnterCommandMode",
+    },
+  },
+  {
+    "context": "NotebookEditor && notebook_mode == command",
+    "bindings": {
+      "j": "menu::SelectNext",
+      "k": "menu::SelectPrevious",
+      "g g": "menu::SelectFirst",
+      "shift-g": "menu::SelectLast",
+      "i": "notebook::EnterEditMode",
+      "a": "notebook::EnterEditMode",
+      "enter": "notebook::EnterEditMode",
+      "shift-enter": "notebook::RunAndAdvance",
+      "ctrl-enter": "notebook::Run",
     },
   },
   {

assets/settings/default.json 🔗

@@ -717,7 +717,7 @@
     // Default width of the project panel.
     "default_width": 240,
     // Where to dock the project panel. Can be 'left' or 'right'.
-    "dock": "left",
+    "dock": "right",
     // Spacing between worktree entries in the project panel. Can be 'comfortable' or 'standard'.
     "entry_spacing": "comfortable",
     // Whether to show file icons in the project panel.
@@ -819,7 +819,7 @@
     // Default width of the outline panel.
     "default_width": 300,
     // Where to dock the outline panel. Can be 'left' or 'right'.
-    "dock": "left",
+    "dock": "right",
     // Whether to show file icons in the outline panel.
     "file_icons": true,
     // Whether to show folder icons or chevrons for directories in the outline panel.
@@ -871,7 +871,7 @@
     // Whether to show the collaboration panel button in the status bar.
     "button": true,
     // Where to dock the collaboration panel. Can be 'left' or 'right'.
-    "dock": "left",
+    "dock": "right",
     // Default width of the collaboration panel.
     "default_width": 240,
   },
@@ -879,7 +879,7 @@
     // Whether to show the git panel button in the status bar.
     "button": true,
     // Where to dock the git panel. Can be 'left' or 'right'.
-    "dock": "left",
+    "dock": "right",
     // Default width of the git panel.
     "default_width": 360,
     // Style of the git status indicator in the panel.
@@ -936,16 +936,6 @@
     // For example: typing `:wave:` gets replaced with `👋`.
     "auto_replace_emoji_shortcode": true,
   },
-  "notification_panel": {
-    // Whether to show the notification panel button in the status bar.
-    "button": true,
-    // Where to dock the notification panel. Can be 'left' or 'right'.
-    "dock": "right",
-    // Default width of the notification panel.
-    "default_width": 380,
-    // Whether to show a badge on the notification panel icon with the count of unread notifications.
-    "show_count_badge": false,
-  },
   "agent": {
     // Whether the inline assistant should use streaming tools, when available
     "inline_assistant_use_streaming_tools": true,
@@ -954,7 +944,7 @@
     // Whether to show the agent panel button in the status bar.
     "button": true,
     // Where to dock the agent panel. Can be 'left', 'right' or 'bottom'.
-    "dock": "right",
+    "dock": "left",
     // Whether the agent panel should use flexible (proportional) sizing.
     //
     // Default: true
@@ -965,6 +955,9 @@
     "default_width": 640,
     // Default height when the agent panel is docked to the bottom.
     "default_height": 320,
+    // Maximum content width when the agent panel is wider than this value.
+    // Content will be centered within the panel.
+    "max_content_width": 850,
     // The default model to use when creating new threads.
     "default_model": {
       // The provider to use.
@@ -2417,6 +2410,7 @@
     "toggle_relative_line_numbers": false,
     "use_system_clipboard": "always",
     "use_smartcase_find": false,
+    "use_regex_search": true,
     "gdefault": false,
     "highlight_on_yank_duration": 200,
     "custom_digraphs": {},

crates/acp_thread/src/acp_thread.rs 🔗

@@ -1032,6 +1032,7 @@ pub struct AcpThread {
     connection: Rc<dyn AgentConnection>,
     token_usage: Option<TokenUsage>,
     prompt_capabilities: acp::PromptCapabilities,
+    available_commands: Vec<acp::AvailableCommand>,
     _observe_prompt_capabilities: Task<anyhow::Result<()>>,
     terminals: HashMap<acp::TerminalId, Entity<Terminal>>,
     pending_terminal_output: HashMap<acp::TerminalId, Vec<Vec<u8>>>,
@@ -1220,6 +1221,7 @@ impl AcpThread {
             session_id,
             token_usage: None,
             prompt_capabilities,
+            available_commands: Vec::new(),
             _observe_prompt_capabilities: task,
             terminals: HashMap::default(),
             pending_terminal_output: HashMap::default(),
@@ -1239,6 +1241,10 @@ impl AcpThread {
         self.prompt_capabilities.clone()
     }
 
+    pub fn available_commands(&self) -> &[acp::AvailableCommand] {
+        &self.available_commands
+    }
+
     pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> {
         self.draft_prompt.as_deref()
     }
@@ -1419,7 +1425,10 @@ impl AcpThread {
             acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate {
                 available_commands,
                 ..
-            }) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)),
+            }) => {
+                self.available_commands = available_commands.clone();
+                cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands));
+            }
             acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate {
                 current_mode_id,
                 ..

crates/agent/src/tool_permissions.rs 🔗

@@ -574,6 +574,7 @@ mod tests {
             flexible: true,
             default_width: px(300.),
             default_height: px(600.),
+            max_content_width: px(850.),
             default_model: None,
             inline_assistant_model: None,
             inline_assistant_use_streaming_tools: false,

crates/agent/src/tools/read_file_tool.rs 🔗

@@ -5,7 +5,7 @@ use futures::FutureExt as _;
 use gpui::{App, Entity, SharedString, Task};
 use indoc::formatdoc;
 use language::Point;
-use language_model::{LanguageModelImage, LanguageModelToolResultContent};
+use language_model::{LanguageModelImage, LanguageModelImageExt, LanguageModelToolResultContent};
 use project::{AgentLocation, ImageItem, Project, WorktreeSettings, image_store};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};

crates/agent_servers/src/acp.rs 🔗

@@ -325,7 +325,7 @@ impl AcpConnection {
             // Use the one the agent provides if we have one
             .map(|info| info.name.into())
             // Otherwise, just use the name
-            .unwrap_or_else(|| agent_id.0.to_string().into());
+            .unwrap_or_else(|| agent_id.0.clone());
 
         let session_list = if response
             .agent_capabilities

crates/agent_settings/src/agent_settings.rs 🔗

@@ -31,7 +31,6 @@ pub struct PanelLayout {
     pub(crate) outline_panel_dock: Option<DockSide>,
     pub(crate) collaboration_panel_dock: Option<DockPosition>,
     pub(crate) git_panel_dock: Option<DockPosition>,
-    pub(crate) notification_panel_button: Option<bool>,
 }
 
 impl PanelLayout {
@@ -41,7 +40,6 @@ impl PanelLayout {
         outline_panel_dock: Some(DockSide::Right),
         collaboration_panel_dock: Some(DockPosition::Right),
         git_panel_dock: Some(DockPosition::Right),
-        notification_panel_button: Some(false),
     };
 
     const EDITOR: Self = Self {
@@ -50,7 +48,6 @@ impl PanelLayout {
         outline_panel_dock: Some(DockSide::Left),
         collaboration_panel_dock: Some(DockPosition::Left),
         git_panel_dock: Some(DockPosition::Left),
-        notification_panel_button: Some(true),
     };
 
     pub fn is_agent_layout(&self) -> bool {
@@ -68,7 +65,6 @@ impl PanelLayout {
             outline_panel_dock: content.outline_panel.as_ref().and_then(|p| p.dock),
             collaboration_panel_dock: content.collaboration_panel.as_ref().and_then(|p| p.dock),
             git_panel_dock: content.git_panel.as_ref().and_then(|p| p.dock),
-            notification_panel_button: content.notification_panel.as_ref().and_then(|p| p.button),
         }
     }
 
@@ -78,7 +74,6 @@ impl PanelLayout {
         settings.outline_panel.get_or_insert_default().dock = self.outline_panel_dock;
         settings.collaboration_panel.get_or_insert_default().dock = self.collaboration_panel_dock;
         settings.git_panel.get_or_insert_default().dock = self.git_panel_dock;
-        settings.notification_panel.get_or_insert_default().button = self.notification_panel_button;
     }
 
     fn write_diff_to(&self, current_merged: &PanelLayout, settings: &mut SettingsContent) {
@@ -98,10 +93,6 @@ impl PanelLayout {
         if self.git_panel_dock != current_merged.git_panel_dock {
             settings.git_panel.get_or_insert_default().dock = self.git_panel_dock;
         }
-        if self.notification_panel_button != current_merged.notification_panel_button {
-            settings.notification_panel.get_or_insert_default().button =
-                self.notification_panel_button;
-        }
     }
 
     fn backfill_to(&self, user_layout: &PanelLayout, settings: &mut SettingsContent) {
@@ -121,10 +112,6 @@ impl PanelLayout {
         if user_layout.git_panel_dock.is_none() {
             settings.git_panel.get_or_insert_default().dock = self.git_panel_dock;
         }
-        if user_layout.notification_panel_button.is_none() {
-            settings.notification_panel.get_or_insert_default().button =
-                self.notification_panel_button;
-        }
     }
 }
 
@@ -154,6 +141,7 @@ pub struct AgentSettings {
     pub sidebar_side: SidebarDockPosition,
     pub default_width: Pixels,
     pub default_height: Pixels,
+    pub max_content_width: Pixels,
     pub default_model: Option<LanguageModelSelection>,
     pub inline_assistant_model: Option<LanguageModelSelection>,
     pub inline_assistant_use_streaming_tools: bool,
@@ -600,6 +588,7 @@ impl Settings for AgentSettings {
             sidebar_side: agent.sidebar_side.unwrap(),
             default_width: px(agent.default_width.unwrap()),
             default_height: px(agent.default_height.unwrap()),
+            max_content_width: px(agent.max_content_width.unwrap()),
             flexible: agent.flexible.unwrap(),
             default_model: Some(agent.default_model.unwrap()),
             inline_assistant_model: agent.inline_assistant_model,
@@ -739,14 +728,6 @@ mod tests {
     use settings::ToolPermissionMode;
     use settings::ToolPermissionsContent;
 
-    fn set_agent_v2_defaults(cx: &mut gpui::App) {
-        SettingsStore::update_global(cx, |store, cx| {
-            store.update_default_settings(cx, |defaults| {
-                PanelLayout::AGENT.write_to(defaults);
-            });
-        });
-    }
-
     #[test]
     fn test_compiled_regex_case_insensitive() {
         let regex = CompiledRegex::new("rm\\s+-rf", false).unwrap();
@@ -1227,9 +1208,6 @@ mod tests {
         project::DisableAiSettings::register(cx);
         AgentSettings::register(cx);
 
-        // Test defaults are editor layout; switch to agent V2.
-        set_agent_v2_defaults(cx);
-
         // Should be Agent with an empty user layout (user hasn't customized).
         let layout = AgentSettings::get_layout(cx);
         let WindowLayout::Agent(Some(user_layout)) = layout else {
@@ -1255,7 +1233,6 @@ mod tests {
         assert_eq!(user_layout.outline_panel_dock, None);
         assert_eq!(user_layout.collaboration_panel_dock, None);
         assert_eq!(user_layout.git_panel_dock, None);
-        assert_eq!(user_layout.notification_panel_button, None);
 
         // User sets a combination that doesn't match either preset:
         // agent on the left but project panel also on the left.
@@ -1363,9 +1340,6 @@ mod tests {
             project::DisableAiSettings::register(cx);
             AgentSettings::register(cx);
 
-            // Apply the agent V2 defaults.
-            set_agent_v2_defaults(cx);
-
             // User has agent=left (matches preset) and project_panel=left (does not)
             SettingsStore::update_global(cx, |store, cx| {
                 store
@@ -1454,7 +1428,7 @@ mod tests {
 
         cx.run_until_parked();
 
-        // Read back the file and apply it, then switch to agent V2 defaults.
+        // Read back the file and apply it.
         let written = fs.load(paths::settings_file().as_path()).await.unwrap();
         cx.update(|cx| {
             SettingsStore::update_global(cx, |store, cx| {
@@ -1478,10 +1452,6 @@ mod tests {
                 Some(DockPosition::Left)
             );
             assert_eq!(user_layout.git_panel_dock, Some(DockPosition::Left));
-            assert_eq!(user_layout.notification_panel_button, Some(true));
-
-            // Now switch defaults to agent V2.
-            set_agent_v2_defaults(cx);
 
             // Even though defaults are now agent, the backfilled user settings
             // keep everything in the editor layout. The user's experience

crates/agent_ui/src/agent_panel.rs 🔗

@@ -19,7 +19,6 @@ use project::AgentId;
 use serde::{Deserialize, Serialize};
 use settings::{LanguageModelProviderSetting, LanguageModelSelection};
 
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _};
 use zed_actions::agent::{
     AddSelectionToThread, ConflictContent, ReauthenticateAgent, ResolveConflictedFilesWithAgent,
     ResolveConflictsWithAgent, ReviewBranchDiff,
@@ -28,25 +27,24 @@ use zed_actions::agent::{
 use crate::thread_metadata_store::ThreadMetadataStore;
 use crate::{
     AddContextServer, AgentDiffPane, ConversationView, CopyThreadToClipboard, CycleStartThreadIn,
-    Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, OpenActiveThreadAsMarkdown,
-    OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn,
-    ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
+    Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, NewWorktreeBranchTarget,
+    OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell,
+    StartThreadIn, ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
     agent_configuration::{AgentConfiguration, AssistantConfigurationEvent},
     conversation_view::{AcpThreadViewEvent, ThreadView},
+    thread_branch_picker::ThreadBranchPicker,
+    thread_worktree_picker::ThreadWorktreePicker,
     ui::EndTrialUpsell,
 };
 use crate::{
     Agent, AgentInitialContent, ExternalSourcePrompt, NewExternalAgentThread,
     NewNativeAgentThreadFromSummary,
 };
-use crate::{
-    DEFAULT_THREAD_TITLE,
-    ui::{AcpOnboardingModal, HoldForDefault},
-};
+use crate::{DEFAULT_THREAD_TITLE, ui::AcpOnboardingModal};
 use crate::{ExpandMessageEditor, ThreadHistoryView};
 use crate::{ManageProfiles, ThreadHistoryViewEvent};
 use crate::{ThreadHistory, agent_connection_store::AgentConnectionStore};
-use agent_settings::AgentSettings;
+use agent_settings::{AgentSettings, WindowLayout};
 use ai_onboarding::AgentPanelOnboarding;
 use anyhow::{Context as _, Result, anyhow};
 use client::UserStore;
@@ -73,8 +71,8 @@ use terminal::terminal_settings::TerminalSettings;
 use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
 use theme_settings::ThemeSettings;
 use ui::{
-    Button, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, DocumentationSide,
-    PopoverMenu, PopoverMenuHandle, Tab, Tooltip, prelude::*, utils::WithRemSize,
+    Button, ButtonLike, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, PopoverMenu,
+    PopoverMenuHandle, Tab, Tooltip, prelude::*, utils::WithRemSize,
 };
 use util::{ResultExt as _, debug_panic};
 use workspace::{
@@ -281,7 +279,7 @@ pub fn init(cx: &mut App) {
                     if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
                         panel.update(cx, |panel, _| {
                             panel
-                                .on_boarding_upsell_dismissed
+                                .new_user_onboarding_upsell_dismissed
                                 .store(false, Ordering::Release);
                         });
                     }
@@ -616,13 +614,130 @@ enum WhichFontSize {
     None,
 }
 
+struct StartThreadInLabel {
+    prefix: Option<SharedString>,
+    label: SharedString,
+    suffix: Option<SharedString>,
+}
+
 impl StartThreadIn {
-    fn label(&self) -> SharedString {
+    fn trigger_label(&self, project: &Project, cx: &App) -> StartThreadInLabel {
+        match self {
+            Self::LocalProject => {
+                let suffix = project.active_repository(cx).and_then(|repo| {
+                    let repo = repo.read(cx);
+                    let work_dir = &repo.original_repo_abs_path;
+                    let visible_paths: Vec<_> = project
+                        .visible_worktrees(cx)
+                        .map(|wt| wt.read(cx).abs_path().to_path_buf())
+                        .collect();
+
+                    for linked in repo.linked_worktrees() {
+                        if visible_paths.contains(&linked.path) {
+                            return Some(SharedString::from(format!(
+                                "({})",
+                                linked.display_name()
+                            )));
+                        }
+                    }
+
+                    if visible_paths
+                        .iter()
+                        .any(|p| p.as_path() == work_dir.as_ref())
+                    {
+                        return Some("(main)".into());
+                    }
+
+                    None
+                });
+
+                StartThreadInLabel {
+                    prefix: None,
+                    label: "Current Worktree".into(),
+                    suffix,
+                }
+            }
+            Self::NewWorktree {
+                worktree_name: Some(worktree_name),
+                ..
+            } => StartThreadInLabel {
+                prefix: Some("New:".into()),
+                label: worktree_name.clone().into(),
+                suffix: None,
+            },
+            Self::NewWorktree { .. } => StartThreadInLabel {
+                prefix: None,
+                label: "New Git Worktree".into(),
+                suffix: None,
+            },
+            Self::LinkedWorktree { display_name, .. } => StartThreadInLabel {
+                prefix: Some("From:".into()),
+                label: display_name.clone().into(),
+                suffix: None,
+            },
+        }
+    }
+
+    fn branch_trigger_label(&self, project: &Project, cx: &App) -> Option<StartThreadInLabel> {
         match self {
-            Self::LocalProject => "Current Worktree".into(),
-            Self::NewWorktree => "New Git Worktree".into(),
+            Self::NewWorktree { branch_target, .. } => {
+                let (branch_name, is_occupied) = match branch_target {
+                    NewWorktreeBranchTarget::CurrentBranch => {
+                        let name: SharedString = if project.repositories(cx).len() > 1 {
+                            "current branches".into()
+                        } else {
+                            project
+                                .active_repository(cx)
+                                .and_then(|repo| {
+                                    repo.read(cx)
+                                        .branch
+                                        .as_ref()
+                                        .map(|branch| SharedString::from(branch.name().to_string()))
+                                })
+                                .unwrap_or_else(|| "HEAD".into())
+                        };
+                        (name, false)
+                    }
+                    NewWorktreeBranchTarget::ExistingBranch { name } => {
+                        let occupied = Self::is_branch_occupied(name, project, cx);
+                        (name.clone().into(), occupied)
+                    }
+                    NewWorktreeBranchTarget::CreateBranch {
+                        from_ref: Some(from_ref),
+                        ..
+                    } => {
+                        let occupied = Self::is_branch_occupied(from_ref, project, cx);
+                        (from_ref.clone().into(), occupied)
+                    }
+                    NewWorktreeBranchTarget::CreateBranch { name, .. } => {
+                        (name.clone().into(), false)
+                    }
+                };
+
+                let prefix = if is_occupied {
+                    Some("New From:".into())
+                } else {
+                    None
+                };
+
+                Some(StartThreadInLabel {
+                    prefix,
+                    label: branch_name,
+                    suffix: None,
+                })
+            }
+            _ => None,
         }
     }
+
+    fn is_branch_occupied(branch_name: &str, project: &Project, cx: &App) -> bool {
+        project.repositories(cx).values().any(|repo| {
+            repo.read(cx)
+                .linked_worktrees
+                .iter()
+                .any(|wt| wt.branch_name() == Some(branch_name))
+        })
+    }
 }
 
 #[derive(Clone, Debug)]
@@ -632,6 +747,17 @@ pub enum WorktreeCreationStatus {
     Error(SharedString),
 }
 
+#[derive(Clone, Debug)]
+enum WorktreeCreationArgs {
+    New {
+        worktree_name: Option<String>,
+        branch_target: NewWorktreeBranchTarget,
+    },
+    Linked {
+        worktree_path: PathBuf,
+    },
+}
+
 impl ActiveView {
     pub fn which_font_size_used(&self) -> WhichFontSize {
         match self {
@@ -662,7 +788,8 @@ pub struct AgentPanel {
     previous_view: Option<ActiveView>,
     background_threads: HashMap<acp::SessionId, Entity<ConversationView>>,
     new_thread_menu_handle: PopoverMenuHandle<ContextMenu>,
-    start_thread_in_menu_handle: PopoverMenuHandle<ContextMenu>,
+    start_thread_in_menu_handle: PopoverMenuHandle<ThreadWorktreePicker>,
+    thread_branch_menu_handle: PopoverMenuHandle<ThreadBranchPicker>,
     agent_panel_menu_handle: PopoverMenuHandle<ContextMenu>,
     agent_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
     agent_navigation_menu: Option<Entity<ContextMenu>>,
@@ -670,7 +797,10 @@ pub struct AgentPanel {
     _project_subscription: Subscription,
     zoomed: bool,
     pending_serialization: Option<Task<Result<()>>>,
-    onboarding: Entity<AgentPanelOnboarding>,
+    new_user_onboarding: Entity<AgentPanelOnboarding>,
+    new_user_onboarding_upsell_dismissed: AtomicBool,
+    agent_layout_onboarding: Entity<ai_onboarding::AgentLayoutOnboarding>,
+    agent_layout_onboarding_dismissed: AtomicBool,
     selected_agent: Agent,
     start_thread_in: StartThreadIn,
     worktree_creation_status: Option<WorktreeCreationStatus>,
@@ -678,7 +808,6 @@ pub struct AgentPanel {
     _active_thread_focus_subscription: Option<Subscription>,
     _worktree_creation_task: Option<Task<()>>,
     show_trust_workspace_message: bool,
-    on_boarding_upsell_dismissed: AtomicBool,
     _active_view_observation: Option<Subscription>,
 }
 
@@ -689,7 +818,7 @@ impl AgentPanel {
         };
 
         let selected_agent = self.selected_agent.clone();
-        let start_thread_in = Some(self.start_thread_in);
+        let start_thread_in = Some(self.start_thread_in.clone());
 
         let last_active_thread = self.active_agent_thread(cx).map(|thread| {
             let thread = thread.read(cx);
@@ -794,18 +923,19 @@ impl AgentPanel {
                         } else if let Some(agent) = global_fallback {
                             panel.selected_agent = agent;
                         }
-                        if let Some(start_thread_in) = serialized_panel.start_thread_in {
-                            let is_worktree_flag_enabled =
-                                cx.has_flag::<AgentV2FeatureFlag>();
+                        if let Some(ref start_thread_in) = serialized_panel.start_thread_in {
                             let is_valid = match &start_thread_in {
                                 StartThreadIn::LocalProject => true,
-                                StartThreadIn::NewWorktree => {
+                                StartThreadIn::NewWorktree { .. } => {
                                     let project = panel.project.read(cx);
-                                    is_worktree_flag_enabled && !project.is_via_collab()
+                                    !project.is_via_collab()
+                                }
+                                StartThreadIn::LinkedWorktree { path, .. } => {
+                                    path.exists()
                                 }
                             };
                             if is_valid {
-                                panel.start_thread_in = start_thread_in;
+                                panel.start_thread_in = start_thread_in.clone();
                             } else {
                                 log::info!(
                                     "deserialized start_thread_in {:?} is no longer valid, falling back to LocalProject",
@@ -911,18 +1041,55 @@ impl AgentPanel {
                 client,
                 move |_window, cx| {
                     weak_panel
-                        .update(cx, |panel, _| {
-                            panel
-                                .on_boarding_upsell_dismissed
-                                .store(true, Ordering::Release);
+                        .update(cx, |panel, cx| {
+                            panel.dismiss_ai_onboarding(cx);
                         })
                         .ok();
-                    OnboardingUpsell::set_dismissed(true, cx);
                 },
                 cx,
             )
         });
 
+        let weak_panel = cx.entity().downgrade();
+
+        let layout = AgentSettings::get_layout(cx);
+        let is_agent_layout = matches!(layout, WindowLayout::Agent(_));
+
+        let agent_layout_onboarding = cx.new(|_cx| ai_onboarding::AgentLayoutOnboarding {
+            use_agent_layout: Arc::new({
+                let fs = fs.clone();
+                let weak_panel = weak_panel.clone();
+                move |_window, cx| {
+                    AgentSettings::set_layout(WindowLayout::Agent(None), fs.clone(), cx);
+                    weak_panel
+                        .update(cx, |panel, cx| {
+                            panel.dismiss_agent_layout_onboarding(cx);
+                        })
+                        .ok();
+                }
+            }),
+            revert_to_editor_layout: Arc::new({
+                let fs = fs.clone();
+                let weak_panel = weak_panel.clone();
+                move |_window, cx| {
+                    AgentSettings::set_layout(WindowLayout::Editor(None), fs.clone(), cx);
+                    weak_panel
+                        .update(cx, |panel, cx| {
+                            panel.dismiss_agent_layout_onboarding(cx);
+                        })
+                        .ok();
+                }
+            }),
+            dismissed: Arc::new(move |_window, cx| {
+                weak_panel
+                    .update(cx, |panel, cx| {
+                        panel.dismiss_agent_layout_onboarding(cx);
+                    })
+                    .ok();
+            }),
+            is_agent_layout,
+        });
+
         // Subscribe to extension events to sync agent servers when extensions change
         let extension_subscription = if let Some(extension_events) = ExtensionEvents::try_global(cx)
         {
@@ -979,6 +1146,7 @@ impl AgentPanel {
             background_threads: HashMap::default(),
             new_thread_menu_handle: PopoverMenuHandle::default(),
             start_thread_in_menu_handle: PopoverMenuHandle::default(),
+            thread_branch_menu_handle: PopoverMenuHandle::default(),
             agent_panel_menu_handle: PopoverMenuHandle::default(),
             agent_navigation_menu_handle: PopoverMenuHandle::default(),
             agent_navigation_menu: None,
@@ -986,7 +1154,8 @@ impl AgentPanel {
             _project_subscription,
             zoomed: false,
             pending_serialization: None,
-            onboarding,
+            new_user_onboarding: onboarding,
+            agent_layout_onboarding,
             thread_store,
             selected_agent: Agent::default(),
             start_thread_in: StartThreadIn::default(),
@@ -995,7 +1164,10 @@ impl AgentPanel {
             _active_thread_focus_subscription: None,
             _worktree_creation_task: None,
             show_trust_workspace_message: false,
-            on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed(cx)),
+            new_user_onboarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed(cx)),
+            agent_layout_onboarding_dismissed: AtomicBool::new(AgentLayoutOnboarding::dismissed(
+                cx,
+            )),
             _active_view_observation: None,
         };
 
@@ -1948,24 +2120,37 @@ impl AgentPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        if matches!(action, StartThreadIn::NewWorktree) && !cx.has_flag::<AgentV2FeatureFlag>() {
-            return;
-        }
-
-        let new_target = match *action {
+        let new_target = match action {
             StartThreadIn::LocalProject => StartThreadIn::LocalProject,
-            StartThreadIn::NewWorktree => {
+            StartThreadIn::NewWorktree { .. } => {
+                if !self.project_has_git_repository(cx) {
+                    log::error!(
+                        "set_start_thread_in: cannot use worktree mode without a git repository"
+                    );
+                    return;
+                }
+                if self.project.read(cx).is_via_collab() {
+                    log::error!(
+                        "set_start_thread_in: cannot use worktree mode in a collab project"
+                    );
+                    return;
+                }
+                action.clone()
+            }
+            StartThreadIn::LinkedWorktree { .. } => {
                 if !self.project_has_git_repository(cx) {
                     log::error!(
-                        "set_start_thread_in: cannot use NewWorktree without a git repository"
+                        "set_start_thread_in: cannot use LinkedWorktree without a git repository"
                     );
                     return;
                 }
                 if self.project.read(cx).is_via_collab() {
-                    log::error!("set_start_thread_in: cannot use NewWorktree in a collab project");
+                    log::error!(
+                        "set_start_thread_in: cannot use LinkedWorktree in a collab project"
+                    );
                     return;
                 }
-                StartThreadIn::NewWorktree
+                action.clone()
             }
         };
         self.start_thread_in = new_target;
@@ -1977,9 +2162,14 @@ impl AgentPanel {
     }
 
     fn cycle_start_thread_in(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        let next = match self.start_thread_in {
-            StartThreadIn::LocalProject => StartThreadIn::NewWorktree,
-            StartThreadIn::NewWorktree => StartThreadIn::LocalProject,
+        let next = match &self.start_thread_in {
+            StartThreadIn::LocalProject => StartThreadIn::NewWorktree {
+                worktree_name: None,
+                branch_target: NewWorktreeBranchTarget::default(),
+            },
+            StartThreadIn::NewWorktree { .. } | StartThreadIn::LinkedWorktree { .. } => {
+                StartThreadIn::LocalProject
+            }
         };
         self.set_start_thread_in(&next, window, cx);
     }
@@ -1991,7 +2181,10 @@ impl AgentPanel {
             NewThreadLocation::LocalProject => StartThreadIn::LocalProject,
             NewThreadLocation::NewWorktree => {
                 if self.project_has_git_repository(cx) {
-                    StartThreadIn::NewWorktree
+                    StartThreadIn::NewWorktree {
+                        worktree_name: None,
+                        branch_target: NewWorktreeBranchTarget::default(),
+                    }
                 } else {
                     StartThreadIn::LocalProject
                 }
@@ -2219,15 +2412,39 @@ impl AgentPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        if self.start_thread_in == StartThreadIn::NewWorktree {
-            self.handle_worktree_creation_requested(content, window, cx);
-        } else {
-            cx.defer_in(window, move |_this, window, cx| {
-                thread_view.update(cx, |thread_view, cx| {
-                    let editor = thread_view.message_editor.clone();
-                    thread_view.send_impl(editor, window, cx);
+        match &self.start_thread_in {
+            StartThreadIn::NewWorktree {
+                worktree_name,
+                branch_target,
+            } => {
+                self.handle_worktree_requested(
+                    content,
+                    WorktreeCreationArgs::New {
+                        worktree_name: worktree_name.clone(),
+                        branch_target: branch_target.clone(),
+                    },
+                    window,
+                    cx,
+                );
+            }
+            StartThreadIn::LinkedWorktree { path, .. } => {
+                self.handle_worktree_requested(
+                    content,
+                    WorktreeCreationArgs::Linked {
+                        worktree_path: path.clone(),
+                    },
+                    window,
+                    cx,
+                );
+            }
+            StartThreadIn::LocalProject => {
+                cx.defer_in(window, move |_this, window, cx| {
+                    thread_view.update(cx, |thread_view, cx| {
+                        let editor = thread_view.message_editor.clone();
+                        thread_view.send_impl(editor, window, cx);
+                    });
                 });
-            });
+            }
         }
     }
 
@@ -2289,6 +2506,33 @@ impl AgentPanel {
         (git_repos, non_git_paths)
     }
 
+    fn resolve_worktree_branch_target(
+        branch_target: &NewWorktreeBranchTarget,
+        existing_branches: &HashSet<String>,
+        occupied_branches: &HashSet<String>,
+    ) -> Result<(String, bool, Option<String>)> {
+        let generate_branch_name = || -> Result<String> {
+            let refs: Vec<&str> = existing_branches.iter().map(|s| s.as_str()).collect();
+            let mut rng = rand::rng();
+            crate::branch_names::generate_branch_name(&refs, &mut rng)
+                .ok_or_else(|| anyhow!("Failed to generate a unique branch name"))
+        };
+
+        match branch_target {
+            NewWorktreeBranchTarget::CreateBranch { name, from_ref } => {
+                Ok((name.clone(), false, from_ref.clone()))
+            }
+            NewWorktreeBranchTarget::ExistingBranch { name } => {
+                if occupied_branches.contains(name) {
+                    Ok((generate_branch_name()?, false, Some(name.clone())))
+                } else {
+                    Ok((name.clone(), true, None))
+                }
+            }
+            NewWorktreeBranchTarget::CurrentBranch => Ok((generate_branch_name()?, false, None)),
+        }
+    }
+
     /// Kicks off an async git-worktree creation for each repository. Returns:
     ///
     /// - `creation_infos`: a vec of `(repo, new_path, receiver)` tuples—the
@@ -2297,7 +2541,10 @@ impl AgentPanel {
     ///   later to remap open editor tabs into the new workspace.
     fn start_worktree_creations(
         git_repos: &[Entity<project::git_store::Repository>],
+        worktree_name: Option<String>,
         branch_name: &str,
+        use_existing_branch: bool,
+        start_point: Option<String>,
         worktree_directory_setting: &str,
         cx: &mut Context<Self>,
     ) -> Result<(
@@ -2311,12 +2558,27 @@ impl AgentPanel {
         let mut creation_infos = Vec::new();
         let mut path_remapping = Vec::new();
 
+        let worktree_name = worktree_name.unwrap_or_else(|| branch_name.to_string());
+
         for repo in git_repos {
             let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| {
                 let new_path =
-                    repo.path_for_new_linked_worktree(branch_name, worktree_directory_setting)?;
-                let receiver =
-                    repo.create_worktree(branch_name.to_string(), new_path.clone(), None);
+                    repo.path_for_new_linked_worktree(&worktree_name, worktree_directory_setting)?;
+                let target = if use_existing_branch {
+                    debug_assert!(
+                        git_repos.len() == 1,
+                        "use_existing_branch should only be true for a single repo"
+                    );
+                    git::repository::CreateWorktreeTarget::ExistingBranch {
+                        branch_name: branch_name.to_string(),
+                    }
+                } else {
+                    git::repository::CreateWorktreeTarget::NewBranch {
+                        branch_name: branch_name.to_string(),
+                        base_sha: start_point.clone(),
+                    }
+                };
+                let receiver = repo.create_worktree(target, new_path.clone());
                 let work_dir = repo.work_directory_abs_path.clone();
                 anyhow::Ok((work_dir, new_path, receiver))
             })?;
@@ -2419,9 +2681,10 @@ impl AgentPanel {
         cx.notify();
     }
 
-    fn handle_worktree_creation_requested(
+    fn handle_worktree_requested(
         &mut self,
         content: Vec<acp::ContentBlock>,
+        args: WorktreeCreationArgs,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -2437,7 +2700,7 @@ impl AgentPanel {
 
         let (git_repos, non_git_paths) = self.classify_worktrees(cx);
 
-        if git_repos.is_empty() {
+        if matches!(args, WorktreeCreationArgs::New { .. }) && git_repos.is_empty() {
             self.set_worktree_creation_error(
                 "No git repositories found in the project".into(),
                 window,
@@ -2446,17 +2709,31 @@ impl AgentPanel {
             return;
         }
 
-        // Kick off branch listing as early as possible so it can run
-        // concurrently with the remaining synchronous setup work.
-        let branch_receivers: Vec<_> = git_repos
-            .iter()
-            .map(|repo| repo.update(cx, |repo, _cx| repo.branches()))
-            .collect();
-
-        let worktree_directory_setting = ProjectSettings::get_global(cx)
-            .git
-            .worktree_directory
-            .clone();
+        let (branch_receivers, worktree_receivers, worktree_directory_setting) =
+            if matches!(args, WorktreeCreationArgs::New { .. }) {
+                (
+                    Some(
+                        git_repos
+                            .iter()
+                            .map(|repo| repo.update(cx, |repo, _cx| repo.branches()))
+                            .collect::<Vec<_>>(),
+                    ),
+                    Some(
+                        git_repos
+                            .iter()
+                            .map(|repo| repo.update(cx, |repo, _cx| repo.worktrees()))
+                            .collect::<Vec<_>>(),
+                    ),
+                    Some(
+                        ProjectSettings::get_global(cx)
+                            .git
+                            .worktree_directory
+                            .clone(),
+                    ),
+                )
+            } else {
+                (None, None, None)
+            };
 
         let active_file_path = self.workspace.upgrade().and_then(|workspace| {
             let workspace = workspace.read(cx);
@@ -2476,77 +2753,124 @@ impl AgentPanel {
         let selected_agent = self.selected_agent();
 
         let task = cx.spawn_in(window, async move |this, cx| {
-            // Await the branch listings we kicked off earlier.
-            let mut existing_branches = Vec::new();
-            for result in futures::future::join_all(branch_receivers).await {
-                match result {
-                    Ok(Ok(branches)) => {
-                        for branch in branches {
-                            existing_branches.push(branch.name().to_string());
+            let (all_paths, path_remapping, has_non_git) = match args {
+                WorktreeCreationArgs::New {
+                    worktree_name,
+                    branch_target,
+                } => {
+                    let branch_receivers = branch_receivers
+                        .expect("branch receivers must be prepared for new worktree creation");
+                    let worktree_receivers = worktree_receivers
+                        .expect("worktree receivers must be prepared for new worktree creation");
+                    let worktree_directory_setting = worktree_directory_setting
+                        .expect("worktree directory must be prepared for new worktree creation");
+
+                    let mut existing_branches = HashSet::default();
+                    for result in futures::future::join_all(branch_receivers).await {
+                        match result {
+                            Ok(Ok(branches)) => {
+                                for branch in branches {
+                                    existing_branches.insert(branch.name().to_string());
+                                }
+                            }
+                            Ok(Err(err)) => {
+                                Err::<(), _>(err).log_err();
+                            }
+                            Err(_) => {}
                         }
                     }
-                    Ok(Err(err)) => {
-                        Err::<(), _>(err).log_err();
+
+                    let mut occupied_branches = HashSet::default();
+                    for result in futures::future::join_all(worktree_receivers).await {
+                        match result {
+                            Ok(Ok(worktrees)) => {
+                                for worktree in worktrees {
+                                    if let Some(branch_name) = worktree.branch_name() {
+                                        occupied_branches.insert(branch_name.to_string());
+                                    }
+                                }
+                            }
+                            Ok(Err(err)) => {
+                                Err::<(), _>(err).log_err();
+                            }
+                            Err(_) => {}
+                        }
                     }
-                    Err(_) => {}
-                }
-            }
 
-            let existing_branch_refs: Vec<&str> =
-                existing_branches.iter().map(|s| s.as_str()).collect();
-            let mut rng = rand::rng();
-            let branch_name =
-                match crate::branch_names::generate_branch_name(&existing_branch_refs, &mut rng) {
-                    Some(name) => name,
-                    None => {
-                        this.update_in(cx, |this, window, cx| {
-                            this.set_worktree_creation_error(
-                                "Failed to generate a unique branch name".into(),
-                                window,
+                    let (branch_name, use_existing_branch, start_point) =
+                        match Self::resolve_worktree_branch_target(
+                            &branch_target,
+                            &existing_branches,
+                            &occupied_branches,
+                        ) {
+                            Ok(target) => target,
+                            Err(err) => {
+                                this.update_in(cx, |this, window, cx| {
+                                    this.set_worktree_creation_error(
+                                        err.to_string().into(),
+                                        window,
+                                        cx,
+                                    );
+                                })?;
+                                return anyhow::Ok(());
+                            }
+                        };
+
+                    let (creation_infos, path_remapping) =
+                        match this.update_in(cx, |_this, _window, cx| {
+                            Self::start_worktree_creations(
+                                &git_repos,
+                                worktree_name,
+                                &branch_name,
+                                use_existing_branch,
+                                start_point,
+                                &worktree_directory_setting,
                                 cx,
-                            );
-                        })?;
-                        return anyhow::Ok(());
-                    }
-                };
+                            )
+                        }) {
+                            Ok(Ok(result)) => result,
+                            Ok(Err(err)) | Err(err) => {
+                                this.update_in(cx, |this, window, cx| {
+                                    this.set_worktree_creation_error(
+                                        format!("Failed to validate worktree directory: {err}")
+                                            .into(),
+                                        window,
+                                        cx,
+                                    );
+                                })
+                                .log_err();
+                                return anyhow::Ok(());
+                            }
+                        };
 
-            let (creation_infos, path_remapping) = match this.update_in(cx, |_this, _window, cx| {
-                Self::start_worktree_creations(
-                    &git_repos,
-                    &branch_name,
-                    &worktree_directory_setting,
-                    cx,
-                )
-            }) {
-                Ok(Ok(result)) => result,
-                Ok(Err(err)) | Err(err) => {
-                    this.update_in(cx, |this, window, cx| {
-                        this.set_worktree_creation_error(
-                            format!("Failed to validate worktree directory: {err}").into(),
-                            window,
-                            cx,
-                        );
-                    })
-                    .log_err();
-                    return anyhow::Ok(());
-                }
-            };
+                    let created_paths =
+                        match Self::await_and_rollback_on_failure(creation_infos, cx).await {
+                            Ok(paths) => paths,
+                            Err(err) => {
+                                this.update_in(cx, |this, window, cx| {
+                                    this.set_worktree_creation_error(
+                                        format!("{err}").into(),
+                                        window,
+                                        cx,
+                                    );
+                                })?;
+                                return anyhow::Ok(());
+                            }
+                        };
 
-            let created_paths = match Self::await_and_rollback_on_failure(creation_infos, cx).await
-            {
-                Ok(paths) => paths,
-                Err(err) => {
-                    this.update_in(cx, |this, window, cx| {
-                        this.set_worktree_creation_error(format!("{err}").into(), window, cx);
-                    })?;
-                    return anyhow::Ok(());
+                    let mut all_paths = created_paths;
+                    let has_non_git = !non_git_paths.is_empty();
+                    all_paths.extend(non_git_paths.iter().cloned());
+                    (all_paths, path_remapping, has_non_git)
+                }
+                WorktreeCreationArgs::Linked { worktree_path } => {
+                    let mut all_paths = vec![worktree_path];
+                    let has_non_git = !non_git_paths.is_empty();
+                    all_paths.extend(non_git_paths.iter().cloned());
+                    (all_paths, Vec::new(), has_non_git)
                 }
             };
 
-            let mut all_paths = created_paths;
-            let has_non_git = !non_git_paths.is_empty();
-            all_paths.extend(non_git_paths.iter().cloned());
-
             let app_state = match workspace.upgrade() {
                 Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?,
                 None => {
@@ -2562,7 +2886,7 @@ impl AgentPanel {
             };
 
             let this_for_error = this.clone();
-            if let Err(err) = Self::setup_new_workspace(
+            if let Err(err) = Self::open_worktree_workspace_and_start_thread(
                 this,
                 all_paths,
                 app_state,
@@ -2595,7 +2919,7 @@ impl AgentPanel {
         }));
     }
 
-    async fn setup_new_workspace(
+    async fn open_worktree_workspace_and_start_thread(
         this: WeakEntity<Self>,
         all_paths: Vec<PathBuf>,
         app_state: Arc<workspace::AppState>,
@@ -2989,17 +3313,11 @@ impl AgentPanel {
 
     fn render_panel_options_menu(
         &self,
-        window: &mut Window,
+        _window: &mut Window,
         cx: &mut Context<Self>,
     ) -> impl IntoElement {
         let focus_handle = self.focus_handle(cx);
 
-        let full_screen_label = if self.is_zoomed(window, cx) {
-            "Disable Full Screen"
-        } else {
-            "Enable Full Screen"
-        };
-
         let conversation_view = match &self.active_view {
             ActiveView::AgentThread { conversation_view } => Some(conversation_view.clone()),
             _ => None,
@@ -3075,8 +3393,7 @@ impl AgentPanel {
                             .action("Profiles", Box::new(ManageProfiles::default()))
                             .action("Settings", Box::new(OpenSettings))
                             .separator()
-                            .action("Toggle Threads Sidebar", Box::new(ToggleWorkspaceSidebar))
-                            .action(full_screen_label, Box::new(ToggleZoom));
+                            .action("Toggle Threads Sidebar", Box::new(ToggleWorkspaceSidebar));
 
                         if has_auth_methods {
                             menu = menu.action("Reauthenticate", Box::new(ReauthenticateAgent))
@@ -3088,47 +3405,6 @@ impl AgentPanel {
             })
     }
 
-    fn render_recent_entries_menu(
-        &self,
-        icon: IconName,
-        corner: Corner,
-        cx: &mut Context<Self>,
-    ) -> impl IntoElement {
-        let focus_handle = self.focus_handle(cx);
-
-        PopoverMenu::new("agent-nav-menu")
-            .trigger_with_tooltip(
-                IconButton::new("agent-nav-menu", icon).icon_size(IconSize::Small),
-                {
-                    move |_window, cx| {
-                        Tooltip::for_action_in(
-                            "Toggle Recently Updated Threads",
-                            &ToggleNavigationMenu,
-                            &focus_handle,
-                            cx,
-                        )
-                    }
-                },
-            )
-            .anchor(corner)
-            .with_handle(self.agent_navigation_menu_handle.clone())
-            .menu({
-                let menu = self.agent_navigation_menu.clone();
-                move |window, cx| {
-                    telemetry::event!("View Thread History Clicked");
-
-                    if let Some(menu) = menu.as_ref() {
-                        menu.update(cx, |_, cx| {
-                            cx.defer_in(window, |menu, window, cx| {
-                                menu.rebuild(window, cx);
-                            });
-                        })
-                    }
-                    menu.clone()
-                }
-            })
-    }
-
     fn render_toolbar_back_button(&self, cx: &mut Context<Self>) -> impl IntoElement {
         let focus_handle = self.focus_handle(cx);
 
@@ -3149,24 +3425,16 @@ impl AgentPanel {
     }
 
     fn render_start_thread_in_selector(&self, cx: &mut Context<Self>) -> impl IntoElement {
-        use settings::{NewThreadLocation, Settings};
-
         let focus_handle = self.focus_handle(cx);
-        let has_git_repo = self.project_has_git_repository(cx);
-        let is_via_collab = self.project.read(cx).is_via_collab();
-        let fs = self.fs.clone();
 
         let is_creating = matches!(
             self.worktree_creation_status,
             Some(WorktreeCreationStatus::Creating)
         );
 
-        let current_target = self.start_thread_in;
-        let trigger_label = self.start_thread_in.label();
-
-        let new_thread_location = AgentSettings::get_global(cx).new_thread_location;
-        let is_local_default = new_thread_location == NewThreadLocation::LocalProject;
-        let is_new_worktree_default = new_thread_location == NewThreadLocation::NewWorktree;
+        let trigger_parts = self
+            .start_thread_in
+            .trigger_label(self.project.read(cx), cx);
 
         let icon = if self.start_thread_in_menu_handle.is_deployed() {
             IconName::ChevronUp
@@ -3174,17 +3442,20 @@ impl AgentPanel {
             IconName::ChevronDown
         };
 
-        let trigger_button = Button::new("thread-target-trigger", trigger_label)
-            .end_icon(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted))
-            .disabled(is_creating);
+        let trigger_button = ButtonLike::new("thread-target-trigger")
+            .disabled(is_creating)
+            .when_some(trigger_parts.prefix, |this, prefix| {
+                this.child(Label::new(prefix).color(Color::Muted))
+            })
+            .child(Label::new(trigger_parts.label))
+            .when_some(trigger_parts.suffix, |this, suffix| {
+                this.child(Label::new(suffix).color(Color::Muted))
+            })
+            .child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted));
 
-        let dock_position = AgentSettings::get_global(cx).dock;
-        let documentation_side = match dock_position {
-            settings::DockPosition::Left => DocumentationSide::Right,
-            settings::DockPosition::Bottom | settings::DockPosition::Right => {
-                DocumentationSide::Left
-            }
-        };
+        let project = self.project.clone();
+        let current_target = self.start_thread_in.clone();
+        let fs = self.fs.clone();
 
         PopoverMenu::new("thread-target-selector")
             .trigger_with_tooltip(trigger_button, {
@@ -3198,89 +3469,60 @@ impl AgentPanel {
                 }
             })
             .menu(move |window, cx| {
-                let is_local_selected = current_target == StartThreadIn::LocalProject;
-                let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree;
                 let fs = fs.clone();
+                Some(cx.new(|cx| {
+                    ThreadWorktreePicker::new(project.clone(), &current_target, fs, window, cx)
+                }))
+            })
+            .with_handle(self.start_thread_in_menu_handle.clone())
+            .anchor(Corner::TopLeft)
+            .offset(gpui::Point {
+                x: px(1.0),
+                y: px(1.0),
+            })
+    }
+
+    fn render_new_worktree_branch_selector(&self, cx: &mut Context<Self>) -> impl IntoElement {
+        let is_creating = matches!(
+            self.worktree_creation_status,
+            Some(WorktreeCreationStatus::Creating)
+        );
 
-                Some(ContextMenu::build(window, cx, move |menu, _window, _cx| {
-                    let new_worktree_disabled = !has_git_repo || is_via_collab;
+        let project_ref = self.project.read(cx);
+        let trigger_parts = self
+            .start_thread_in
+            .branch_trigger_label(project_ref, cx)
+            .unwrap_or_else(|| StartThreadInLabel {
+                prefix: Some("From:".into()),
+                label: "HEAD".into(),
+                suffix: None,
+            });
 
-                    menu.header("Start Thread In…")
-                        .item(
-                            ContextMenuEntry::new("Current Worktree")
-                                .toggleable(IconPosition::End, is_local_selected)
-                                .documentation_aside(documentation_side, move |_| {
-                                    HoldForDefault::new(is_local_default)
-                                        .more_content(false)
-                                        .into_any_element()
-                                })
-                                .handler({
-                                    let fs = fs.clone();
-                                    move |window, cx| {
-                                        if window.modifiers().secondary() {
-                                            update_settings_file(fs.clone(), cx, |settings, _| {
-                                                settings
-                                                    .agent
-                                                    .get_or_insert_default()
-                                                    .set_new_thread_location(
-                                                        NewThreadLocation::LocalProject,
-                                                    );
-                                            });
-                                        }
-                                        window.dispatch_action(
-                                            Box::new(StartThreadIn::LocalProject),
-                                            cx,
-                                        );
-                                    }
-                                }),
-                        )
-                        .item({
-                            let entry = ContextMenuEntry::new("New Git Worktree")
-                                .toggleable(IconPosition::End, is_new_worktree_selected)
-                                .disabled(new_worktree_disabled)
-                                .handler({
-                                    let fs = fs.clone();
-                                    move |window, cx| {
-                                        if window.modifiers().secondary() {
-                                            update_settings_file(fs.clone(), cx, |settings, _| {
-                                                settings
-                                                    .agent
-                                                    .get_or_insert_default()
-                                                    .set_new_thread_location(
-                                                        NewThreadLocation::NewWorktree,
-                                                    );
-                                            });
-                                        }
-                                        window.dispatch_action(
-                                            Box::new(StartThreadIn::NewWorktree),
-                                            cx,
-                                        );
-                                    }
-                                });
-
-                            if new_worktree_disabled {
-                                entry.documentation_aside(documentation_side, move |_| {
-                                    let reason = if !has_git_repo {
-                                        "No git repository found in this project."
-                                    } else {
-                                        "Not available for remote/collab projects yet."
-                                    };
-                                    Label::new(reason)
-                                        .color(Color::Muted)
-                                        .size(LabelSize::Small)
-                                        .into_any_element()
-                                })
-                            } else {
-                                entry.documentation_aside(documentation_side, move |_| {
-                                    HoldForDefault::new(is_new_worktree_default)
-                                        .more_content(false)
-                                        .into_any_element()
-                                })
-                            }
-                        })
+        let icon = if self.thread_branch_menu_handle.is_deployed() {
+            IconName::ChevronUp
+        } else {
+            IconName::ChevronDown
+        };
+
+        let trigger_button = ButtonLike::new("thread-branch-trigger")
+            .disabled(is_creating)
+            .when_some(trigger_parts.prefix, |this, prefix| {
+                this.child(Label::new(prefix).color(Color::Muted))
+            })
+            .child(Label::new(trigger_parts.label))
+            .child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted));
+
+        let project = self.project.clone();
+        let current_target = self.start_thread_in.clone();
+
+        PopoverMenu::new("thread-branch-selector")
+            .trigger_with_tooltip(trigger_button, Tooltip::text("Choose Worktree Branch…"))
+            .menu(move |window, cx| {
+                Some(cx.new(|cx| {
+                    ThreadBranchPicker::new(project.clone(), &current_target, window, cx)
                 }))
             })
-            .with_handle(self.start_thread_in_menu_handle.clone())
+            .with_handle(self.thread_branch_menu_handle.clone())
             .anchor(Corner::TopLeft)
             .offset(gpui::Point {
                 x: px(1.0),

crates/agent_ui/src/agent_registry_ui.rs 🔗

@@ -382,7 +382,7 @@ impl AgentRegistryPage {
             self.install_button(agent, install_status, supports_current_platform, cx);
 
         let repository_button = agent.repository().map(|repository| {
-            let repository_for_tooltip: SharedString = repository.to_string().into();
+            let repository_for_tooltip = repository.clone();
             let repository_for_click = repository.to_string();
 
             IconButton::new(

crates/agent_ui/src/agent_ui.rs 🔗

@@ -28,14 +28,17 @@ mod terminal_codegen;
 mod terminal_inline_assistant;
 #[cfg(any(test, feature = "test-support"))]
 pub mod test_support;
+mod thread_branch_picker;
 mod thread_history;
 mod thread_history_view;
 mod thread_import;
 pub mod thread_metadata_store;
 pub mod thread_worktree_archive;
+mod thread_worktree_picker;
 pub mod threads_archive_view;
 mod ui;
 
+use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
 
@@ -43,9 +46,9 @@ use ::ui::IconName;
 use agent_client_protocol as acp;
 use agent_settings::{AgentProfileId, AgentSettings};
 use command_palette_hooks::CommandPaletteFilter;
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _};
+use feature_flags::FeatureFlagAppExt as _;
 use fs::Fs;
-use gpui::{Action, App, Context, Entity, SharedString, UpdateGlobal as _, Window, actions};
+use gpui::{Action, App, Context, Entity, SharedString, Window, actions};
 use language::{
     LanguageRegistry,
     language_settings::{AllLanguageSettings, EditPredictionProvider},
@@ -57,7 +60,7 @@ use project::{AgentId, DisableAiSettings};
 use prompt_store::PromptBuilder;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{DockPosition, DockSide, LanguageModelSelection, Settings as _, SettingsStore};
+use settings::{LanguageModelSelection, Settings as _, SettingsStore};
 use std::any::TypeId;
 use workspace::Workspace;
 
@@ -78,7 +81,6 @@ use zed_actions;
 
 pub const DEFAULT_THREAD_TITLE: &str = "New Thread";
 const PARALLEL_AGENT_LAYOUT_BACKFILL_KEY: &str = "parallel_agent_layout_backfilled";
-
 actions!(
     agent,
     [
@@ -315,16 +317,42 @@ impl Agent {
     }
 }
 
+/// Describes which branch to use when creating a new git worktree.
+#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case", tag = "kind")]
+pub enum NewWorktreeBranchTarget {
+    /// Create a new randomly named branch from the current HEAD.
+    /// The branch name will match the worktree name if the worktree was also randomly named.
+    #[default]
+    CurrentBranch,
+    /// Check out an existing branch, or create a new branch from it if it's
+    /// already occupied by another worktree.
+    ExistingBranch { name: String },
+    /// Create a new branch with an explicit name, optionally from a specific ref.
+    CreateBranch {
+        name: String,
+        #[serde(default)]
+        from_ref: Option<String>,
+    },
+}
+
 /// Sets where new threads will run.
-#[derive(
-    Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action,
-)]
+#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action)]
 #[action(namespace = agent)]
 #[serde(rename_all = "snake_case", tag = "kind")]
 pub enum StartThreadIn {
     #[default]
     LocalProject,
-    NewWorktree,
+    NewWorktree {
+        /// When this is None, Zed will randomly generate a worktree name;
+        /// otherwise, the provided name will be used.
+        #[serde(default)]
+        worktree_name: Option<String>,
+        #[serde(default)]
+        branch_target: NewWorktreeBranchTarget,
+    },
+    /// A linked worktree that already exists on disk.
+    LinkedWorktree { path: PathBuf, display_name: String },
 }
 
 /// Content to initialize new external agent with.
@@ -483,45 +511,10 @@ pub fn init(
     })
     .detach();
 
-    // TODO: remove this field when we're ready remove the feature flag
-    maybe_backfill_editor_layout(fs, is_new_install, false, cx);
-
-    cx.observe_flag::<AgentV2FeatureFlag, _>(|is_enabled, cx| {
-        SettingsStore::update_global(cx, |store, cx| {
-            store.update_default_settings(cx, |defaults| {
-                if is_enabled {
-                    defaults.agent.get_or_insert_default().dock = Some(DockPosition::Left);
-                    defaults.project_panel.get_or_insert_default().dock = Some(DockSide::Right);
-                    defaults.outline_panel.get_or_insert_default().dock = Some(DockSide::Right);
-                    defaults.collaboration_panel.get_or_insert_default().dock =
-                        Some(DockPosition::Right);
-                    defaults.git_panel.get_or_insert_default().dock = Some(DockPosition::Right);
-                    defaults.notification_panel.get_or_insert_default().button = Some(false);
-                } else {
-                    defaults.agent.get_or_insert_default().dock = Some(DockPosition::Right);
-                    defaults.project_panel.get_or_insert_default().dock = Some(DockSide::Left);
-                    defaults.outline_panel.get_or_insert_default().dock = Some(DockSide::Left);
-                    defaults.collaboration_panel.get_or_insert_default().dock =
-                        Some(DockPosition::Left);
-                    defaults.git_panel.get_or_insert_default().dock = Some(DockPosition::Left);
-                    defaults.notification_panel.get_or_insert_default().button = Some(true);
-                }
-            });
-        });
-    })
-    .detach();
+    maybe_backfill_editor_layout(fs, is_new_install, cx);
 }
 
-fn maybe_backfill_editor_layout(
-    fs: Arc<dyn Fs>,
-    is_new_install: bool,
-    should_run: bool,
-    cx: &mut App,
-) {
-    if !should_run {
-        return;
-    }
-
+fn maybe_backfill_editor_layout(fs: Arc<dyn Fs>, is_new_install: bool, cx: &mut App) {
     let kvp = db::kvp::KeyValueStore::global(cx);
     let already_backfilled =
         util::ResultExt::log_err(kvp.read_kvp(PARALLEL_AGENT_LAYOUT_BACKFILL_KEY))
@@ -546,7 +539,7 @@ fn maybe_backfill_editor_layout(
 fn update_command_palette_filter(cx: &mut App) {
     let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
     let agent_enabled = AgentSettings::get_global(cx).enabled;
-    let agent_v2_enabled = cx.has_flag::<AgentV2FeatureFlag>();
+
     let edit_prediction_provider = AllLanguageSettings::get_global(cx)
         .edit_predictions
         .provider;
@@ -615,11 +608,7 @@ fn update_command_palette_filter(cx: &mut App) {
             filter.show_action_types(&[TypeId::of::<zed_actions::OpenZedPredictOnboarding>()]);
         }
 
-        if agent_v2_enabled {
-            filter.show_namespace("multi_workspace");
-        } else {
-            filter.hide_namespace("multi_workspace");
-        }
+        filter.show_namespace("multi_workspace");
     });
 }
 
@@ -688,7 +677,6 @@ mod tests {
     use command_palette_hooks::CommandPaletteFilter;
     use db::kvp::KeyValueStore;
     use editor::actions::AcceptEditPrediction;
-    use feature_flags::FeatureFlagAppExt;
     use gpui::{BorrowAppContext, TestAppContext, px};
     use project::DisableAiSettings;
     use settings::{
@@ -714,6 +702,7 @@ mod tests {
             flexible: true,
             default_width: px(300.),
             default_height: px(600.),
+            max_content_width: px(850.),
             default_model: None,
             inline_assistant_model: None,
             inline_assistant_use_streaming_tools: false,
@@ -856,7 +845,7 @@ mod tests {
                     .is_none()
             );
 
-            maybe_backfill_editor_layout(fs.clone(), false, true, cx);
+            maybe_backfill_editor_layout(fs.clone(), false, cx);
         });
 
         cx.run_until_parked();
@@ -875,7 +864,7 @@ mod tests {
         let fs = setup_backfill_test(cx).await;
 
         cx.update(|cx| {
-            maybe_backfill_editor_layout(fs.clone(), true, true, cx);
+            maybe_backfill_editor_layout(fs.clone(), true, cx);
         });
 
         cx.run_until_parked();
@@ -897,7 +886,7 @@ mod tests {
         let fs = setup_backfill_test(cx).await;
 
         cx.update(|cx| {
-            maybe_backfill_editor_layout(fs.clone(), false, true, cx);
+            maybe_backfill_editor_layout(fs.clone(), false, cx);
         });
 
         cx.run_until_parked();
@@ -905,7 +894,7 @@ mod tests {
         let after_first = fs.load(paths::settings_file().as_path()).await.unwrap();
 
         cx.update(|cx| {
-            maybe_backfill_editor_layout(fs.clone(), false, true, cx);
+            maybe_backfill_editor_layout(fs.clone(), false, cx);
         });
 
         cx.run_until_parked();

crates/agent_ui/src/config_options.rs 🔗

@@ -3,7 +3,7 @@ use std::{cmp::Reverse, rc::Rc, sync::Arc};
 use acp_thread::AgentSessionConfigOptions;
 use agent_client_protocol as acp;
 use agent_servers::AgentServer;
-use agent_settings::AgentSettings;
+
 use collections::HashSet;
 use fs::Fs;
 use fuzzy::StringMatchCandidate;
@@ -13,14 +13,13 @@ use gpui::{
 use ordered_float::OrderedFloat;
 use picker::popover_menu::PickerPopoverMenu;
 use picker::{Picker, PickerDelegate};
-use settings::{Settings, SettingsStore};
+use settings::SettingsStore;
 use ui::{
-    DocumentationSide, ElevationIndex, IconButton, ListItem, ListItemSpacing, PopoverMenuHandle,
-    Tooltip, prelude::*,
+    ElevationIndex, IconButton, ListItem, ListItemSpacing, PopoverMenuHandle, Tooltip, prelude::*,
 };
 use util::ResultExt as _;
 
-use crate::ui::HoldForDefault;
+use crate::ui::{HoldForDefault, documentation_aside_side};
 
 const PICKER_THRESHOLD: usize = 5;
 
@@ -695,13 +694,7 @@ impl PickerDelegate for ConfigOptionPickerDelegate {
                 let description = description.clone();
                 let is_default = *is_default;
 
-                let settings = AgentSettings::get_global(cx);
-                let side = match settings.dock {
-                    settings::DockPosition::Left => DocumentationSide::Right,
-                    settings::DockPosition::Bottom | settings::DockPosition::Right => {
-                        DocumentationSide::Left
-                    }
-                };
+                let side = documentation_aside_side(cx);
 
                 ui::DocumentationAside::new(
                     side,

crates/agent_ui/src/conversation_view.rs 🔗

@@ -22,7 +22,7 @@ use editor::scroll::Autoscroll;
 use editor::{
     Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects, SizingBehavior,
 };
-use feature_flags::{AgentSharingFeatureFlag, AgentV2FeatureFlag, FeatureFlagAppExt as _};
+use feature_flags::{AgentSharingFeatureFlag, FeatureFlagAppExt as _};
 use file_icons::FileIcons;
 use fs::Fs;
 use futures::FutureExt as _;
@@ -54,8 +54,8 @@ use theme_settings::AgentFontSize;
 use ui::{
     Callout, CircularProgress, CommonAnimationExt, ContextMenu, ContextMenuEntry, CopyButton,
     DecoratedIcon, DiffStat, Disclosure, Divider, DividerColor, IconDecoration, IconDecorationKind,
-    KeyBinding, PopoverMenu, PopoverMenuHandle, SpinnerLabel, TintColor, Tooltip, WithScrollbar,
-    prelude::*, right_click_menu,
+    KeyBinding, PopoverMenu, PopoverMenuHandle, TintColor, Tooltip, WithScrollbar, prelude::*,
+    right_click_menu,
 };
 use util::{ResultExt, size::format_file_size, time::duration_alt_display};
 use util::{debug_panic, defer};
@@ -812,7 +812,7 @@ impl ConversationView {
         let agent_id = self.agent.agent_id();
         let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
             thread.read(cx).prompt_capabilities(),
-            vec![],
+            thread.read(cx).available_commands().to_vec(),
         )));
 
         let action_log = thread.read(cx).action_log().clone();
@@ -1448,40 +1448,24 @@ impl ConversationView {
                 self.emit_token_limit_telemetry_if_needed(thread, cx);
             }
             AcpThreadEvent::AvailableCommandsUpdated(available_commands) => {
-                let mut available_commands = available_commands.clone();
+                if let Some(thread_view) = self.thread_view(&thread_id) {
+                    let has_commands = !available_commands.is_empty();
 
-                if thread
-                    .read(cx)
-                    .connection()
-                    .auth_methods()
-                    .iter()
-                    .any(|method| method.id().0.as_ref() == "claude-login")
-                {
-                    available_commands.push(acp::AvailableCommand::new("login", "Authenticate"));
-                    available_commands.push(acp::AvailableCommand::new("logout", "Authenticate"));
-                }
-
-                let has_commands = !available_commands.is_empty();
-                if let Some(active) = self.active_thread() {
-                    active.update(cx, |active, _cx| {
-                        active
-                            .session_capabilities
-                            .write()
-                            .set_available_commands(available_commands);
-                    });
-                }
-
-                let agent_display_name = self
-                    .agent_server_store
-                    .read(cx)
-                    .agent_display_name(&self.agent.agent_id())
-                    .unwrap_or_else(|| self.agent.agent_id().0.to_string().into());
+                    let agent_display_name = self
+                        .agent_server_store
+                        .read(cx)
+                        .agent_display_name(&self.agent.agent_id())
+                        .unwrap_or_else(|| self.agent.agent_id().0.to_string().into());
 
-                if let Some(active) = self.active_thread() {
                     let new_placeholder =
                         placeholder_text(agent_display_name.as_ref(), has_commands);
-                    active.update(cx, |active, cx| {
-                        active.message_editor.update(cx, |editor, cx| {
+
+                    thread_view.update(cx, |thread_view, cx| {
+                        thread_view
+                            .session_capabilities
+                            .write()
+                            .set_available_commands(available_commands.clone());
+                        thread_view.message_editor.update(cx, |editor, cx| {
                             editor.set_placeholder_text(&new_placeholder, window, cx);
                         });
                     });
@@ -2348,9 +2332,9 @@ impl ConversationView {
         }
     }
 
+    #[cfg(feature = "audio")]
     fn play_notification_sound(&self, window: &Window, cx: &mut App) {
-        let settings = AgentSettings::get_global(cx);
-        let _visible = window.is_window_active()
+        let visible = window.is_window_active()
             && if let Some(mw) = window.root::<MultiWorkspace>().flatten() {
                 self.agent_panel_visible(&mw, cx)
             } else {
@@ -2358,8 +2342,8 @@ impl ConversationView {
                     .upgrade()
                     .is_some_and(|workspace| AgentPanel::is_visible(&workspace, cx))
             };
-        #[cfg(feature = "audio")]
-        if settings.play_sound_when_agent_done.should_play(_visible) {
+        let settings = AgentSettings::get_global(cx);
+        if settings.play_sound_when_agent_done.should_play(visible) {
             Audio::play_sound(Sound::AgentDone, cx);
         }
     }
@@ -2661,7 +2645,6 @@ impl ConversationView {
 impl Render for ConversationView {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         self.sync_queued_message_editors(window, cx);
-        let v2_flag = cx.has_flag::<AgentV2FeatureFlag>();
 
         v_flex()
             .track_focus(&self.focus_handle)
@@ -2670,17 +2653,18 @@ impl Render for ConversationView {
             .child(match &self.server_state {
                 ServerState::Loading { .. } => v_flex()
                     .flex_1()
-                    .when(v2_flag, |this| {
-                        this.size_full().items_center().justify_center().child(
-                            Label::new("Loading…").color(Color::Muted).with_animation(
-                                "loading-agent-label",
-                                Animation::new(Duration::from_secs(2))
-                                    .repeat()
-                                    .with_easing(pulsating_between(0.3, 0.7)),
-                                |label, delta| label.alpha(delta),
-                            ),
-                        )
-                    })
+                    .size_full()
+                    .items_center()
+                    .justify_center()
+                    .child(
+                        Label::new("Loading…").color(Color::Muted).with_animation(
+                            "loading-agent-label",
+                            Animation::new(Duration::from_secs(2))
+                                .repeat()
+                                .with_easing(pulsating_between(0.3, 0.7)),
+                            |label, delta| label.alpha(delta),
+                        ),
+                    )
                     .into_any(),
                 ServerState::LoadError { error: e, .. } => v_flex()
                     .flex_1()
@@ -2989,6 +2973,166 @@ pub(crate) mod tests {
         });
     }
 
+    #[derive(Clone)]
+    struct RestoredAvailableCommandsConnection;
+
+    impl AgentConnection for RestoredAvailableCommandsConnection {
+        fn agent_id(&self) -> AgentId {
+            AgentId::new("restored-available-commands")
+        }
+
+        fn telemetry_id(&self) -> SharedString {
+            "restored-available-commands".into()
+        }
+
+        fn new_session(
+            self: Rc<Self>,
+            project: Entity<Project>,
+            _work_dirs: PathList,
+            cx: &mut App,
+        ) -> Task<gpui::Result<Entity<AcpThread>>> {
+            let thread = build_test_thread(
+                self,
+                project,
+                "RestoredAvailableCommandsConnection",
+                SessionId::new("new-session"),
+                cx,
+            );
+            Task::ready(Ok(thread))
+        }
+
+        fn supports_load_session(&self) -> bool {
+            true
+        }
+
+        fn load_session(
+            self: Rc<Self>,
+            session_id: acp::SessionId,
+            project: Entity<Project>,
+            _work_dirs: PathList,
+            _title: Option<SharedString>,
+            cx: &mut App,
+        ) -> Task<gpui::Result<Entity<AcpThread>>> {
+            let thread = build_test_thread(
+                self,
+                project,
+                "RestoredAvailableCommandsConnection",
+                session_id,
+                cx,
+            );
+
+            thread
+                .update(cx, |thread, cx| {
+                    thread.handle_session_update(
+                        acp::SessionUpdate::AvailableCommandsUpdate(
+                            acp::AvailableCommandsUpdate::new(vec![acp::AvailableCommand::new(
+                                "help", "Get help",
+                            )]),
+                        ),
+                        cx,
+                    )
+                })
+                .expect("available commands update should succeed");
+
+            Task::ready(Ok(thread))
+        }
+
+        fn auth_methods(&self) -> &[acp::AuthMethod] {
+            &[]
+        }
+
+        fn authenticate(
+            &self,
+            _method_id: acp::AuthMethodId,
+            _cx: &mut App,
+        ) -> Task<gpui::Result<()>> {
+            Task::ready(Ok(()))
+        }
+
+        fn prompt(
+            &self,
+            _id: Option<acp_thread::UserMessageId>,
+            _params: acp::PromptRequest,
+            _cx: &mut App,
+        ) -> Task<gpui::Result<acp::PromptResponse>> {
+            Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)))
+        }
+
+        fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {}
+
+        fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+            self
+        }
+    }
+
+    #[gpui::test]
+    async fn test_restored_threads_keep_available_commands(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        let project = Project::test(fs, [], cx).await;
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+        let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
+        let connection_store =
+            cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
+
+        let conversation_view = cx.update(|window, cx| {
+            cx.new(|cx| {
+                ConversationView::new(
+                    Rc::new(StubAgentServer::new(RestoredAvailableCommandsConnection)),
+                    connection_store,
+                    Agent::Custom { id: "Test".into() },
+                    Some(SessionId::new("restored-session")),
+                    None,
+                    None,
+                    None,
+                    workspace.downgrade(),
+                    project,
+                    Some(thread_store),
+                    None,
+                    window,
+                    cx,
+                )
+            })
+        });
+
+        cx.run_until_parked();
+
+        let message_editor = message_editor(&conversation_view, cx);
+        let editor =
+            message_editor.update(cx, |message_editor, _cx| message_editor.editor().clone());
+        let placeholder = editor.update(cx, |editor, cx| editor.placeholder_text(cx));
+
+        active_thread(&conversation_view, cx).read_with(cx, |view, _cx| {
+            let available_commands = view
+                .session_capabilities
+                .read()
+                .available_commands()
+                .to_vec();
+            assert_eq!(available_commands.len(), 1);
+            assert_eq!(available_commands[0].name.as_str(), "help");
+            assert_eq!(available_commands[0].description.as_str(), "Get help");
+        });
+
+        assert_eq!(
+            placeholder,
+            Some("Message Test — @ to include context, / for commands".to_string())
+        );
+
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_text("/help", window, cx);
+        });
+
+        let contents_result = message_editor
+            .update(cx, |editor, cx| editor.contents(false, cx))
+            .await;
+
+        assert!(contents_result.is_ok());
+    }
+
     #[gpui::test]
     async fn test_resume_thread_uses_session_cwd_when_inside_project(cx: &mut TestAppContext) {
         init_test(cx);
@@ -3293,7 +3437,6 @@ pub(crate) mod tests {
         let fs = FakeFs::new(cx.executor());
 
         cx.update(|cx| {
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
             agent::ThreadStore::init_global(cx);
             language_model::LanguageModelRegistry::test(cx);
             <dyn Fs>::set_global(fs.clone(), cx);

crates/agent_ui/src/conversation_view/thread_view.rs 🔗

@@ -14,7 +14,7 @@ use gpui::{Corner, List};
 use heapless::Vec as ArrayVec;
 use language_model::{LanguageModelEffortLevel, Speed};
 use settings::update_settings_file;
-use ui::{ButtonLike, SplitButton, SplitButtonStyle, Tab};
+use ui::{ButtonLike, SpinnerLabel, SpinnerVariant, SplitButton, SplitButtonStyle, Tab};
 use workspace::SERIALIZATION_THROTTLE_TIME;
 
 use super::*;
@@ -164,6 +164,46 @@ impl ThreadFeedbackState {
     }
 }
 
+struct GeneratingSpinner {
+    variant: SpinnerVariant,
+}
+
+impl GeneratingSpinner {
+    fn new(variant: SpinnerVariant) -> Self {
+        Self { variant }
+    }
+}
+
+impl Render for GeneratingSpinner {
+    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
+        SpinnerLabel::with_variant(self.variant).size(LabelSize::Small)
+    }
+}
+
+#[derive(IntoElement)]
+struct GeneratingSpinnerElement {
+    variant: SpinnerVariant,
+}
+
+impl GeneratingSpinnerElement {
+    fn new(variant: SpinnerVariant) -> Self {
+        Self { variant }
+    }
+}
+
+impl RenderOnce for GeneratingSpinnerElement {
+    fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
+        let id = match self.variant {
+            SpinnerVariant::Dots => "generating-spinner-view",
+            SpinnerVariant::Sand => "confirmation-spinner-view",
+            _ => "spinner-view",
+        };
+        window.with_id(id, |window| {
+            window.use_state(cx, |_, _| GeneratingSpinner::new(self.variant))
+        })
+    }
+}
+
 pub enum AcpThreadViewEvent {
     FirstSendRequested { content: Vec<acp::ContentBlock> },
     MessageSentOrQueued,
@@ -344,7 +384,8 @@ impl ThreadView {
     ) -> Self {
         let id = thread.read(cx).session_id().clone();
 
-        let placeholder = placeholder_text(agent_display_name.as_ref(), false);
+        let has_commands = !session_capabilities.read().available_commands().is_empty();
+        let placeholder = placeholder_text(agent_display_name.as_ref(), has_commands);
 
         let history_subscription = history.as_ref().map(|h| {
             cx.observe(h, |this, history, cx| {
@@ -868,7 +909,10 @@ impl ThreadView {
                 .upgrade()
                 .and_then(|workspace| workspace.read(cx).panel::<AgentPanel>(cx))
                 .is_some_and(|panel| {
-                    panel.read(cx).start_thread_in() == &StartThreadIn::NewWorktree
+                    !matches!(
+                        panel.read(cx).start_thread_in(),
+                        StartThreadIn::LocalProject
+                    )
                 });
 
         if intercept_first_send {
@@ -3010,14 +3054,12 @@ impl ThreadView {
         let is_done = thread.read(cx).status() == ThreadStatus::Idle;
         let is_canceled_or_failed = self.is_subagent_canceled_or_failed(cx);
 
+        let max_content_width = AgentSettings::get_global(cx).max_content_width;
+
         Some(
             h_flex()
-                .h(Tab::container_height(cx))
-                .pl_2()
-                .pr_1p5()
                 .w_full()
-                .justify_between()
-                .gap_1()
+                .h(Tab::container_height(cx))
                 .border_b_1()
                 .when(is_done && is_canceled_or_failed, |this| {
                     this.border_dashed()
@@ -3026,50 +3068,61 @@ impl ThreadView {
                 .bg(cx.theme().colors().editor_background.opacity(0.2))
                 .child(
                     h_flex()
-                        .flex_1()
-                        .gap_2()
+                        .size_full()
+                        .max_w(max_content_width)
+                        .mx_auto()
+                        .pl_2()
+                        .pr_1()
+                        .flex_shrink_0()
+                        .justify_between()
+                        .gap_1()
                         .child(
-                            Icon::new(IconName::ForwardArrowUp)
-                                .size(IconSize::Small)
-                                .color(Color::Muted),
+                            h_flex()
+                                .flex_1()
+                                .gap_2()
+                                .child(
+                                    Icon::new(IconName::ForwardArrowUp)
+                                        .size(IconSize::Small)
+                                        .color(Color::Muted),
+                                )
+                                .child(self.title_editor.clone())
+                                .when(is_done && is_canceled_or_failed, |this| {
+                                    this.child(Icon::new(IconName::Close).color(Color::Error))
+                                })
+                                .when(is_done && !is_canceled_or_failed, |this| {
+                                    this.child(Icon::new(IconName::Check).color(Color::Success))
+                                }),
                         )
-                        .child(self.title_editor.clone())
-                        .when(is_done && is_canceled_or_failed, |this| {
-                            this.child(Icon::new(IconName::Close).color(Color::Error))
-                        })
-                        .when(is_done && !is_canceled_or_failed, |this| {
-                            this.child(Icon::new(IconName::Check).color(Color::Success))
-                        }),
-                )
-                .child(
-                    h_flex()
-                        .gap_0p5()
-                        .when(!is_done, |this| {
-                            this.child(
-                                IconButton::new("stop_subagent", IconName::Stop)
-                                    .icon_size(IconSize::Small)
-                                    .icon_color(Color::Error)
-                                    .tooltip(Tooltip::text("Stop Subagent"))
-                                    .on_click(move |_, _, cx| {
-                                        thread.update(cx, |thread, cx| {
-                                            thread.cancel(cx).detach();
-                                        });
-                                    }),
-                            )
-                        })
                         .child(
-                            IconButton::new("minimize_subagent", IconName::Minimize)
-                                .icon_size(IconSize::Small)
-                                .tooltip(Tooltip::text("Minimize Subagent"))
-                                .on_click(move |_, window, cx| {
-                                    let _ = server_view.update(cx, |server_view, cx| {
-                                        server_view.navigate_to_session(
-                                            parent_session_id.clone(),
-                                            window,
-                                            cx,
-                                        );
-                                    });
-                                }),
+                            h_flex()
+                                .gap_0p5()
+                                .when(!is_done, |this| {
+                                    this.child(
+                                        IconButton::new("stop_subagent", IconName::Stop)
+                                            .icon_size(IconSize::Small)
+                                            .icon_color(Color::Error)
+                                            .tooltip(Tooltip::text("Stop Subagent"))
+                                            .on_click(move |_, _, cx| {
+                                                thread.update(cx, |thread, cx| {
+                                                    thread.cancel(cx).detach();
+                                                });
+                                            }),
+                                    )
+                                })
+                                .child(
+                                    IconButton::new("minimize_subagent", IconName::Dash)
+                                        .icon_size(IconSize::Small)
+                                        .tooltip(Tooltip::text("Minimize Subagent"))
+                                        .on_click(move |_, window, cx| {
+                                            let _ = server_view.update(cx, |server_view, cx| {
+                                                server_view.navigate_to_session(
+                                                    parent_session_id.clone(),
+                                                    window,
+                                                    cx,
+                                                );
+                                            });
+                                        }),
+                                ),
                         ),
                 ),
         )
@@ -3088,13 +3141,15 @@ impl ThreadView {
         let editor_bg_color = cx.theme().colors().editor_background;
         let editor_expanded = self.editor_expanded;
         let has_messages = self.list_state.item_count() > 0;
-        let v2_empty_state = cx.has_flag::<AgentV2FeatureFlag>() && !has_messages;
+        let v2_empty_state = !has_messages;
         let (expand_icon, expand_tooltip) = if editor_expanded {
             (IconName::Minimize, "Minimize Message Editor")
         } else {
             (IconName::Maximize, "Expand Message Editor")
         };
 
+        let max_content_width = AgentSettings::get_global(cx).max_content_width;
+
         v_flex()
             .on_action(cx.listener(Self::expand_message_editor))
             .p_2()
@@ -3109,47 +3164,59 @@ impl ThreadView {
             })
             .child(
                 v_flex()
-                    .relative()
-                    .size_full()
-                    .when(v2_empty_state, |this| this.flex_1())
-                    .pt_1()
-                    .pr_2p5()
-                    .child(self.message_editor.clone())
-                    .when(!v2_empty_state, |this| {
-                        this.child(
-                            h_flex()
-                                .absolute()
-                                .top_0()
-                                .right_0()
-                                .opacity(0.5)
-                                .hover(|this| this.opacity(1.0))
-                                .child(
-                                    IconButton::new("toggle-height", expand_icon)
-                                        .icon_size(IconSize::Small)
-                                        .icon_color(Color::Muted)
-                                        .tooltip({
-                                            move |_window, cx| {
-                                                Tooltip::for_action_in(
-                                                    expand_tooltip,
-                                                    &ExpandMessageEditor,
-                                                    &focus_handle,
-                                                    cx,
-                                                )
-                                            }
-                                        })
-                                        .on_click(cx.listener(|this, _, window, cx| {
-                                            this.expand_message_editor(
-                                                &ExpandMessageEditor,
-                                                window,
-                                                cx,
-                                            );
-                                        })),
-                                ),
-                        )
-                    }),
+                    .flex_1()
+                    .min_h_0()
+                    .w_full()
+                    .max_w(max_content_width)
+                    .mx_auto()
+                    .child(
+                        v_flex()
+                            .relative()
+                            .min_h_0()
+                            .size_full()
+                            .when(v2_empty_state, |this| this.flex_1())
+                            .pt_1()
+                            .pr_2p5()
+                            .child(self.message_editor.clone())
+                            .when(!v2_empty_state, |this| {
+                                this.child(
+                                    h_flex()
+                                        .absolute()
+                                        .top_0()
+                                        .right_0()
+                                        .opacity(0.5)
+                                        .hover(|this| this.opacity(1.0))
+                                        .child(
+                                            IconButton::new("toggle-height", expand_icon)
+                                                .icon_size(IconSize::Small)
+                                                .icon_color(Color::Muted)
+                                                .tooltip({
+                                                    move |_window, cx| {
+                                                        Tooltip::for_action_in(
+                                                            expand_tooltip,
+                                                            &ExpandMessageEditor,
+                                                            &focus_handle,
+                                                            cx,
+                                                        )
+                                                    }
+                                                })
+                                                .on_click(cx.listener(|this, _, window, cx| {
+                                                    this.expand_message_editor(
+                                                        &ExpandMessageEditor,
+                                                        window,
+                                                        cx,
+                                                    );
+                                                })),
+                                        ),
+                                )
+                            }),
+                    ),
             )
             .child(
                 h_flex()
+                    .w_full()
+                    .max_w(max_content_width)
+                    .mx_auto()
                     .flex_none()
                     .flex_wrap()
                     .justify_between()
@@ -4253,10 +4320,10 @@ impl Render for TokenUsageTooltip {
 }
 
 impl ThreadView {
-    pub(crate) fn render_entries(&mut self, cx: &mut Context<Self>) -> List {
+    fn render_entries(&mut self, cx: &mut Context<Self>) -> List {
         list(
             self.list_state.clone(),
-            cx.processor(|this, index: usize, window, cx| {
+            cx.processor(move |this, index: usize, window, cx| {
                 let entries = this.thread.read(cx).entries();
                 if let Some(entry) = entries.get(index) {
                     this.render_entry(index, entries.len(), entry, window, cx)
@@ -5096,7 +5163,7 @@ impl ThreadView {
 
     pub(crate) fn sync_editor_mode_for_empty_state(&mut self, cx: &mut Context<Self>) {
         let has_messages = self.list_state.item_count() > 0;
-        let v2_empty_state = cx.has_flag::<AgentV2FeatureFlag>() && !has_messages;
+        let v2_empty_state = !has_messages;
 
         let mode = if v2_empty_state {
             EditorMode::Full {
@@ -5171,7 +5238,8 @@ impl ThreadView {
                     this.child(
                         h_flex()
                             .w_2()
-                            .child(SpinnerLabel::sand().size(LabelSize::Small)),
+                            .justify_center()
+                            .child(GeneratingSpinnerElement::new(SpinnerVariant::Sand)),
                     )
                     .child(
                         div().min_w(rems(8.)).child(
@@ -5183,7 +5251,12 @@ impl ThreadView {
                 } else if is_blocked_on_terminal_command {
                     this
                 } else {
-                    this.child(SpinnerLabel::new().size(LabelSize::Small))
+                    this.child(
+                        h_flex()
+                            .w_2()
+                            .justify_center()
+                            .child(GeneratingSpinnerElement::new(SpinnerVariant::Dots)),
+                    )
                 }
             })
             .when_some(elapsed_label, |this, elapsed| {
@@ -6325,7 +6398,6 @@ impl ThreadView {
                                     .when(is_collapsible || failed_or_canceled, |this| {
                                         let diff_for_discard = if has_revealed_diff
                                             && is_cancelled_edit
-                                            && cx.has_flag::<AgentV2FeatureFlag>()
                                         {
                                             tool_call.diffs().next().cloned()
                                         } else {
@@ -7389,9 +7461,8 @@ impl ThreadView {
             .gap_2()
             .map(|this| {
                 if card_layout {
-                    this.when(context_ix > 0, |this| {
-                        this.pt_2()
-                            .border_t_1()
+                    this.p_2().when(context_ix > 0, |this| {
+                        this.border_t_1()
                             .border_color(self.tool_card_border_color(cx))
                     })
                 } else {
@@ -8554,10 +8625,14 @@ impl ThreadView {
 impl Render for ThreadView {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let has_messages = self.list_state.item_count() > 0;
-        let v2_empty_state = cx.has_flag::<AgentV2FeatureFlag>() && !has_messages;
+        let v2_empty_state = !has_messages;
+
+        let max_content_width = AgentSettings::get_global(cx).max_content_width;
 
         let conversation = v_flex()
-            .when(!v2_empty_state, |this| this.flex_1())
+            .mx_auto()
+            .max_w(max_content_width)
+            .when(!v2_empty_state, |this| this.flex_1().size_full())
             .map(|this| {
                 let this = this.when(self.resumed_without_history, |this| {
                     this.child(Self::render_resume_notice(cx))

crates/agent_ui/src/mention_set.rs 🔗

@@ -18,7 +18,7 @@ use gpui::{
 use http_client::{AsyncBody, HttpClientWithUrl};
 use itertools::Either;
 use language::Buffer;
-use language_model::LanguageModelImage;
+use language_model::{LanguageModelImage, LanguageModelImageExt};
 use multi_buffer::MultiBufferRow;
 use postage::stream::Stream as _;
 use project::{Project, ProjectItem, ProjectPath, Worktree};

crates/agent_ui/src/mode_selector.rs 🔗

@@ -1,17 +1,20 @@
 use acp_thread::AgentSessionModes;
 use agent_client_protocol as acp;
 use agent_servers::AgentServer;
-use agent_settings::AgentSettings;
+
 use fs::Fs;
 use gpui::{Context, Entity, WeakEntity, Window, prelude::*};
-use settings::Settings as _;
+
 use std::{rc::Rc, sync::Arc};
 use ui::{
-    Button, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu,
-    PopoverMenuHandle, Tooltip, prelude::*,
+    Button, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, Tooltip,
+    prelude::*,
 };
 
-use crate::{CycleModeSelector, ToggleProfileSelector, ui::HoldForDefault};
+use crate::{
+    CycleModeSelector, ToggleProfileSelector,
+    ui::{HoldForDefault, documentation_aside_side},
+};
 
 pub struct ModeSelector {
     connection: Rc<dyn AgentSessionModes>,
@@ -87,13 +90,7 @@ impl ModeSelector {
             let current_mode = self.connection.current_mode();
             let default_mode = self.agent_server.default_mode(cx);
 
-            let settings = AgentSettings::get_global(cx);
-            let side = match settings.dock {
-                settings::DockPosition::Left => DocumentationSide::Right,
-                settings::DockPosition::Bottom | settings::DockPosition::Right => {
-                    DocumentationSide::Left
-                }
-            };
+            let side = documentation_aside_side(cx);
 
             for mode in all_modes {
                 let is_selected = &mode.id == &current_mode;

crates/agent_ui/src/model_selector.rs 🔗

@@ -3,7 +3,7 @@ use std::{cmp::Reverse, rc::Rc, sync::Arc};
 use acp_thread::{AgentModelIcon, AgentModelInfo, AgentModelList, AgentModelSelector};
 use agent_client_protocol::ModelId;
 use agent_servers::AgentServer;
-use agent_settings::AgentSettings;
+
 use anyhow::Result;
 use collections::{HashSet, IndexMap};
 use fs::Fs;
@@ -16,12 +16,15 @@ use gpui::{
 use itertools::Itertools;
 use ordered_float::OrderedFloat;
 use picker::{Picker, PickerDelegate};
-use settings::{Settings, SettingsStore};
-use ui::{DocumentationAside, DocumentationSide, IntoElement, prelude::*};
+use settings::SettingsStore;
+use ui::{DocumentationAside, IntoElement, prelude::*};
 use util::ResultExt;
 use zed_actions::agent::OpenSettings;
 
-use crate::ui::{HoldForDefault, ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem};
+use crate::ui::{
+    HoldForDefault, ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem,
+    documentation_aside_side,
+};
 
 pub type ModelSelector = Picker<ModelPickerDelegate>;
 
@@ -385,13 +388,7 @@ impl PickerDelegate for ModelPickerDelegate {
                 let description = description.clone();
                 let is_default = *is_default;
 
-                let settings = AgentSettings::get_global(cx);
-                let side = match settings.dock {
-                    settings::DockPosition::Left => DocumentationSide::Right,
-                    settings::DockPosition::Bottom | settings::DockPosition::Right => {
-                        DocumentationSide::Left
-                    }
-                };
+                let side = documentation_aside_side(cx);
 
                 DocumentationAside::new(
                     side,

crates/agent_ui/src/profile_selector.rs 🔗

@@ -1,4 +1,6 @@
-use crate::{CycleModeSelector, ManageProfiles, ToggleProfileSelector};
+use crate::{
+    CycleModeSelector, ManageProfiles, ToggleProfileSelector, ui::documentation_aside_side,
+};
 use agent_settings::{
     AgentProfile, AgentProfileId, AgentSettings, AvailableProfiles, builtin_profiles,
 };
@@ -15,8 +17,8 @@ use std::{
     sync::{Arc, atomic::AtomicBool},
 };
 use ui::{
-    DocumentationAside, DocumentationSide, HighlightedLabel, KeyBinding, LabelSize, ListItem,
-    ListItemSpacing, PopoverMenuHandle, Tooltip, prelude::*,
+    DocumentationAside, HighlightedLabel, KeyBinding, LabelSize, ListItem, ListItemSpacing,
+    PopoverMenuHandle, Tooltip, prelude::*,
 };
 
 /// Trait for types that can provide and manage agent profiles
@@ -629,13 +631,7 @@ impl PickerDelegate for ProfilePickerDelegate {
         let candidate = self.candidates.get(entry.candidate_index)?;
         let docs_aside = Self::documentation(candidate)?.to_string();
 
-        let settings = AgentSettings::get_global(cx);
-        let side = match settings.dock {
-            settings::DockPosition::Left => DocumentationSide::Right,
-            settings::DockPosition::Bottom | settings::DockPosition::Right => {
-                DocumentationSide::Left
-            }
-        };
+        let side = documentation_aside_side(cx);
 
         Some(DocumentationAside {
             side,

crates/agent_ui/src/thread_branch_picker.rs 🔗

@@ -0,0 +1,758 @@
+use std::collections::{HashMap, HashSet};
+use std::rc::Rc;
+
+use collections::HashSet as CollectionsHashSet;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use fuzzy::StringMatchCandidate;
+use git::repository::Branch as GitBranch;
+use gpui::{
+    AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
+    IntoElement, ParentElement, Render, SharedString, Styled, Task, Window, rems,
+};
+use picker::{Picker, PickerDelegate, PickerEditorPosition};
+use project::Project;
+use ui::{
+    Divider, DocumentationAside, HighlightedLabel, Icon, IconName, Label, LabelCommon, ListItem,
+    ListItemSpacing, prelude::*,
+};
+use util::ResultExt as _;
+
+use crate::{NewWorktreeBranchTarget, StartThreadIn};
+
+pub(crate) struct ThreadBranchPicker {
+    picker: Entity<Picker<ThreadBranchPickerDelegate>>,
+    focus_handle: FocusHandle,
+    _subscription: gpui::Subscription,
+}
+
+impl ThreadBranchPicker {
+    pub fn new(
+        project: Entity<Project>,
+        current_target: &StartThreadIn,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Self {
+        let project_worktree_paths: HashSet<PathBuf> = project
+            .read(cx)
+            .visible_worktrees(cx)
+            .map(|worktree| worktree.read(cx).abs_path().to_path_buf())
+            .collect();
+
+        let has_multiple_repositories = project.read(cx).repositories(cx).len() > 1;
+        let current_branch_name = project
+            .read(cx)
+            .active_repository(cx)
+            .and_then(|repo| {
+                repo.read(cx)
+                    .branch
+                    .as_ref()
+                    .map(|branch| branch.name().to_string())
+            })
+            .unwrap_or_else(|| "HEAD".to_string());
+
+        let repository = if has_multiple_repositories {
+            None
+        } else {
+            project.read(cx).active_repository(cx)
+        };
+        let branches_request = repository
+            .clone()
+            .map(|repo| repo.update(cx, |repo, _| repo.branches()));
+        let default_branch_request = repository
+            .clone()
+            .map(|repo| repo.update(cx, |repo, _| repo.default_branch(false)));
+        let worktrees_request = repository.map(|repo| repo.update(cx, |repo, _| repo.worktrees()));
+
+        let (worktree_name, branch_target) = match current_target {
+            StartThreadIn::NewWorktree {
+                worktree_name,
+                branch_target,
+            } => (worktree_name.clone(), branch_target.clone()),
+            _ => (None, NewWorktreeBranchTarget::default()),
+        };
+
+        let delegate = ThreadBranchPickerDelegate {
+            matches: vec![ThreadBranchEntry::CurrentBranch],
+            all_branches: None,
+            occupied_branches: None,
+            selected_index: 0,
+            worktree_name,
+            branch_target,
+            project_worktree_paths,
+            current_branch_name,
+            default_branch_name: None,
+            has_multiple_repositories,
+        };
+
+        let picker = cx.new(|cx| {
+            Picker::list(delegate, window, cx)
+                .list_measure_all()
+                .modal(false)
+                .max_height(Some(rems(20.).into()))
+        });
+
+        let focus_handle = picker.focus_handle(cx);
+
+        if let (Some(branches_request), Some(default_branch_request), Some(worktrees_request)) =
+            (branches_request, default_branch_request, worktrees_request)
+        {
+            let picker_handle = picker.downgrade();
+            cx.spawn_in(window, async move |_this, cx| {
+                let branches = branches_request.await??;
+                let default_branch = default_branch_request.await.ok().and_then(Result::ok).flatten();
+                let worktrees = worktrees_request.await??;
+
+                let remote_upstreams: CollectionsHashSet<_> = branches
+                    .iter()
+                    .filter_map(|branch| {
+                        branch
+                            .upstream
+                            .as_ref()
+                            .filter(|upstream| upstream.is_remote())
+                            .map(|upstream| upstream.ref_name.clone())
+                    })
+                    .collect();
+
+                let mut occupied_branches = HashMap::new();
+                for worktree in worktrees {
+                    let Some(branch_name) = worktree.branch_name().map(ToOwned::to_owned) else {
+                        continue;
+                    };
+
+                    let reason = if picker_handle
+                        .read_with(cx, |picker, _| {
+                            picker
+                                .delegate
+                                .project_worktree_paths
+                                .contains(&worktree.path)
+                        })
+                        .unwrap_or(false)
+                    {
+                        format!(
+                            "This branch is already checked out in the current project worktree at {}.",
+                            worktree.path.display()
+                        )
+                    } else {
+                        format!(
+                            "This branch is already checked out in a linked worktree at {}.",
+                            worktree.path.display()
+                        )
+                    };
+
+                    occupied_branches.insert(branch_name, reason);
+                }
+
+                let mut all_branches: Vec<_> = branches
+                    .into_iter()
+                    .filter(|branch| !remote_upstreams.contains(&branch.ref_name))
+                    .collect();
+                all_branches.sort_by_key(|branch| {
+                    (
+                        branch.is_remote(),
+                        !branch.is_head,
+                        branch
+                            .most_recent_commit
+                            .as_ref()
+                            .map(|commit| 0 - commit.commit_timestamp),
+                    )
+                });
+
+                picker_handle.update_in(cx, |picker, window, cx| {
+                    picker.delegate.all_branches = Some(all_branches);
+                    picker.delegate.occupied_branches = Some(occupied_branches);
+                    picker.delegate.default_branch_name = default_branch.map(|branch| branch.to_string());
+                    picker.refresh(window, cx);
+                })?;
+
+                anyhow::Ok(())
+            })
+            .detach_and_log_err(cx);
+        }
+
+        let subscription = cx.subscribe(&picker, |_, _, _, cx| {
+            cx.emit(DismissEvent);
+        });
+
+        Self {
+            picker,
+            focus_handle,
+            _subscription: subscription,
+        }
+    }
+}
+
+impl Focusable for ThreadBranchPicker {
+    fn focus_handle(&self, _cx: &App) -> FocusHandle {
+        self.focus_handle.clone()
+    }
+}
+
+impl EventEmitter<DismissEvent> for ThreadBranchPicker {}
+
+impl Render for ThreadBranchPicker {
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        v_flex()
+            .w(rems(22.))
+            .elevation_3(cx)
+            .child(self.picker.clone())
+            .on_mouse_down_out(cx.listener(|_, _, _, cx| {
+                cx.emit(DismissEvent);
+            }))
+    }
+}
+
+#[derive(Clone)]
+enum ThreadBranchEntry {
+    CurrentBranch,
+    DefaultBranch,
+    Separator,
+    ExistingBranch {
+        branch: GitBranch,
+        positions: Vec<usize>,
+    },
+    CreateNamed {
+        name: String,
+    },
+}
+
+pub(crate) struct ThreadBranchPickerDelegate {
+    matches: Vec<ThreadBranchEntry>,
+    all_branches: Option<Vec<GitBranch>>,
+    occupied_branches: Option<HashMap<String, String>>,
+    selected_index: usize,
+    worktree_name: Option<String>,
+    branch_target: NewWorktreeBranchTarget,
+    project_worktree_paths: HashSet<PathBuf>,
+    current_branch_name: String,
+    default_branch_name: Option<String>,
+    has_multiple_repositories: bool,
+}
+
+impl ThreadBranchPickerDelegate {
+    fn new_worktree_action(&self, branch_target: NewWorktreeBranchTarget) -> StartThreadIn {
+        StartThreadIn::NewWorktree {
+            worktree_name: self.worktree_name.clone(),
+            branch_target,
+        }
+    }
+
+    fn selected_entry_name(&self) -> Option<&str> {
+        match &self.branch_target {
+            NewWorktreeBranchTarget::CurrentBranch => None,
+            NewWorktreeBranchTarget::ExistingBranch { name } => Some(name),
+            NewWorktreeBranchTarget::CreateBranch {
+                from_ref: Some(from_ref),
+                ..
+            } => Some(from_ref),
+            NewWorktreeBranchTarget::CreateBranch { name, .. } => Some(name),
+        }
+    }
+
+    fn prefer_create_entry(&self) -> bool {
+        matches!(
+            &self.branch_target,
+            NewWorktreeBranchTarget::CreateBranch { from_ref: None, .. }
+        )
+    }
+
+    fn fixed_matches(&self) -> Vec<ThreadBranchEntry> {
+        let mut matches = vec![ThreadBranchEntry::CurrentBranch];
+        if !self.has_multiple_repositories
+            && self
+                .default_branch_name
+                .as_ref()
+                .is_some_and(|default_branch_name| default_branch_name != &self.current_branch_name)
+        {
+            matches.push(ThreadBranchEntry::DefaultBranch);
+        }
+        matches
+    }
+
+    fn is_branch_occupied(&self, branch_name: &str) -> bool {
+        self.occupied_branches
+            .as_ref()
+            .is_some_and(|occupied| occupied.contains_key(branch_name))
+    }
+
+    fn branch_aside_text(&self, branch_name: &str, is_remote: bool) -> Option<SharedString> {
+        if self.is_branch_occupied(branch_name) {
+            Some(
+                format!(
+                    "This branch is already checked out in another worktree. \
+                     A new branch will be created from {branch_name}."
+                )
+                .into(),
+            )
+        } else if is_remote {
+            Some("A new local branch will be created from this remote branch.".into())
+        } else {
+            None
+        }
+    }
+
+    fn entry_aside_text(&self, entry: &ThreadBranchEntry) -> Option<SharedString> {
+        match entry {
+            ThreadBranchEntry::CurrentBranch => Some(SharedString::from(
+                "A new branch will be created from the current branch.",
+            )),
+            ThreadBranchEntry::DefaultBranch => {
+                let default_branch_name = self
+                    .default_branch_name
+                    .as_ref()
+                    .filter(|name| *name != &self.current_branch_name)?;
+                self.branch_aside_text(default_branch_name, false)
+            }
+            ThreadBranchEntry::ExistingBranch { branch, .. } => {
+                self.branch_aside_text(branch.name(), branch.is_remote())
+            }
+            _ => None,
+        }
+    }
+
+    fn sync_selected_index(&mut self) {
+        let selected_entry_name = self.selected_entry_name().map(ToOwned::to_owned);
+        let prefer_create = self.prefer_create_entry();
+
+        if prefer_create {
+            if let Some(ref selected_entry_name) = selected_entry_name {
+                if let Some(index) = self.matches.iter().position(|entry| {
+                    matches!(
+                        entry,
+                        ThreadBranchEntry::CreateNamed { name } if name == selected_entry_name
+                    )
+                }) {
+                    self.selected_index = index;
+                    return;
+                }
+            }
+        } else if let Some(ref selected_entry_name) = selected_entry_name {
+            if selected_entry_name == &self.current_branch_name {
+                if let Some(index) = self
+                    .matches
+                    .iter()
+                    .position(|entry| matches!(entry, ThreadBranchEntry::CurrentBranch))
+                {
+                    self.selected_index = index;
+                    return;
+                }
+            }
+
+            if self
+                .default_branch_name
+                .as_ref()
+                .is_some_and(|default_branch_name| default_branch_name == selected_entry_name)
+            {
+                if let Some(index) = self
+                    .matches
+                    .iter()
+                    .position(|entry| matches!(entry, ThreadBranchEntry::DefaultBranch))
+                {
+                    self.selected_index = index;
+                    return;
+                }
+            }
+
+            if let Some(index) = self.matches.iter().position(|entry| {
+                matches!(
+                    entry,
+                    ThreadBranchEntry::ExistingBranch { branch, .. }
+                        if branch.name() == selected_entry_name.as_str()
+                )
+            }) {
+                self.selected_index = index;
+                return;
+            }
+        }
+
+        if self.matches.len() > 1
+            && self
+                .matches
+                .iter()
+                .skip(1)
+                .all(|entry| matches!(entry, ThreadBranchEntry::CreateNamed { .. }))
+        {
+            self.selected_index = 1;
+            return;
+        }
+
+        self.selected_index = 0;
+    }
+}
+
+impl PickerDelegate for ThreadBranchPickerDelegate {
+    type ListItem = AnyElement;
+
+    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+        "Search branches…".into()
+    }
+
+    fn editor_position(&self) -> PickerEditorPosition {
+        PickerEditorPosition::Start
+    }
+
+    fn match_count(&self) -> usize {
+        self.matches.len()
+    }
+
+    fn selected_index(&self) -> usize {
+        self.selected_index
+    }
+
+    fn set_selected_index(
+        &mut self,
+        ix: usize,
+        _window: &mut Window,
+        _cx: &mut Context<Picker<Self>>,
+    ) {
+        self.selected_index = ix;
+    }
+
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
+        !matches!(self.matches.get(ix), Some(ThreadBranchEntry::Separator))
+    }
+
+    fn update_matches(
+        &mut self,
+        query: String,
+        window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Task<()> {
+        if self.has_multiple_repositories {
+            let mut matches = self.fixed_matches();
+
+            if query.is_empty() {
+                if let Some(name) = self.selected_entry_name().map(ToOwned::to_owned) {
+                    if self.prefer_create_entry() {
+                        matches.push(ThreadBranchEntry::Separator);
+                        matches.push(ThreadBranchEntry::CreateNamed { name });
+                    }
+                }
+            } else {
+                matches.push(ThreadBranchEntry::Separator);
+                matches.push(ThreadBranchEntry::CreateNamed {
+                    name: query.replace(' ', "-"),
+                });
+            }
+
+            self.matches = matches;
+            self.sync_selected_index();
+            return Task::ready(());
+        }
+
+        let Some(all_branches) = self.all_branches.clone() else {
+            self.matches = self.fixed_matches();
+            self.selected_index = 0;
+            return Task::ready(());
+        };
+
+        if query.is_empty() {
+            let mut matches = self.fixed_matches();
+            let filtered_branches: Vec<_> = all_branches
+                .into_iter()
+                .filter(|branch| {
+                    branch.name() != self.current_branch_name
+                        && self
+                            .default_branch_name
+                            .as_ref()
+                            .is_none_or(|default_branch_name| branch.name() != default_branch_name)
+                })
+                .collect();
+
+            if !filtered_branches.is_empty() {
+                matches.push(ThreadBranchEntry::Separator);
+            }
+            for branch in filtered_branches {
+                matches.push(ThreadBranchEntry::ExistingBranch {
+                    branch,
+                    positions: Vec::new(),
+                });
+            }
+
+            if let Some(selected_entry_name) = self.selected_entry_name().map(ToOwned::to_owned) {
+                let has_existing = matches.iter().any(|entry| {
+                    matches!(
+                        entry,
+                        ThreadBranchEntry::ExistingBranch { branch, .. }
+                            if branch.name() == selected_entry_name
+                    )
+                });
+                if self.prefer_create_entry() && !has_existing {
+                    matches.push(ThreadBranchEntry::CreateNamed {
+                        name: selected_entry_name,
+                    });
+                }
+            }
+
+            self.matches = matches;
+            self.sync_selected_index();
+            return Task::ready(());
+        }
+
+        let candidates: Vec<_> = all_branches
+            .iter()
+            .enumerate()
+            .map(|(ix, branch)| StringMatchCandidate::new(ix, branch.name()))
+            .collect();
+        let executor = cx.background_executor().clone();
+        let query_clone = query.clone();
+        let normalized_query = query.replace(' ', "-");
+
+        let task = cx.background_executor().spawn(async move {
+            fuzzy::match_strings(
+                &candidates,
+                &query_clone,
+                true,
+                true,
+                10000,
+                &Default::default(),
+                executor,
+            )
+            .await
+        });
+
+        let all_branches_clone = all_branches;
+        cx.spawn_in(window, async move |picker, cx| {
+            let fuzzy_matches = task.await;
+
+            picker
+                .update_in(cx, |picker, _window, cx| {
+                    let mut matches = picker.delegate.fixed_matches();
+                    let mut has_dynamic_entries = false;
+
+                    for candidate in &fuzzy_matches {
+                        let branch = all_branches_clone[candidate.candidate_id].clone();
+                        if branch.name() == picker.delegate.current_branch_name
+                            || picker.delegate.default_branch_name.as_ref().is_some_and(
+                                |default_branch_name| branch.name() == default_branch_name,
+                            )
+                        {
+                            continue;
+                        }
+                        if !has_dynamic_entries {
+                            matches.push(ThreadBranchEntry::Separator);
+                            has_dynamic_entries = true;
+                        }
+                        matches.push(ThreadBranchEntry::ExistingBranch {
+                            branch,
+                            positions: candidate.positions.clone(),
+                        });
+                    }
+
+                    if fuzzy_matches.is_empty() {
+                        if !has_dynamic_entries {
+                            matches.push(ThreadBranchEntry::Separator);
+                        }
+                        matches.push(ThreadBranchEntry::CreateNamed {
+                            name: normalized_query.clone(),
+                        });
+                    }
+
+                    picker.delegate.matches = matches;
+                    if let Some(index) =
+                        picker.delegate.matches.iter().position(|entry| {
+                            matches!(entry, ThreadBranchEntry::ExistingBranch { .. })
+                        })
+                    {
+                        picker.delegate.selected_index = index;
+                    } else if !fuzzy_matches.is_empty() {
+                        picker.delegate.selected_index = 0;
+                    } else if let Some(index) =
+                        picker.delegate.matches.iter().position(|entry| {
+                            matches!(entry, ThreadBranchEntry::CreateNamed { .. })
+                        })
+                    {
+                        picker.delegate.selected_index = index;
+                    } else {
+                        picker.delegate.sync_selected_index();
+                    }
+                    cx.notify();
+                })
+                .log_err();
+        })
+    }
+
+    fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+        let Some(entry) = self.matches.get(self.selected_index) else {
+            return;
+        };
+
+        match entry {
+            ThreadBranchEntry::Separator => return,
+            ThreadBranchEntry::CurrentBranch => {
+                window.dispatch_action(
+                    Box::new(self.new_worktree_action(NewWorktreeBranchTarget::CurrentBranch)),
+                    cx,
+                );
+            }
+            ThreadBranchEntry::DefaultBranch => {
+                let Some(default_branch_name) = self.default_branch_name.clone() else {
+                    return;
+                };
+                window.dispatch_action(
+                    Box::new(
+                        self.new_worktree_action(NewWorktreeBranchTarget::ExistingBranch {
+                            name: default_branch_name,
+                        }),
+                    ),
+                    cx,
+                );
+            }
+            ThreadBranchEntry::ExistingBranch { branch, .. } => {
+                let branch_target = if branch.is_remote() {
+                    let branch_name = branch
+                        .ref_name
+                        .as_ref()
+                        .strip_prefix("refs/remotes/")
+                        .and_then(|stripped| stripped.split_once('/').map(|(_, name)| name))
+                        .unwrap_or(branch.name())
+                        .to_string();
+                    NewWorktreeBranchTarget::CreateBranch {
+                        name: branch_name,
+                        from_ref: Some(branch.name().to_string()),
+                    }
+                } else {
+                    NewWorktreeBranchTarget::ExistingBranch {
+                        name: branch.name().to_string(),
+                    }
+                };
+                window.dispatch_action(Box::new(self.new_worktree_action(branch_target)), cx);
+            }
+            ThreadBranchEntry::CreateNamed { name } => {
+                window.dispatch_action(
+                    Box::new(
+                        self.new_worktree_action(NewWorktreeBranchTarget::CreateBranch {
+                            name: name.clone(),
+                            from_ref: None,
+                        }),
+                    ),
+                    cx,
+                );
+            }
+        }
+
+        cx.emit(DismissEvent);
+    }
+
+    fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {}
+
+    fn render_match(
+        &self,
+        ix: usize,
+        selected: bool,
+        _window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Option<Self::ListItem> {
+        let entry = self.matches.get(ix)?;
+
+        match entry {
+            ThreadBranchEntry::Separator => Some(
+                div()
+                    .py(DynamicSpacing::Base04.rems(cx))
+                    .child(Divider::horizontal())
+                    .into_any_element(),
+            ),
+            ThreadBranchEntry::CurrentBranch => {
+                let branch_name = if self.has_multiple_repositories {
+                    SharedString::from("current branches")
+                } else {
+                    SharedString::from(self.current_branch_name.clone())
+                };
+
+                Some(
+                    ListItem::new("current-branch")
+                        .inset(true)
+                        .spacing(ListItemSpacing::Sparse)
+                        .toggle_state(selected)
+                        .child(Label::new(branch_name))
+                        .into_any_element(),
+                )
+            }
+            ThreadBranchEntry::DefaultBranch => {
+                let default_branch_name = self
+                    .default_branch_name
+                    .as_ref()
+                    .filter(|name| *name != &self.current_branch_name)?;
+                let is_occupied = self.is_branch_occupied(default_branch_name);
+
+                let item = ListItem::new("default-branch")
+                    .inset(true)
+                    .spacing(ListItemSpacing::Sparse)
+                    .toggle_state(selected)
+                    .child(Label::new(default_branch_name.clone()));
+
+                Some(
+                    if is_occupied {
+                        item.start_slot(Icon::new(IconName::GitBranchPlus).color(Color::Muted))
+                    } else {
+                        item
+                    }
+                    .into_any_element(),
+                )
+            }
+            ThreadBranchEntry::ExistingBranch {
+                branch, positions, ..
+            } => {
+                let branch_name = branch.name().to_string();
+                let needs_new_branch = self.is_branch_occupied(&branch_name) || branch.is_remote();
+
+                Some(
+                    ListItem::new(SharedString::from(format!("branch-{ix}")))
+                        .inset(true)
+                        .spacing(ListItemSpacing::Sparse)
+                        .toggle_state(selected)
+                        .child(
+                            h_flex()
+                                .min_w_0()
+                                .gap_1()
+                                .child(
+                                    HighlightedLabel::new(branch_name, positions.clone())
+                                        .truncate(),
+                                )
+                                .when(needs_new_branch, |item| {
+                                    item.child(
+                                        Icon::new(IconName::GitBranchPlus)
+                                            .size(IconSize::Small)
+                                            .color(Color::Muted),
+                                    )
+                                }),
+                        )
+                        .into_any_element(),
+                )
+            }
+            ThreadBranchEntry::CreateNamed { name } => Some(
+                ListItem::new("create-named-branch")
+                    .inset(true)
+                    .spacing(ListItemSpacing::Sparse)
+                    .toggle_state(selected)
+                    .child(Label::new(format!("Create Branch: \"{name}\"…")))
+                    .into_any_element(),
+            ),
+        }
+    }
+
+    fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
+        None
+    }
+
+    fn documentation_aside(
+        &self,
+        _window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Option<DocumentationAside> {
+        let entry = self.matches.get(self.selected_index)?;
+        let aside_text = self.entry_aside_text(entry)?;
+        let side = crate::ui::documentation_aside_side(cx);
+
+        Some(DocumentationAside::new(
+            side,
+            Rc::new(move |_| Label::new(aside_text.clone()).into_any_element()),
+        ))
+    }
+
+    fn documentation_aside_index(&self) -> Option<usize> {
+        let entry = self.matches.get(self.selected_index)?;
+        self.entry_aside_text(entry).map(|_| self.selected_index)
+    }
+}

crates/agent_ui/src/thread_metadata_store.rs 🔗

@@ -16,7 +16,6 @@ use db::{
     },
     sqlez_macros::sql,
 };
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
 use futures::{FutureExt as _, future::Shared};
 use gpui::{AppContext as _, Entity, Global, Subscription, Task};
 use project::AgentId;
@@ -28,16 +27,7 @@ use crate::DEFAULT_THREAD_TITLE;
 
 pub fn init(cx: &mut App) {
     ThreadMetadataStore::init_global(cx);
-
-    if cx.has_flag::<AgentV2FeatureFlag>() {
-        migrate_thread_metadata(cx);
-    }
-    cx.observe_flag::<AgentV2FeatureFlag, _>(|has_flag, cx| {
-        if has_flag {
-            migrate_thread_metadata(cx);
-        }
-    })
-    .detach();
+    migrate_thread_metadata(cx);
 }
 
 /// Migrate existing thread metadata from native agent thread store to the new metadata storage.
@@ -344,10 +334,6 @@ impl ThreadMetadataStore {
     }
 
     pub fn save_all(&mut self, metadata: Vec<ThreadMetadata>, cx: &mut Context<Self>) {
-        if !cx.has_flag::<AgentV2FeatureFlag>() {
-            return;
-        }
-
         for metadata in metadata {
             self.save_internal(metadata);
         }
@@ -360,10 +346,6 @@ impl ThreadMetadataStore {
     }
 
     fn save(&mut self, metadata: ThreadMetadata, cx: &mut Context<Self>) {
-        if !cx.has_flag::<AgentV2FeatureFlag>() {
-            return;
-        }
-
         self.save_internal(metadata);
         cx.notify();
     }
@@ -413,10 +395,6 @@ impl ThreadMetadataStore {
         work_dirs: PathList,
         cx: &mut Context<Self>,
     ) {
-        if !cx.has_flag::<AgentV2FeatureFlag>() {
-            return;
-        }
-
         if let Some(thread) = self.threads.get(session_id) {
             self.save_internal(ThreadMetadata {
                 folder_paths: work_dirs,
@@ -538,10 +516,6 @@ impl ThreadMetadataStore {
         archived: bool,
         cx: &mut Context<Self>,
     ) {
-        if !cx.has_flag::<AgentV2FeatureFlag>() {
-            return;
-        }
-
         if let Some(thread) = self.threads.get(session_id) {
             self.save_internal(ThreadMetadata {
                 archived,
@@ -552,10 +526,6 @@ impl ThreadMetadataStore {
     }
 
     pub fn delete(&mut self, session_id: acp::SessionId, cx: &mut Context<Self>) {
-        if !cx.has_flag::<AgentV2FeatureFlag>() {
-            return;
-        }
-
         if let Some(thread) = self.threads.get(&session_id) {
             if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
                 session_ids.remove(&session_id);
@@ -712,19 +682,12 @@ impl ThreadMetadataStore {
                     PathList::new(&paths)
                 };
 
-                let main_worktree_paths = {
-                    let project = thread_ref.project().read(cx);
-                    let mut main_paths: Vec<Arc<Path>> = Vec::new();
-                    for repo in project.repositories(cx).values() {
-                        let snapshot = repo.read(cx).snapshot();
-                        if snapshot.is_linked_worktree() {
-                            main_paths.push(snapshot.original_repo_abs_path.clone());
-                        }
-                    }
-                    main_paths.sort();
-                    main_paths.dedup();
-                    PathList::new(&main_paths)
-                };
+                let main_worktree_paths = thread_ref
+                    .project()
+                    .read(cx)
+                    .project_group_key(cx)
+                    .path_list()
+                    .clone();
 
                 // Threads without a folder path (e.g. started in an empty
                 // window) are archived by default so they don't get lost,
@@ -1053,7 +1016,7 @@ mod tests {
     use action_log::ActionLog;
     use agent::DbThread;
     use agent_client_protocol as acp;
-    use feature_flags::FeatureFlagAppExt;
+
     use gpui::TestAppContext;
     use project::FakeFs;
     use project::Project;
@@ -1103,7 +1066,6 @@ mod tests {
         cx.update(|cx| {
             let settings_store = settings::SettingsStore::test(cx);
             cx.set_global(settings_store);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
             ThreadMetadataStore::init_global(cx);
             ThreadStore::init_global(cx);
         });
@@ -1144,7 +1106,6 @@ mod tests {
         cx.update(|cx| {
             let settings_store = settings::SettingsStore::test(cx);
             cx.set_global(settings_store);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
             ThreadMetadataStore::init_global(cx);
         });
 

crates/agent_ui/src/thread_worktree_picker.rs 🔗

@@ -0,0 +1,621 @@
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::sync::Arc;
+
+use agent_settings::AgentSettings;
+use fs::Fs;
+use fuzzy::StringMatchCandidate;
+use git::repository::Worktree as GitWorktree;
+use gpui::{
+    AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
+    IntoElement, ParentElement, Render, SharedString, Styled, Task, Window, rems,
+};
+use picker::{Picker, PickerDelegate, PickerEditorPosition};
+use project::{Project, git_store::RepositoryId};
+use settings::{NewThreadLocation, Settings, update_settings_file};
+use ui::{
+    Divider, DocumentationAside, HighlightedLabel, Label, LabelCommon, ListItem, ListItemSpacing,
+    Tooltip, prelude::*,
+};
+use util::ResultExt as _;
+use util::paths::PathExt;
+
+use crate::ui::HoldForDefault;
+use crate::{NewWorktreeBranchTarget, StartThreadIn};
+
+pub(crate) struct ThreadWorktreePicker {
+    picker: Entity<Picker<ThreadWorktreePickerDelegate>>,
+    focus_handle: FocusHandle,
+    _subscription: gpui::Subscription,
+}
+
+impl ThreadWorktreePicker {
+    pub fn new(
+        project: Entity<Project>,
+        current_target: &StartThreadIn,
+        fs: Arc<dyn Fs>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Self {
+        let project_worktree_paths: Vec<PathBuf> = project
+            .read(cx)
+            .visible_worktrees(cx)
+            .map(|wt| wt.read(cx).abs_path().to_path_buf())
+            .collect();
+
+        let preserved_branch_target = match current_target {
+            StartThreadIn::NewWorktree { branch_target, .. } => branch_target.clone(),
+            _ => NewWorktreeBranchTarget::default(),
+        };
+
+        let all_worktrees: Vec<_> = project
+            .read(cx)
+            .repositories(cx)
+            .iter()
+            .map(|(repo_id, repo)| (*repo_id, repo.read(cx).linked_worktrees.clone()))
+            .collect();
+
+        let has_multiple_repositories = all_worktrees.len() > 1;
+
+        let linked_worktrees: Vec<_> = if has_multiple_repositories {
+            Vec::new()
+        } else {
+            all_worktrees
+                .iter()
+                .flat_map(|(_, worktrees)| worktrees.iter())
+                .filter(|worktree| {
+                    !project_worktree_paths
+                        .iter()
+                        .any(|project_path| project_path == &worktree.path)
+                })
+                .cloned()
+                .collect()
+        };
+
+        let mut initial_matches = vec![
+            ThreadWorktreeEntry::CurrentWorktree,
+            ThreadWorktreeEntry::NewWorktree,
+        ];
+
+        if !linked_worktrees.is_empty() {
+            initial_matches.push(ThreadWorktreeEntry::Separator);
+            for worktree in &linked_worktrees {
+                initial_matches.push(ThreadWorktreeEntry::LinkedWorktree {
+                    worktree: worktree.clone(),
+                    positions: Vec::new(),
+                });
+            }
+        }
+
+        let selected_index = match current_target {
+            StartThreadIn::LocalProject => 0,
+            StartThreadIn::NewWorktree { .. } => 1,
+            StartThreadIn::LinkedWorktree { path, .. } => initial_matches
+                .iter()
+                .position(|entry| matches!(entry, ThreadWorktreeEntry::LinkedWorktree { worktree, .. } if worktree.path == *path))
+                .unwrap_or(0),
+        };
+
+        let delegate = ThreadWorktreePickerDelegate {
+            matches: initial_matches,
+            all_worktrees,
+            project_worktree_paths,
+            selected_index,
+            project,
+            preserved_branch_target,
+            fs,
+        };
+
+        let picker = cx.new(|cx| {
+            Picker::list(delegate, window, cx)
+                .list_measure_all()
+                .modal(false)
+                .max_height(Some(rems(20.).into()))
+        });
+
+        let subscription = cx.subscribe(&picker, |_, _, _, cx| {
+            cx.emit(DismissEvent);
+        });
+
+        Self {
+            focus_handle: picker.focus_handle(cx),
+            picker,
+            _subscription: subscription,
+        }
+    }
+}
+
+impl Focusable for ThreadWorktreePicker {
+    fn focus_handle(&self, _cx: &App) -> FocusHandle {
+        self.focus_handle.clone()
+    }
+}
+
+impl EventEmitter<DismissEvent> for ThreadWorktreePicker {}
+
+impl Render for ThreadWorktreePicker {
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        v_flex()
+            .w(rems(20.))
+            .elevation_3(cx)
+            .child(self.picker.clone())
+            .on_mouse_down_out(cx.listener(|_, _, _, cx| {
+                cx.emit(DismissEvent);
+            }))
+    }
+}
+
+#[derive(Clone)]
+enum ThreadWorktreeEntry {
+    CurrentWorktree,
+    NewWorktree,
+    Separator,
+    LinkedWorktree {
+        worktree: GitWorktree,
+        positions: Vec<usize>,
+    },
+    CreateNamed {
+        name: String,
+        disabled_reason: Option<String>,
+    },
+}
+
+pub(crate) struct ThreadWorktreePickerDelegate {
+    matches: Vec<ThreadWorktreeEntry>,
+    all_worktrees: Vec<(RepositoryId, Arc<[GitWorktree]>)>,
+    project_worktree_paths: Vec<PathBuf>,
+    selected_index: usize,
+    preserved_branch_target: NewWorktreeBranchTarget,
+    project: Entity<Project>,
+    fs: Arc<dyn Fs>,
+}
+
+impl ThreadWorktreePickerDelegate {
+    fn new_worktree_action(&self, worktree_name: Option<String>) -> StartThreadIn {
+        StartThreadIn::NewWorktree {
+            worktree_name,
+            branch_target: self.preserved_branch_target.clone(),
+        }
+    }
+
+    fn sync_selected_index(&mut self, has_query: bool) {
+        if !has_query {
+            return;
+        }
+
+        if let Some(index) = self
+            .matches
+            .iter()
+            .position(|entry| matches!(entry, ThreadWorktreeEntry::LinkedWorktree { .. }))
+        {
+            self.selected_index = index;
+        } else if let Some(index) = self
+            .matches
+            .iter()
+            .position(|entry| matches!(entry, ThreadWorktreeEntry::CreateNamed { .. }))
+        {
+            self.selected_index = index;
+        } else {
+            self.selected_index = 0;
+        }
+    }
+}
+
+impl PickerDelegate for ThreadWorktreePickerDelegate {
+    type ListItem = AnyElement;
+
+    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+        "Search or create worktrees…".into()
+    }
+
+    fn editor_position(&self) -> PickerEditorPosition {
+        PickerEditorPosition::Start
+    }
+
+    fn match_count(&self) -> usize {
+        self.matches.len()
+    }
+
+    fn selected_index(&self) -> usize {
+        self.selected_index
+    }
+
+    fn set_selected_index(
+        &mut self,
+        ix: usize,
+        _window: &mut Window,
+        _cx: &mut Context<Picker<Self>>,
+    ) {
+        self.selected_index = ix;
+    }
+
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
+        !matches!(self.matches.get(ix), Some(ThreadWorktreeEntry::Separator))
+    }
+
+    fn update_matches(
+        &mut self,
+        query: String,
+        window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Task<()> {
+        let has_multiple_repositories = self.all_worktrees.len() > 1;
+
+        let linked_worktrees: Vec<_> = if has_multiple_repositories {
+            Vec::new()
+        } else {
+            self.all_worktrees
+                .iter()
+                .flat_map(|(_, worktrees)| worktrees.iter())
+                .filter(|worktree| {
+                    !self
+                        .project_worktree_paths
+                        .iter()
+                        .any(|project_path| project_path == &worktree.path)
+                })
+                .cloned()
+                .collect()
+        };
+
+        let normalized_query = query.replace(' ', "-");
+        let has_named_worktree = self.all_worktrees.iter().any(|(_, worktrees)| {
+            worktrees
+                .iter()
+                .any(|worktree| worktree.display_name() == normalized_query)
+        });
+        let create_named_disabled_reason = if has_multiple_repositories {
+            Some("Cannot create a named worktree in a project with multiple repositories".into())
+        } else if has_named_worktree {
+            Some("A worktree with this name already exists".into())
+        } else {
+            None
+        };
+
+        let mut matches = vec![
+            ThreadWorktreeEntry::CurrentWorktree,
+            ThreadWorktreeEntry::NewWorktree,
+        ];
+
+        if query.is_empty() {
+            if !linked_worktrees.is_empty() {
+                matches.push(ThreadWorktreeEntry::Separator);
+            }
+            for worktree in &linked_worktrees {
+                matches.push(ThreadWorktreeEntry::LinkedWorktree {
+                    worktree: worktree.clone(),
+                    positions: Vec::new(),
+                });
+            }
+        } else if linked_worktrees.is_empty() {
+            matches.push(ThreadWorktreeEntry::Separator);
+            matches.push(ThreadWorktreeEntry::CreateNamed {
+                name: normalized_query,
+                disabled_reason: create_named_disabled_reason,
+            });
+        } else {
+            let candidates: Vec<_> = linked_worktrees
+                .iter()
+                .enumerate()
+                .map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.display_name()))
+                .collect();
+
+            let executor = cx.background_executor().clone();
+            let query_clone = query.clone();
+
+            let task = cx.background_executor().spawn(async move {
+                fuzzy::match_strings(
+                    &candidates,
+                    &query_clone,
+                    true,
+                    true,
+                    10000,
+                    &Default::default(),
+                    executor,
+                )
+                .await
+            });
+
+            let linked_worktrees_clone = linked_worktrees;
+            return cx.spawn_in(window, async move |picker, cx| {
+                let fuzzy_matches = task.await;
+
+                picker
+                    .update_in(cx, |picker, _window, cx| {
+                        let mut new_matches = vec![
+                            ThreadWorktreeEntry::CurrentWorktree,
+                            ThreadWorktreeEntry::NewWorktree,
+                        ];
+
+                        let has_extra_entries = !fuzzy_matches.is_empty();
+
+                        if has_extra_entries {
+                            new_matches.push(ThreadWorktreeEntry::Separator);
+                        }
+
+                        for candidate in &fuzzy_matches {
+                            new_matches.push(ThreadWorktreeEntry::LinkedWorktree {
+                                worktree: linked_worktrees_clone[candidate.candidate_id].clone(),
+                                positions: candidate.positions.clone(),
+                            });
+                        }
+
+                        let has_exact_match = linked_worktrees_clone
+                            .iter()
+                            .any(|worktree| worktree.display_name() == query);
+
+                        if !has_exact_match {
+                            if !has_extra_entries {
+                                new_matches.push(ThreadWorktreeEntry::Separator);
+                            }
+                            new_matches.push(ThreadWorktreeEntry::CreateNamed {
+                                name: normalized_query.clone(),
+                                disabled_reason: create_named_disabled_reason.clone(),
+                            });
+                        }
+
+                        picker.delegate.matches = new_matches;
+                        picker.delegate.sync_selected_index(true);
+
+                        cx.notify();
+                    })
+                    .log_err();
+            });
+        }
+
+        self.matches = matches;
+        self.sync_selected_index(!query.is_empty());
+
+        Task::ready(())
+    }
+
+    fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+        let Some(entry) = self.matches.get(self.selected_index) else {
+            return;
+        };
+
+        match entry {
+            ThreadWorktreeEntry::Separator => return,
+            ThreadWorktreeEntry::CurrentWorktree => {
+                if secondary {
+                    update_settings_file(self.fs.clone(), cx, |settings, _| {
+                        settings
+                            .agent
+                            .get_or_insert_default()
+                            .set_new_thread_location(NewThreadLocation::LocalProject);
+                    });
+                }
+                window.dispatch_action(Box::new(StartThreadIn::LocalProject), cx);
+            }
+            ThreadWorktreeEntry::NewWorktree => {
+                if secondary {
+                    update_settings_file(self.fs.clone(), cx, |settings, _| {
+                        settings
+                            .agent
+                            .get_or_insert_default()
+                            .set_new_thread_location(NewThreadLocation::NewWorktree);
+                    });
+                }
+                window.dispatch_action(Box::new(self.new_worktree_action(None)), cx);
+            }
+            ThreadWorktreeEntry::LinkedWorktree { worktree, .. } => {
+                window.dispatch_action(
+                    Box::new(StartThreadIn::LinkedWorktree {
+                        path: worktree.path.clone(),
+                        display_name: worktree.display_name().to_string(),
+                    }),
+                    cx,
+                );
+            }
+            ThreadWorktreeEntry::CreateNamed {
+                name,
+                disabled_reason: None,
+            } => {
+                window.dispatch_action(Box::new(self.new_worktree_action(Some(name.clone()))), cx);
+            }
+            ThreadWorktreeEntry::CreateNamed {
+                disabled_reason: Some(_),
+                ..
+            } => {
+                return;
+            }
+        }
+
+        cx.emit(DismissEvent);
+    }
+
+    fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {}
+
+    fn render_match(
+        &self,
+        ix: usize,
+        selected: bool,
+        _window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Option<Self::ListItem> {
+        let entry = self.matches.get(ix)?;
+        let project = self.project.read(cx);
+        let is_new_worktree_disabled =
+            project.repositories(cx).is_empty() || project.is_via_collab();
+
+        match entry {
+            ThreadWorktreeEntry::Separator => Some(
+                div()
+                    .py(DynamicSpacing::Base04.rems(cx))
+                    .child(Divider::horizontal())
+                    .into_any_element(),
+            ),
+            ThreadWorktreeEntry::CurrentWorktree => {
+                let path_label = project.active_repository(cx).map(|repo| {
+                    let path = repo.read(cx).work_directory_abs_path.clone();
+                    path.compact().to_string_lossy().to_string()
+                });
+
+                Some(
+                    ListItem::new("current-worktree")
+                        .inset(true)
+                        .spacing(ListItemSpacing::Sparse)
+                        .toggle_state(selected)
+                        .child(
+                            v_flex()
+                                .min_w_0()
+                                .overflow_hidden()
+                                .child(Label::new("Current Worktree"))
+                                .when_some(path_label, |this, path| {
+                                    this.child(
+                                        Label::new(path)
+                                            .size(LabelSize::Small)
+                                            .color(Color::Muted)
+                                            .truncate_start(),
+                                    )
+                                }),
+                        )
+                        .into_any_element(),
+                )
+            }
+            ThreadWorktreeEntry::NewWorktree => {
+                let item = ListItem::new("new-worktree")
+                    .inset(true)
+                    .spacing(ListItemSpacing::Sparse)
+                    .toggle_state(selected)
+                    .disabled(is_new_worktree_disabled)
+                    .child(
+                        v_flex()
+                            .min_w_0()
+                            .overflow_hidden()
+                            .child(
+                                Label::new("New Git Worktree")
+                                    .when(is_new_worktree_disabled, |this| {
+                                        this.color(Color::Disabled)
+                                    }),
+                            )
+                            .child(
+                                Label::new("Get a fresh new worktree")
+                                    .size(LabelSize::Small)
+                                    .color(Color::Muted),
+                            ),
+                    );
+
+                Some(
+                    if is_new_worktree_disabled {
+                        item.tooltip(Tooltip::text("Requires a Git repository in the project"))
+                    } else {
+                        item
+                    }
+                    .into_any_element(),
+                )
+            }
+            ThreadWorktreeEntry::LinkedWorktree {
+                worktree,
+                positions,
+            } => {
+                let display_name = worktree.display_name();
+                let first_line = display_name.lines().next().unwrap_or(display_name);
+                let positions: Vec<_> = positions
+                    .iter()
+                    .copied()
+                    .filter(|&pos| pos < first_line.len())
+                    .collect();
+                let path = worktree.path.compact();
+
+                Some(
+                    ListItem::new(SharedString::from(format!("linked-worktree-{ix}")))
+                        .inset(true)
+                        .spacing(ListItemSpacing::Sparse)
+                        .toggle_state(selected)
+                        .child(
+                            v_flex()
+                                .min_w_0()
+                                .overflow_hidden()
+                                .child(
+                                    HighlightedLabel::new(first_line.to_owned(), positions)
+                                        .truncate(),
+                                )
+                                .child(
+                                    Label::new(path.to_string_lossy().to_string())
+                                        .size(LabelSize::Small)
+                                        .color(Color::Muted)
+                                        .truncate_start(),
+                                ),
+                        )
+                        .into_any_element(),
+                )
+            }
+            ThreadWorktreeEntry::CreateNamed {
+                name,
+                disabled_reason,
+            } => {
+                let is_disabled = disabled_reason.is_some();
+                let item = ListItem::new("create-named-worktree")
+                    .inset(true)
+                    .spacing(ListItemSpacing::Sparse)
+                    .toggle_state(selected)
+                    .disabled(is_disabled)
+                    .child(Label::new(format!("Create Worktree: \"{name}\"…")).color(
+                        if is_disabled {
+                            Color::Disabled
+                        } else {
+                            Color::Default
+                        },
+                    ));
+
+                Some(
+                    if let Some(reason) = disabled_reason.clone() {
+                        item.tooltip(Tooltip::text(reason))
+                    } else {
+                        item
+                    }
+                    .into_any_element(),
+                )
+            }
+        }
+    }
+
+    fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
+        None
+    }
+
+    fn documentation_aside(
+        &self,
+        _window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Option<DocumentationAside> {
+        let entry = self.matches.get(self.selected_index)?;
+        let is_default = match entry {
+            ThreadWorktreeEntry::CurrentWorktree => {
+                let new_thread_location = AgentSettings::get_global(cx).new_thread_location;
+                Some(new_thread_location == NewThreadLocation::LocalProject)
+            }
+            ThreadWorktreeEntry::NewWorktree => {
+                let project = self.project.read(cx);
+                let is_disabled = project.repositories(cx).is_empty() || project.is_via_collab();
+                if is_disabled {
+                    None
+                } else {
+                    let new_thread_location = AgentSettings::get_global(cx).new_thread_location;
+                    Some(new_thread_location == NewThreadLocation::NewWorktree)
+                }
+            }
+            _ => None,
+        }?;
+
+        let side = crate::ui::documentation_aside_side(cx);
+
+        Some(DocumentationAside::new(
+            side,
+            Rc::new(move |_| {
+                HoldForDefault::new(is_default)
+                    .more_content(false)
+                    .into_any_element()
+            }),
+        ))
+    }
+
+    fn documentation_aside_index(&self) -> Option<usize> {
+        match self.matches.get(self.selected_index) {
+            Some(ThreadWorktreeEntry::CurrentWorktree | ThreadWorktreeEntry::NewWorktree) => {
+                Some(self.selected_index)
+            }
+            _ => None,
+        }
+    }
+}

crates/agent_ui/src/threads_archive_view.rs 🔗

@@ -1236,6 +1236,7 @@ impl PickerDelegate for ProjectPickerDelegate {
                     },
                     match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
                     paths: Vec::new(),
+                    active: false,
                 };
 
                 Some(

crates/agent_ui/src/ui.rs 🔗

@@ -13,3 +13,16 @@ pub use hold_for_default::*;
 pub use mention_crease::*;
 pub use model_selector_components::*;
 pub use undo_reject_toast::*;
+
+/// Returns the appropriate [`DocumentationSide`] for documentation asides
+/// in the agent panel, based on the current dock position.
+pub fn documentation_aside_side(cx: &gpui::App) -> ui::DocumentationSide {
+    use agent_settings::AgentSettings;
+    use settings::Settings;
+    use ui::DocumentationSide;
+
+    match AgentSettings::get_global(cx).dock {
+        settings::DockPosition::Left => DocumentationSide::Right,
+        settings::DockPosition::Bottom | settings::DockPosition::Right => DocumentationSide::Left,
+    }
+}

crates/ai_onboarding/src/agent_panel_onboarding_card.rs 🔗

@@ -1,6 +1,6 @@
 use gpui::{AnyElement, IntoElement, ParentElement, linear_color_stop, linear_gradient};
 use smallvec::SmallVec;
-use ui::{Vector, VectorName, prelude::*};
+use ui::prelude::*;
 
 #[derive(IntoElement)]
 pub struct AgentPanelOnboardingCard {
@@ -23,61 +23,43 @@ impl ParentElement for AgentPanelOnboardingCard {
 
 impl RenderOnce for AgentPanelOnboardingCard {
     fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
-        div()
-            .m_2p5()
-            .p(px(3.))
-            .elevation_2(cx)
-            .rounded_lg()
-            .bg(cx.theme().colors().background.alpha(0.5))
-            .child(
-                v_flex()
-                    .relative()
-                    .size_full()
-                    .px_4()
-                    .py_3()
-                    .gap_2()
-                    .border_1()
-                    .rounded(px(5.))
-                    .border_color(cx.theme().colors().text.alpha(0.1))
-                    .overflow_hidden()
-                    .bg(cx.theme().colors().panel_background)
-                    .child(
-                        div()
-                            .opacity(0.5)
-                            .absolute()
-                            .top(px(-8.0))
-                            .right_0()
-                            .w(px(400.))
-                            .h(px(92.))
-                            .rounded_md()
-                            .child(
-                                Vector::new(
-                                    VectorName::AiGrid,
-                                    rems_from_px(400.),
-                                    rems_from_px(92.),
-                                )
-                                .color(Color::Custom(cx.theme().colors().text.alpha(0.32))),
-                            ),
-                    )
-                    .child(
-                        div()
-                            .absolute()
-                            .top_0p5()
-                            .right_0p5()
-                            .w(px(660.))
-                            .h(px(401.))
-                            .overflow_hidden()
-                            .rounded_md()
-                            .bg(linear_gradient(
-                                75.,
-                                linear_color_stop(
-                                    cx.theme().colors().panel_background.alpha(0.01),
-                                    1.0,
-                                ),
-                                linear_color_stop(cx.theme().colors().panel_background, 0.45),
-                            )),
-                    )
-                    .children(self.children),
-            )
+        let color = cx.theme().colors();
+
+        div().min_w_0().p_2p5().bg(color.editor_background).child(
+            div()
+                .min_w_0()
+                .p(px(3.))
+                .rounded_lg()
+                .elevation_2(cx)
+                .bg(color.background.opacity(0.5))
+                .child(
+                    v_flex()
+                        .relative()
+                        .size_full()
+                        .min_w_0()
+                        .px_4()
+                        .py_3()
+                        .gap_2()
+                        .border_1()
+                        .rounded(px(5.))
+                        .border_color(color.text.opacity(0.1))
+                        .bg(color.panel_background)
+                        .overflow_hidden()
+                        .child(
+                            div()
+                                .absolute()
+                                .inset_0()
+                                .size_full()
+                                .rounded_md()
+                                .overflow_hidden()
+                                .bg(linear_gradient(
+                                    360.,
+                                    linear_color_stop(color.panel_background, 1.0),
+                                    linear_color_stop(color.editor_background, 0.45),
+                                )),
+                        )
+                        .children(self.children),
+                ),
+        )
     }
 }

crates/ai_onboarding/src/agent_panel_onboarding_content.rs 🔗

@@ -59,25 +59,26 @@ impl Render for AgentPanelOnboarding {
             .read(cx)
             .plan()
             .is_some_and(|plan| plan == Plan::ZedProTrial);
+
         let is_pro_user = self
             .user_store
             .read(cx)
             .plan()
             .is_some_and(|plan| plan == Plan::ZedPro);
 
+        let onboarding = ZedAiOnboarding::new(
+            self.client.clone(),
+            &self.user_store,
+            self.continue_with_zed_ai.clone(),
+            cx,
+        )
+        .with_dismiss({
+            let callback = self.continue_with_zed_ai.clone();
+            move |window, cx| callback(window, cx)
+        });
+
         AgentPanelOnboardingCard::new()
-            .child(
-                ZedAiOnboarding::new(
-                    self.client.clone(),
-                    &self.user_store,
-                    self.continue_with_zed_ai.clone(),
-                    cx,
-                )
-                .with_dismiss({
-                    let callback = self.continue_with_zed_ai.clone();
-                    move |window, cx| callback(window, cx)
-                }),
-            )
+            .child(onboarding)
             .map(|this| {
                 if enrolled_in_trial || is_pro_user || self.has_configured_providers {
                     this

crates/ai_onboarding/src/ai_onboarding.rs 🔗

@@ -1,7 +1,6 @@
 mod agent_api_keys_onboarding;
 mod agent_panel_onboarding_card;
 mod agent_panel_onboarding_content;
-mod ai_upsell_card;
 mod edit_prediction_onboarding_content;
 mod plan_definitions;
 mod young_account_banner;
@@ -9,7 +8,6 @@ mod young_account_banner;
 pub use agent_api_keys_onboarding::{ApiKeysWithProviders, ApiKeysWithoutProviders};
 pub use agent_panel_onboarding_card::AgentPanelOnboardingCard;
 pub use agent_panel_onboarding_content::AgentPanelOnboarding;
-pub use ai_upsell_card::AiUpsellCard;
 use cloud_api_types::Plan;
 pub use edit_prediction_onboarding_content::EditPredictionOnboarding;
 pub use plan_definitions::PlanDefinitions;
@@ -19,7 +17,9 @@ use std::sync::Arc;
 
 use client::{Client, UserStore, zed_urls};
 use gpui::{AnyElement, Entity, IntoElement, ParentElement};
-use ui::{Divider, RegisterComponent, Tooltip, prelude::*};
+use ui::{
+    Divider, List, ListBulletItem, RegisterComponent, Tooltip, Vector, VectorName, prelude::*,
+};
 
 #[derive(PartialEq)]
 pub enum SignInStatus {
@@ -84,6 +84,50 @@ impl ZedAiOnboarding {
         self
     }
 
+    fn certified_user_stamp(cx: &App) -> impl IntoElement {
+        div().absolute().bottom_1().right_1().child(
+            Vector::new(
+                VectorName::ProUserStamp,
+                rems_from_px(156.),
+                rems_from_px(60.),
+            )
+            .color(Color::Custom(cx.theme().colors().text_accent.alpha(0.8))),
+        )
+    }
+
+    fn pro_trial_stamp(cx: &App) -> impl IntoElement {
+        div().absolute().bottom_1().right_1().child(
+            Vector::new(
+                VectorName::ProTrialStamp,
+                rems_from_px(156.),
+                rems_from_px(60.),
+            )
+            .color(Color::Custom(cx.theme().colors().text.alpha(0.8))),
+        )
+    }
+
+    fn business_stamp(cx: &App) -> impl IntoElement {
+        div().absolute().bottom_1().right_1().child(
+            Vector::new(
+                VectorName::BusinessStamp,
+                rems_from_px(156.),
+                rems_from_px(60.),
+            )
+            .color(Color::Custom(cx.theme().colors().text_accent.alpha(0.8))),
+        )
+    }
+
+    fn student_stamp(cx: &App) -> impl IntoElement {
+        div().absolute().bottom_1().right_1().child(
+            Vector::new(
+                VectorName::StudentStamp,
+                rems_from_px(156.),
+                rems_from_px(60.),
+            )
+            .color(Color::Custom(cx.theme().colors().text.alpha(0.8))),
+        )
+    }
+
     fn render_dismiss_button(&self) -> Option<AnyElement> {
         self.dismiss_onboarding.as_ref().map(|dismiss_callback| {
             let callback = dismiss_callback.clone();
@@ -109,6 +153,7 @@ impl ZedAiOnboarding {
         let signing_in = matches!(self.sign_in_status, SignInStatus::SigningIn);
 
         v_flex()
+            .w_full()
             .relative()
             .gap_1()
             .child(Headline::new("Welcome to Zed AI"))
@@ -139,7 +184,7 @@ impl ZedAiOnboarding {
         if self.account_too_young {
             v_flex()
                 .relative()
-                .max_w_full()
+                .min_w_0()
                 .gap_1()
                 .child(Headline::new("Welcome to Zed AI"))
                 .child(YoungAccountBanner)
@@ -175,6 +220,7 @@ impl ZedAiOnboarding {
                 .into_any_element()
         } else {
             v_flex()
+                .w_full()
                 .relative()
                 .gap_1()
                 .child(Headline::new("Welcome to Zed AI"))
@@ -237,10 +283,12 @@ impl ZedAiOnboarding {
         }
     }
 
-    fn render_trial_state(&self, _cx: &mut App) -> AnyElement {
+    fn render_trial_state(&self, cx: &mut App) -> AnyElement {
         v_flex()
+            .w_full()
             .relative()
             .gap_1()
+            .child(Self::pro_trial_stamp(cx))
             .child(Headline::new("Welcome to the Zed Pro Trial"))
             .child(
                 Label::new("Here's what you get for the next 14 days:")
@@ -252,9 +300,12 @@ impl ZedAiOnboarding {
             .into_any_element()
     }
 
-    fn render_pro_plan_state(&self, _cx: &mut App) -> AnyElement {
+    fn render_pro_plan_state(&self, cx: &mut App) -> AnyElement {
         v_flex()
+            .w_full()
+            .relative()
             .gap_1()
+            .child(Self::certified_user_stamp(cx))
             .child(Headline::new("Welcome to Zed Pro"))
             .child(
                 Label::new("Here's what you get:")
@@ -266,9 +317,12 @@ impl ZedAiOnboarding {
             .into_any_element()
     }
 
-    fn render_business_plan_state(&self, _cx: &mut App) -> AnyElement {
+    fn render_business_plan_state(&self, cx: &mut App) -> AnyElement {
         v_flex()
+            .w_full()
+            .relative()
             .gap_1()
+            .child(Self::business_stamp(cx))
             .child(Headline::new("Welcome to Zed Business"))
             .child(
                 Label::new("Here's what you get:")
@@ -280,9 +334,12 @@ impl ZedAiOnboarding {
             .into_any_element()
     }
 
-    fn render_student_plan_state(&self, _cx: &mut App) -> AnyElement {
+    fn render_student_plan_state(&self, cx: &mut App) -> AnyElement {
         v_flex()
+            .w_full()
+            .relative()
             .gap_1()
+            .child(Self::student_stamp(cx))
             .child(Headline::new("Welcome to Zed Student"))
             .child(
                 Label::new("Here's what you get:")
@@ -318,11 +375,7 @@ impl Component for ZedAiOnboarding {
     }
 
     fn name() -> &'static str {
-        "Agent Panel Banners"
-    }
-
-    fn sort_name() -> &'static str {
-        "Agent Panel Banners"
+        "Agent New User Onboarding"
     }
 
     fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
@@ -331,22 +384,30 @@ impl Component for ZedAiOnboarding {
             plan: Option<Plan>,
             account_too_young: bool,
         ) -> AnyElement {
-            ZedAiOnboarding {
-                sign_in_status,
-                plan,
-                account_too_young,
-                continue_with_zed_ai: Arc::new(|_, _| {}),
-                sign_in: Arc::new(|_, _| {}),
-                dismiss_onboarding: None,
-            }
-            .into_any_element()
+            div()
+                .w_full()
+                .min_w_40()
+                .max_w(px(1100.))
+                .child(
+                    AgentPanelOnboardingCard::new().child(
+                        ZedAiOnboarding {
+                            sign_in_status,
+                            plan,
+                            account_too_young,
+                            continue_with_zed_ai: Arc::new(|_, _| {}),
+                            sign_in: Arc::new(|_, _| {}),
+                            dismiss_onboarding: None,
+                        }
+                        .into_any_element(),
+                    ),
+                )
+                .into_any_element()
         }
 
         Some(
             v_flex()
+                .min_w_0()
                 .gap_4()
-                .items_center()
-                .max_w_4_5()
                 .children(vec![
                     single_example(
                         "Not Signed-in",
@@ -381,3 +442,119 @@ impl Component for ZedAiOnboarding {
         )
     }
 }
+
+#[derive(RegisterComponent)]
+pub struct AgentLayoutOnboarding {
+    pub use_agent_layout: Arc<dyn Fn(&mut Window, &mut App)>,
+    pub revert_to_editor_layout: Arc<dyn Fn(&mut Window, &mut App)>,
+    pub dismissed: Arc<dyn Fn(&mut Window, &mut App)>,
+    pub is_agent_layout: bool,
+}
+
+impl Render for AgentLayoutOnboarding {
+    fn render(&mut self, _window: &mut ui::Window, _cx: &mut Context<Self>) -> impl IntoElement {
+        let description = "The new threads sidebar, positioned in the far left of your workspace, allows you to manage agents across many projects. Your agent thread lives alongside it, and all other panels live on the right.";
+
+        let dismiss_button = div().absolute().top_1().right_1().child(
+            IconButton::new("dismiss", IconName::Close)
+                .icon_size(IconSize::Small)
+                .on_click({
+                    let dismiss = self.dismissed.clone();
+                    move |_, window, cx| {
+                        telemetry::event!("Agentic Layout Onboarding Dismissed");
+                        dismiss(window, cx)
+                    }
+                }),
+        );
+
+        let primary_button = if self.is_agent_layout {
+            Button::new("revert", "Use Previous Layout")
+                .label_size(LabelSize::Small)
+                .style(ButtonStyle::Outlined)
+                .on_click({
+                    let revert = self.revert_to_editor_layout.clone();
+                    let dismiss = self.dismissed.clone();
+                    move |_, window, cx| {
+                        telemetry::event!("Clicked to Use Previous Layout");
+                        revert(window, cx);
+                        dismiss(window, cx);
+                    }
+                })
+        } else {
+            Button::new("start", "Use New Layout")
+                .label_size(LabelSize::Small)
+                .style(ButtonStyle::Outlined)
+                .on_click({
+                    let use_layout = self.use_agent_layout.clone();
+                    let dismiss = self.dismissed.clone();
+                    move |_, window, cx| {
+                        telemetry::event!("Clicked to Use New Layout");
+                        use_layout(window, cx);
+                        dismiss(window, cx);
+                    }
+                })
+        };
+
+        let content = v_flex()
+            .min_w_0()
+            .w_full()
+            .relative()
+            .gap_1()
+            .child(Label::new("A new workspace layout for agentic work"))
+            .child(Label::new(description).color(Color::Muted).mb_2())
+            .child(
+                List::new()
+                    .child(ListBulletItem::new("Use your favorite agents in parallel"))
+                    .child(ListBulletItem::new("Isolate agents using worktrees"))
+                    .child(ListBulletItem::new(
+                        "Combine multiple projects in one window",
+                    )),
+            )
+            .child(
+                h_flex()
+                    .w_full()
+                    .gap_1()
+                    .flex_wrap()
+                    .justify_end()
+                    .child(primary_button),
+            )
+            .child(dismiss_button);
+
+        AgentPanelOnboardingCard::new().child(content)
+    }
+}
+
+impl Component for AgentLayoutOnboarding {
+    fn scope() -> ComponentScope {
+        ComponentScope::Onboarding
+    }
+
+    fn name() -> &'static str {
+        "Agent Layout Onboarding"
+    }
+
+    fn preview(_window: &mut Window, cx: &mut App) -> Option<AnyElement> {
+        let onboarding = cx.new(|_cx| AgentLayoutOnboarding {
+            use_agent_layout: Arc::new(|_, _| {}),
+            revert_to_editor_layout: Arc::new(|_, _| {}),
+            dismissed: Arc::new(|_, _| {}),
+            is_agent_layout: false,
+        });
+
+        Some(
+            v_flex()
+                .min_w_0()
+                .gap_4()
+                .child(single_example(
+                    "Agent Layout Onboarding",
+                    div()
+                        .w_full()
+                        .min_w_40()
+                        .max_w(px(1100.))
+                        .child(onboarding)
+                        .into_any_element(),
+                ))
+                .into_any_element(),
+        )
+    }
+}

crates/ai_onboarding/src/ai_upsell_card.rs 🔗

@@ -1,407 +0,0 @@
-use std::sync::Arc;
-
-use client::{Client, UserStore, zed_urls};
-use cloud_api_types::Plan;
-use gpui::{AnyElement, App, Entity, IntoElement, RenderOnce, Window};
-use ui::{CommonAnimationExt, Divider, Vector, VectorName, prelude::*};
-
-use crate::{SignInStatus, YoungAccountBanner, plan_definitions::PlanDefinitions};
-
-#[derive(IntoElement, RegisterComponent)]
-pub struct AiUpsellCard {
-    sign_in_status: SignInStatus,
-    sign_in: Arc<dyn Fn(&mut Window, &mut App)>,
-    account_too_young: bool,
-    user_plan: Option<Plan>,
-    tab_index: Option<isize>,
-}
-
-impl AiUpsellCard {
-    pub fn new(
-        client: Arc<Client>,
-        user_store: &Entity<UserStore>,
-        user_plan: Option<Plan>,
-        cx: &mut App,
-    ) -> Self {
-        let status = *client.status().borrow();
-        let store = user_store.read(cx);
-
-        Self {
-            user_plan,
-            sign_in_status: status.into(),
-            sign_in: Arc::new(move |_window, cx| {
-                cx.spawn({
-                    let client = client.clone();
-                    async move |cx| client.sign_in_with_optional_connect(true, cx).await
-                })
-                .detach_and_log_err(cx);
-            }),
-            account_too_young: store.account_too_young(),
-            tab_index: None,
-        }
-    }
-
-    pub fn tab_index(mut self, tab_index: Option<isize>) -> Self {
-        self.tab_index = tab_index;
-        self
-    }
-}
-
-impl RenderOnce for AiUpsellCard {
-    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
-        let pro_section = v_flex()
-            .flex_grow()
-            .w_full()
-            .gap_1()
-            .child(
-                h_flex()
-                    .gap_2()
-                    .child(
-                        Label::new("Pro")
-                            .size(LabelSize::Small)
-                            .color(Color::Accent)
-                            .buffer_font(cx),
-                    )
-                    .child(Divider::horizontal()),
-            )
-            .child(PlanDefinitions.pro_plan());
-
-        let free_section = v_flex()
-            .flex_grow()
-            .w_full()
-            .gap_1()
-            .child(
-                h_flex()
-                    .gap_2()
-                    .child(
-                        Label::new("Free")
-                            .size(LabelSize::Small)
-                            .color(Color::Muted)
-                            .buffer_font(cx),
-                    )
-                    .child(Divider::horizontal()),
-            )
-            .child(PlanDefinitions.free_plan());
-
-        let grid_bg = h_flex()
-            .absolute()
-            .inset_0()
-            .w_full()
-            .h(px(240.))
-            .bg(gpui::pattern_slash(
-                cx.theme().colors().border.opacity(0.1),
-                2.,
-                25.,
-            ));
-
-        let gradient_bg = div()
-            .absolute()
-            .inset_0()
-            .size_full()
-            .bg(gpui::linear_gradient(
-                180.,
-                gpui::linear_color_stop(
-                    cx.theme().colors().elevated_surface_background.opacity(0.8),
-                    0.,
-                ),
-                gpui::linear_color_stop(
-                    cx.theme().colors().elevated_surface_background.opacity(0.),
-                    0.8,
-                ),
-            ));
-
-        let description = PlanDefinitions::AI_DESCRIPTION;
-
-        let card = v_flex()
-            .relative()
-            .flex_grow()
-            .p_4()
-            .pt_3()
-            .border_1()
-            .border_color(cx.theme().colors().border)
-            .rounded_lg()
-            .overflow_hidden()
-            .child(grid_bg)
-            .child(gradient_bg);
-
-        let plans_section = h_flex()
-            .w_full()
-            .mt_1p5()
-            .mb_2p5()
-            .items_start()
-            .gap_6()
-            .child(free_section)
-            .child(pro_section);
-
-        let footer_container = v_flex().items_center().gap_1();
-
-        let certified_user_stamp = div()
-            .absolute()
-            .top_2()
-            .right_2()
-            .size(rems_from_px(72.))
-            .child(
-                Vector::new(
-                    VectorName::ProUserStamp,
-                    rems_from_px(72.),
-                    rems_from_px(72.),
-                )
-                .color(Color::Custom(cx.theme().colors().text_accent.alpha(0.3)))
-                .with_rotate_animation(10),
-            );
-
-        let pro_trial_stamp = div()
-            .absolute()
-            .top_2()
-            .right_2()
-            .size(rems_from_px(72.))
-            .child(
-                Vector::new(
-                    VectorName::ProTrialStamp,
-                    rems_from_px(72.),
-                    rems_from_px(72.),
-                )
-                .color(Color::Custom(cx.theme().colors().text.alpha(0.2))),
-            );
-
-        match self.sign_in_status {
-            SignInStatus::SignedIn => match self.user_plan {
-                None | Some(Plan::ZedFree) => card
-                    .child(Label::new("Try Zed AI").size(LabelSize::Large))
-                    .map(|this| {
-                        if self.account_too_young {
-                            this.child(YoungAccountBanner).child(
-                                v_flex()
-                                    .mt_2()
-                                    .gap_1()
-                                    .child(
-                                        h_flex()
-                                            .gap_2()
-                                            .child(
-                                                Label::new("Pro")
-                                                    .size(LabelSize::Small)
-                                                    .color(Color::Accent)
-                                                    .buffer_font(cx),
-                                            )
-                                            .child(Divider::horizontal()),
-                                    )
-                                    .child(PlanDefinitions.pro_plan())
-                                    .child(
-                                        Button::new("pro", "Get Started")
-                                            .full_width()
-                                            .style(ButtonStyle::Tinted(ui::TintColor::Accent))
-                                            .on_click(move |_, _window, cx| {
-                                                telemetry::event!(
-                                                    "Upgrade To Pro Clicked",
-                                                    state = "young-account"
-                                                );
-                                                cx.open_url(&zed_urls::upgrade_to_zed_pro_url(cx))
-                                            }),
-                                    ),
-                            )
-                        } else {
-                            this.child(
-                                div()
-                                    .max_w_3_4()
-                                    .mb_2()
-                                    .child(Label::new(description).color(Color::Muted)),
-                            )
-                            .child(plans_section)
-                            .child(
-                                footer_container
-                                    .child(
-                                        Button::new("start_trial", "Start Pro Trial")
-                                            .full_width()
-                                            .style(ButtonStyle::Tinted(ui::TintColor::Accent))
-                                            .when_some(self.tab_index, |this, tab_index| {
-                                                this.tab_index(tab_index)
-                                            })
-                                            .on_click(move |_, _window, cx| {
-                                                telemetry::event!(
-                                                    "Start Trial Clicked",
-                                                    state = "post-sign-in"
-                                                );
-                                                cx.open_url(&zed_urls::start_trial_url(cx))
-                                            }),
-                                    )
-                                    .child(
-                                        Label::new("14 days, no credit card required")
-                                            .size(LabelSize::Small)
-                                            .color(Color::Muted),
-                                    ),
-                            )
-                        }
-                    }),
-                Some(Plan::ZedProTrial) => card
-                    .child(pro_trial_stamp)
-                    .child(Label::new("You're in the Zed Pro Trial").size(LabelSize::Large))
-                    .child(
-                        Label::new("Here's what you get for the next 14 days:")
-                            .color(Color::Muted)
-                            .mb_2(),
-                    )
-                    .child(PlanDefinitions.pro_trial(false)),
-                Some(Plan::ZedPro) => card
-                    .child(certified_user_stamp)
-                    .child(Label::new("You're in the Zed Pro plan").size(LabelSize::Large))
-                    .child(
-                        Label::new("Here's what you get:")
-                            .color(Color::Muted)
-                            .mb_2(),
-                    )
-                    .child(PlanDefinitions.pro_plan()),
-                Some(Plan::ZedBusiness) => card
-                    .child(certified_user_stamp)
-                    .child(Label::new("You're in the Zed Business plan").size(LabelSize::Large))
-                    .child(
-                        Label::new("Here's what you get:")
-                            .color(Color::Muted)
-                            .mb_2(),
-                    )
-                    .child(PlanDefinitions.business_plan()),
-                Some(Plan::ZedStudent) => card
-                    .child(certified_user_stamp)
-                    .child(Label::new("You're in the Zed Student plan").size(LabelSize::Large))
-                    .child(
-                        Label::new("Here's what you get:")
-                            .color(Color::Muted)
-                            .mb_2(),
-                    )
-                    .child(PlanDefinitions.student_plan()),
-            },
-            // Signed Out State
-            _ => card
-                .child(Label::new("Try Zed AI").size(LabelSize::Large))
-                .child(
-                    div()
-                        .max_w_3_4()
-                        .mb_2()
-                        .child(Label::new(description).color(Color::Muted)),
-                )
-                .child(plans_section)
-                .child(
-                    Button::new("sign_in", "Sign In")
-                        .full_width()
-                        .style(ButtonStyle::Tinted(ui::TintColor::Accent))
-                        .when_some(self.tab_index, |this, tab_index| this.tab_index(tab_index))
-                        .on_click({
-                            let callback = self.sign_in.clone();
-                            move |_, window, cx| {
-                                telemetry::event!("Start Trial Clicked", state = "pre-sign-in");
-                                callback(window, cx)
-                            }
-                        }),
-                ),
-        }
-    }
-}
-
-impl Component for AiUpsellCard {
-    fn scope() -> ComponentScope {
-        ComponentScope::Onboarding
-    }
-
-    fn name() -> &'static str {
-        "AI Upsell Card"
-    }
-
-    fn sort_name() -> &'static str {
-        "AI Upsell Card"
-    }
-
-    fn description() -> Option<&'static str> {
-        Some("A card presenting the Zed AI product during user's first-open onboarding flow.")
-    }
-
-    fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
-        Some(
-            v_flex()
-                .gap_4()
-                .items_center()
-                .max_w_4_5()
-                .child(single_example(
-                    "Signed Out State",
-                    AiUpsellCard {
-                        sign_in_status: SignInStatus::SignedOut,
-                        sign_in: Arc::new(|_, _| {}),
-                        account_too_young: false,
-                        user_plan: None,
-                        tab_index: Some(0),
-                    }
-                    .into_any_element(),
-                ))
-                .child(example_group_with_title(
-                    "Signed In States",
-                    vec![
-                        single_example(
-                            "Free Plan",
-                            AiUpsellCard {
-                                sign_in_status: SignInStatus::SignedIn,
-                                sign_in: Arc::new(|_, _| {}),
-                                account_too_young: false,
-                                user_plan: Some(Plan::ZedFree),
-                                tab_index: Some(1),
-                            }
-                            .into_any_element(),
-                        ),
-                        single_example(
-                            "Free Plan but Young Account",
-                            AiUpsellCard {
-                                sign_in_status: SignInStatus::SignedIn,
-                                sign_in: Arc::new(|_, _| {}),
-                                account_too_young: true,
-                                user_plan: Some(Plan::ZedFree),
-                                tab_index: Some(1),
-                            }
-                            .into_any_element(),
-                        ),
-                        single_example(
-                            "Pro Trial",
-                            AiUpsellCard {
-                                sign_in_status: SignInStatus::SignedIn,
-                                sign_in: Arc::new(|_, _| {}),
-                                account_too_young: false,
-                                user_plan: Some(Plan::ZedProTrial),
-                                tab_index: Some(1),
-                            }
-                            .into_any_element(),
-                        ),
-                        single_example(
-                            "Pro Plan",
-                            AiUpsellCard {
-                                sign_in_status: SignInStatus::SignedIn,
-                                sign_in: Arc::new(|_, _| {}),
-                                account_too_young: false,
-                                user_plan: Some(Plan::ZedPro),
-                                tab_index: Some(1),
-                            }
-                            .into_any_element(),
-                        ),
-                        single_example(
-                            "Business Plan",
-                            AiUpsellCard {
-                                sign_in_status: SignInStatus::SignedIn,
-                                sign_in: Arc::new(|_, _| {}),
-                                account_too_young: false,
-                                user_plan: Some(Plan::ZedBusiness),
-                                tab_index: Some(1),
-                            }
-                            .into_any_element(),
-                        ),
-                        single_example(
-                            "Student Plan",
-                            AiUpsellCard {
-                                sign_in_status: SignInStatus::SignedIn,
-                                sign_in: Arc::new(|_, _| {}),
-                                account_too_young: false,
-                                user_plan: Some(Plan::ZedStudent),
-                                tab_index: Some(1),
-                            }
-                            .into_any_element(),
-                        ),
-                    ],
-                ))
-                .into_any_element(),
-        )
-    }
-}

crates/ai_onboarding/src/plan_definitions.rs 🔗

@@ -5,23 +5,19 @@ use ui::{List, ListBulletItem, prelude::*};
 pub struct PlanDefinitions;
 
 impl PlanDefinitions {
-    pub const AI_DESCRIPTION: &'static str = "Zed offers a complete agentic experience, with robust editing and reviewing features to collaborate with AI.";
-
     pub fn free_plan(&self) -> impl IntoElement {
         List::new()
             .child(ListBulletItem::new("2,000 accepted edit predictions"))
             .child(ListBulletItem::new(
                 "Unlimited prompts with your AI API keys",
             ))
-            .child(ListBulletItem::new(
-                "Unlimited use of external agents like Claude Agent",
-            ))
+            .child(ListBulletItem::new("Unlimited use of external agents"))
     }
 
     pub fn pro_trial(&self, period: bool) -> impl IntoElement {
         List::new()
+            .child(ListBulletItem::new("$20 of tokens in Zed agent"))
             .child(ListBulletItem::new("Unlimited edit predictions"))
-            .child(ListBulletItem::new("$20 of tokens"))
             .when(period, |this| {
                 this.child(ListBulletItem::new(
                     "Try it out for 14 days, no credit card required",
@@ -31,9 +27,9 @@ impl PlanDefinitions {
 
     pub fn pro_plan(&self) -> impl IntoElement {
         List::new()
-            .child(ListBulletItem::new("Unlimited edit predictions"))
-            .child(ListBulletItem::new("$5 of tokens"))
+            .child(ListBulletItem::new("$5 of tokens in Zed agent"))
             .child(ListBulletItem::new("Usage-based billing beyond $5"))
+            .child(ListBulletItem::new("Unlimited edit predictions"))
     }
 
     pub fn business_plan(&self) -> impl IntoElement {
@@ -45,7 +41,7 @@ impl PlanDefinitions {
     pub fn student_plan(&self) -> impl IntoElement {
         List::new()
             .child(ListBulletItem::new("Unlimited edit predictions"))
-            .child(ListBulletItem::new("$10 of tokens"))
+            .child(ListBulletItem::new("$10 of tokens in Zed agent"))
             .child(ListBulletItem::new(
                 "Optional credit packs for additional usage",
             ))

crates/anthropic/Cargo.toml 🔗

@@ -18,12 +18,16 @@ path = "src/anthropic.rs"
 [dependencies]
 anyhow.workspace = true
 chrono.workspace = true
+collections.workspace = true
 futures.workspace = true
 http_client.workspace = true
+language_model_core.workspace = true
+log.workspace = true
 schemars = { workspace = true, optional = true }
 serde.workspace = true
 serde_json.workspace = true
 strum.workspace = true
 thiserror.workspace = true
+tiktoken-rs.workspace = true
 
 

crates/anthropic/src/anthropic.rs 🔗

@@ -12,6 +12,7 @@ use strum::{EnumIter, EnumString};
 use thiserror::Error;
 
 pub mod batches;
+pub mod completion;
 
 pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";
 
@@ -1026,6 +1027,89 @@ pub async fn count_tokens(
     }
 }
 
+// -- Conversions from/to `language_model_core` types --
+
+impl From<language_model_core::Speed> for Speed {
+    fn from(speed: language_model_core::Speed) -> Self {
+        match speed {
+            language_model_core::Speed::Standard => Speed::Standard,
+            language_model_core::Speed::Fast => Speed::Fast,
+        }
+    }
+}
+
+impl From<AnthropicError> for language_model_core::LanguageModelCompletionError {
+    fn from(error: AnthropicError) -> Self {
+        let provider = language_model_core::ANTHROPIC_PROVIDER_NAME;
+        match error {
+            AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+            AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+            AnthropicError::HttpSend(error) => Self::HttpSend { provider, error },
+            AnthropicError::DeserializeResponse(error) => {
+                Self::DeserializeResponse { provider, error }
+            }
+            AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+            AnthropicError::HttpResponseError {
+                status_code,
+                message,
+            } => Self::HttpResponseError {
+                provider,
+                status_code,
+                message,
+            },
+            AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded {
+                provider,
+                retry_after: Some(retry_after),
+            },
+            AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+                provider,
+                retry_after,
+            },
+            AnthropicError::ApiError(api_error) => api_error.into(),
+        }
+    }
+}
+
+impl From<ApiError> for language_model_core::LanguageModelCompletionError {
+    fn from(error: ApiError) -> Self {
+        use ApiErrorCode::*;
+        let provider = language_model_core::ANTHROPIC_PROVIDER_NAME;
+        match error.code() {
+            Some(code) => match code {
+                InvalidRequestError => Self::BadRequestFormat {
+                    provider,
+                    message: error.message,
+                },
+                AuthenticationError => Self::AuthenticationError {
+                    provider,
+                    message: error.message,
+                },
+                PermissionError => Self::PermissionError {
+                    provider,
+                    message: error.message,
+                },
+                NotFoundError => Self::ApiEndpointNotFound { provider },
+                RequestTooLarge => Self::PromptTooLarge {
+                    tokens: language_model_core::parse_prompt_too_long(&error.message),
+                },
+                RateLimitError => Self::RateLimitExceeded {
+                    provider,
+                    retry_after: None,
+                },
+                ApiError => Self::ApiInternalServerError {
+                    provider,
+                    message: error.message,
+                },
+                OverloadedError => Self::ServerOverloaded {
+                    provider,
+                    retry_after: None,
+                },
+            },
+            None => Self::Other(error.into()),
+        }
+    }
+}
+
 #[test]
 fn test_match_window_exceeded() {
     let error = ApiError {

crates/anthropic/src/completion.rs 🔗

@@ -0,0 +1,765 @@
+use anyhow::Result;
+use collections::HashMap;
+use futures::{Stream, StreamExt};
+use language_model_core::{
+    LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRequest,
+    LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
+    Role, StopReason, TokenUsage,
+    util::{fix_streamed_json, parse_tool_arguments},
+};
+use std::pin::Pin;
+use std::str::FromStr;
+
+use crate::{
+    AnthropicError, AnthropicModelMode, CacheControl, CacheControlType, ContentDelta,
+    CountTokensRequest, Event, ImageSource, Message, RequestContent, ResponseContent,
+    StringOrContents, Thinking, Tool, ToolChoice, ToolResultContent, ToolResultPart, Usage,
+};
+
+fn to_anthropic_content(content: MessageContent) -> Option<RequestContent> {
+    match content {
+        MessageContent::Text(text) => {
+            let text = if text.chars().last().is_some_and(|c| c.is_whitespace()) {
+                text.trim_end().to_string()
+            } else {
+                text
+            };
+            if !text.is_empty() {
+                Some(RequestContent::Text {
+                    text,
+                    cache_control: None,
+                })
+            } else {
+                None
+            }
+        }
+        MessageContent::Thinking {
+            text: thinking,
+            signature,
+        } => {
+            if let Some(signature) = signature
+                && !thinking.is_empty()
+            {
+                Some(RequestContent::Thinking {
+                    thinking,
+                    signature,
+                    cache_control: None,
+                })
+            } else {
+                None
+            }
+        }
+        MessageContent::RedactedThinking(data) => {
+            if !data.is_empty() {
+                Some(RequestContent::RedactedThinking { data })
+            } else {
+                None
+            }
+        }
+        MessageContent::Image(image) => Some(RequestContent::Image {
+            source: ImageSource {
+                source_type: "base64".to_string(),
+                media_type: "image/png".to_string(),
+                data: image.source.to_string(),
+            },
+            cache_control: None,
+        }),
+        MessageContent::ToolUse(tool_use) => Some(RequestContent::ToolUse {
+            id: tool_use.id.to_string(),
+            name: tool_use.name.to_string(),
+            input: tool_use.input,
+            cache_control: None,
+        }),
+        MessageContent::ToolResult(tool_result) => Some(RequestContent::ToolResult {
+            tool_use_id: tool_result.tool_use_id.to_string(),
+            is_error: tool_result.is_error,
+            content: match tool_result.content {
+                LanguageModelToolResultContent::Text(text) => {
+                    ToolResultContent::Plain(text.to_string())
+                }
+                LanguageModelToolResultContent::Image(image) => {
+                    ToolResultContent::Multipart(vec![ToolResultPart::Image {
+                        source: ImageSource {
+                            source_type: "base64".to_string(),
+                            media_type: "image/png".to_string(),
+                            data: image.source.to_string(),
+                        },
+                    }])
+                }
+            },
+            cache_control: None,
+        }),
+    }
+}
+
+/// Convert a LanguageModelRequest to an Anthropic CountTokensRequest.
+pub fn into_anthropic_count_tokens_request(
+    request: LanguageModelRequest,
+    model: String,
+    mode: AnthropicModelMode,
+) -> CountTokensRequest {
+    let mut new_messages: Vec<Message> = Vec::new();
+    let mut system_message = String::new();
+
+    for message in request.messages {
+        if message.contents_empty() {
+            continue;
+        }
+
+        match message.role {
+            Role::User | Role::Assistant => {
+                let anthropic_message_content: Vec<RequestContent> = message
+                    .content
+                    .into_iter()
+                    .filter_map(to_anthropic_content)
+                    .collect();
+                let anthropic_role = match message.role {
+                    Role::User => crate::Role::User,
+                    Role::Assistant => crate::Role::Assistant,
+                    Role::System => unreachable!("System role should never occur here"),
+                };
+                if anthropic_message_content.is_empty() {
+                    continue;
+                }
+
+                if let Some(last_message) = new_messages.last_mut()
+                    && last_message.role == anthropic_role
+                {
+                    last_message.content.extend(anthropic_message_content);
+                    continue;
+                }
+
+                new_messages.push(Message {
+                    role: anthropic_role,
+                    content: anthropic_message_content,
+                });
+            }
+            Role::System => {
+                if !system_message.is_empty() {
+                    system_message.push_str("\n\n");
+                }
+                system_message.push_str(&message.string_contents());
+            }
+        }
+    }
+
+    CountTokensRequest {
+        model,
+        messages: new_messages,
+        system: if system_message.is_empty() {
+            None
+        } else {
+            Some(StringOrContents::String(system_message))
+        },
+        thinking: if request.thinking_allowed {
+            match mode {
+                AnthropicModelMode::Thinking { budget_tokens } => {
+                    Some(Thinking::Enabled { budget_tokens })
+                }
+                AnthropicModelMode::AdaptiveThinking => Some(Thinking::Adaptive),
+                AnthropicModelMode::Default => None,
+            }
+        } else {
+            None
+        },
+        tools: request
+            .tools
+            .into_iter()
+            .map(|tool| Tool {
+                name: tool.name,
+                description: tool.description,
+                input_schema: tool.input_schema,
+                eager_input_streaming: tool.use_input_streaming,
+            })
+            .collect(),
+        tool_choice: request.tool_choice.map(|choice| match choice {
+            LanguageModelToolChoice::Auto => ToolChoice::Auto,
+            LanguageModelToolChoice::Any => ToolChoice::Any,
+            LanguageModelToolChoice::None => ToolChoice::None,
+        }),
+    }
+}
+
+/// Estimate tokens using tiktoken. Used as a fallback when the API is unavailable,
+/// or by providers (like Zed Cloud) that don't have direct Anthropic API access.
+pub fn count_anthropic_tokens_with_tiktoken(request: LanguageModelRequest) -> Result<u64> {
+    let messages = request.messages;
+    let mut tokens_from_images = 0;
+    let mut string_messages = Vec::with_capacity(messages.len());
+
+    for message in messages {
+        let mut string_contents = String::new();
+
+        for content in message.content {
+            match content {
+                MessageContent::Text(text) => {
+                    string_contents.push_str(&text);
+                }
+                MessageContent::Thinking { .. } => {
+                    // Thinking blocks are not included in the input token count.
+                }
+                MessageContent::RedactedThinking(_) => {
+                    // Thinking blocks are not included in the input token count.
+                }
+                MessageContent::Image(image) => {
+                    tokens_from_images += image.estimate_tokens();
+                }
+                MessageContent::ToolUse(_tool_use) => {
+                    // TODO: Estimate token usage from tool uses.
+                }
+                MessageContent::ToolResult(tool_result) => match &tool_result.content {
+                    LanguageModelToolResultContent::Text(text) => {
+                        string_contents.push_str(text);
+                    }
+                    LanguageModelToolResultContent::Image(image) => {
+                        tokens_from_images += image.estimate_tokens();
+                    }
+                },
+            }
+        }
+
+        if !string_contents.is_empty() {
+            string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
+                role: match message.role {
+                    Role::User => "user".into(),
+                    Role::Assistant => "assistant".into(),
+                    Role::System => "system".into(),
+                },
+                content: Some(string_contents),
+                name: None,
+                function_call: None,
+            });
+        }
+    }
+
+    // Tiktoken doesn't yet support these models, so we manually use the
+    // same tokenizer as GPT-4.
+    tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
+        .map(|tokens| (tokens + tokens_from_images) as u64)
+}
+
+pub fn into_anthropic(
+    request: LanguageModelRequest,
+    model: String,
+    default_temperature: f32,
+    max_output_tokens: u64,
+    mode: AnthropicModelMode,
+) -> crate::Request {
+    let mut new_messages: Vec<Message> = Vec::new();
+    let mut system_message = String::new();
+
+    for message in request.messages {
+        if message.contents_empty() {
+            continue;
+        }
+
+        match message.role {
+            Role::User | Role::Assistant => {
+                let mut anthropic_message_content: Vec<RequestContent> = message
+                    .content
+                    .into_iter()
+                    .filter_map(to_anthropic_content)
+                    .collect();
+                let anthropic_role = match message.role {
+                    Role::User => crate::Role::User,
+                    Role::Assistant => crate::Role::Assistant,
+                    Role::System => unreachable!("System role should never occur here"),
+                };
+                if anthropic_message_content.is_empty() {
+                    continue;
+                }
+
+                if let Some(last_message) = new_messages.last_mut()
+                    && last_message.role == anthropic_role
+                {
+                    last_message.content.extend(anthropic_message_content);
+                    continue;
+                }
+
+                // Mark the last segment of the message as cached
+                if message.cache {
+                    let cache_control_value = Some(CacheControl {
+                        cache_type: CacheControlType::Ephemeral,
+                    });
+                    for message_content in anthropic_message_content.iter_mut().rev() {
+                        match message_content {
+                            RequestContent::RedactedThinking { .. } => {
+                                // Caching is not possible, fallback to next message
+                            }
+                            RequestContent::Text { cache_control, .. }
+                            | RequestContent::Thinking { cache_control, .. }
+                            | RequestContent::Image { cache_control, .. }
+                            | RequestContent::ToolUse { cache_control, .. }
+                            | RequestContent::ToolResult { cache_control, .. } => {
+                                *cache_control = cache_control_value;
+                                break;
+                            }
+                        }
+                    }
+                }
+
+                new_messages.push(Message {
+                    role: anthropic_role,
+                    content: anthropic_message_content,
+                });
+            }
+            Role::System => {
+                if !system_message.is_empty() {
+                    system_message.push_str("\n\n");
+                }
+                system_message.push_str(&message.string_contents());
+            }
+        }
+    }
+
+    crate::Request {
+        model,
+        messages: new_messages,
+        max_tokens: max_output_tokens,
+        system: if system_message.is_empty() {
+            None
+        } else {
+            Some(StringOrContents::String(system_message))
+        },
+        thinking: if request.thinking_allowed {
+            match mode {
+                AnthropicModelMode::Thinking { budget_tokens } => {
+                    Some(Thinking::Enabled { budget_tokens })
+                }
+                AnthropicModelMode::AdaptiveThinking => Some(Thinking::Adaptive),
+                AnthropicModelMode::Default => None,
+            }
+        } else {
+            None
+        },
+        tools: request
+            .tools
+            .into_iter()
+            .map(|tool| Tool {
+                name: tool.name,
+                description: tool.description,
+                input_schema: tool.input_schema,
+                eager_input_streaming: tool.use_input_streaming,
+            })
+            .collect(),
+        tool_choice: request.tool_choice.map(|choice| match choice {
+            LanguageModelToolChoice::Auto => ToolChoice::Auto,
+            LanguageModelToolChoice::Any => ToolChoice::Any,
+            LanguageModelToolChoice::None => ToolChoice::None,
+        }),
+        metadata: None,
+        output_config: if request.thinking_allowed
+            && matches!(mode, AnthropicModelMode::AdaptiveThinking)
+        {
+            request.thinking_effort.as_deref().and_then(|effort| {
+                let effort = match effort {
+                    "low" => Some(crate::Effort::Low),
+                    "medium" => Some(crate::Effort::Medium),
+                    "high" => Some(crate::Effort::High),
+                    "max" => Some(crate::Effort::Max),
+                    _ => None,
+                };
+                effort.map(|effort| crate::OutputConfig {
+                    effort: Some(effort),
+                })
+            })
+        } else {
+            None
+        },
+        stop_sequences: Vec::new(),
+        speed: request.speed.map(Into::into),
+        temperature: request.temperature.or(Some(default_temperature)),
+        top_k: None,
+        top_p: None,
+    }
+}
+
+pub struct AnthropicEventMapper {
+    tool_uses_by_index: HashMap<usize, RawToolUse>,
+    usage: Usage,
+    stop_reason: StopReason,
+}
+
+impl AnthropicEventMapper {
+    pub fn new() -> Self {
+        Self {
+            tool_uses_by_index: HashMap::default(),
+            usage: Usage::default(),
+            stop_reason: StopReason::EndTurn,
+        }
+    }
+
+    pub fn map_stream(
+        mut self,
+        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
+    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+    {
+        events.flat_map(move |event| {
+            futures::stream::iter(match event {
+                Ok(event) => self.map_event(event),
+                Err(error) => vec![Err(error.into())],
+            })
+        })
+    }
+
+    pub fn map_event(
+        &mut self,
+        event: Event,
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        match event {
+            Event::ContentBlockStart {
+                index,
+                content_block,
+            } => match content_block {
+                ResponseContent::Text { text } => {
+                    vec![Ok(LanguageModelCompletionEvent::Text(text))]
+                }
+                ResponseContent::Thinking { thinking } => {
+                    vec![Ok(LanguageModelCompletionEvent::Thinking {
+                        text: thinking,
+                        signature: None,
+                    })]
+                }
+                ResponseContent::RedactedThinking { data } => {
+                    vec![Ok(LanguageModelCompletionEvent::RedactedThinking { data })]
+                }
+                ResponseContent::ToolUse { id, name, .. } => {
+                    self.tool_uses_by_index.insert(
+                        index,
+                        RawToolUse {
+                            id,
+                            name,
+                            input_json: String::new(),
+                        },
+                    );
+                    Vec::new()
+                }
+            },
+            Event::ContentBlockDelta { index, delta } => match delta {
+                ContentDelta::TextDelta { text } => {
+                    vec![Ok(LanguageModelCompletionEvent::Text(text))]
+                }
+                ContentDelta::ThinkingDelta { thinking } => {
+                    vec![Ok(LanguageModelCompletionEvent::Thinking {
+                        text: thinking,
+                        signature: None,
+                    })]
+                }
+                ContentDelta::SignatureDelta { signature } => {
+                    vec![Ok(LanguageModelCompletionEvent::Thinking {
+                        text: "".to_string(),
+                        signature: Some(signature),
+                    })]
+                }
+                ContentDelta::InputJsonDelta { partial_json } => {
+                    if let Some(tool_use) = self.tool_uses_by_index.get_mut(&index) {
+                        tool_use.input_json.push_str(&partial_json);
+
+                        // Try to convert invalid (incomplete) JSON into
+                        // valid JSON that serde can accept, e.g. by closing
+                        // unclosed delimiters. This way, we can update the
+                        // UI with whatever has been streamed back so far.
+                        if let Ok(input) =
+                            serde_json::Value::from_str(&fix_streamed_json(&tool_use.input_json))
+                        {
+                            return vec![Ok(LanguageModelCompletionEvent::ToolUse(
+                                LanguageModelToolUse {
+                                    id: tool_use.id.clone().into(),
+                                    name: tool_use.name.clone().into(),
+                                    is_input_complete: false,
+                                    raw_input: tool_use.input_json.clone(),
+                                    input,
+                                    thought_signature: None,
+                                },
+                            ))];
+                        }
+                    }
+                    vec![]
+                }
+            },
+            Event::ContentBlockStop { index } => {
+                if let Some(tool_use) = self.tool_uses_by_index.remove(&index) {
+                    let input_json = tool_use.input_json.trim();
+                    let event_result = match parse_tool_arguments(input_json) {
+                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: tool_use.id.into(),
+                                name: tool_use.name.into(),
+                                is_input_complete: true,
+                                input,
+                                raw_input: tool_use.input_json.clone(),
+                                thought_signature: None,
+                            },
+                        )),
+                        Err(json_parse_err) => {
+                            Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+                                id: tool_use.id.into(),
+                                tool_name: tool_use.name.into(),
+                                raw_input: input_json.into(),
+                                json_parse_error: json_parse_err.to_string(),
+                            })
+                        }
+                    };
+
+                    vec![event_result]
+                } else {
+                    Vec::new()
+                }
+            }
+            Event::MessageStart { message } => {
+                update_usage(&mut self.usage, &message.usage);
+                vec![
+                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
+                        &self.usage,
+                    ))),
+                    Ok(LanguageModelCompletionEvent::StartMessage {
+                        message_id: message.id,
+                    }),
+                ]
+            }
+            Event::MessageDelta { delta, usage } => {
+                update_usage(&mut self.usage, &usage);
+                if let Some(stop_reason) = delta.stop_reason.as_deref() {
+                    self.stop_reason = match stop_reason {
+                        "end_turn" => StopReason::EndTurn,
+                        "max_tokens" => StopReason::MaxTokens,
+                        "tool_use" => StopReason::ToolUse,
+                        "refusal" => StopReason::Refusal,
+                        _ => {
+                            log::error!("Unexpected anthropic stop_reason: {stop_reason}");
+                            StopReason::EndTurn
+                        }
+                    };
+                }
+                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
+                    convert_usage(&self.usage),
+                ))]
+            }
+            Event::MessageStop => {
+                vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))]
+            }
+            Event::Error { error } => {
+                vec![Err(error.into())]
+            }
+            _ => Vec::new(),
+        }
+    }
+}
+
+struct RawToolUse {
+    id: String,
+    name: String,
+    input_json: String,
+}
+
+/// Updates usage data by preferring counts from `new`.
+fn update_usage(usage: &mut Usage, new: &Usage) {
+    if let Some(input_tokens) = new.input_tokens {
+        usage.input_tokens = Some(input_tokens);
+    }
+    if let Some(output_tokens) = new.output_tokens {
+        usage.output_tokens = Some(output_tokens);
+    }
+    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
+        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
+    }
+    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
+        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
+    }
+}
+
+fn convert_usage(usage: &Usage) -> TokenUsage {
+    TokenUsage {
+        input_tokens: usage.input_tokens.unwrap_or(0),
+        output_tokens: usage.output_tokens.unwrap_or(0),
+        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
+        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::AnthropicModelMode;
+    use language_model_core::{LanguageModelImage, LanguageModelRequestMessage, MessageContent};
+
+    #[test]
+    fn test_cache_control_only_on_last_segment() {
+        let request = LanguageModelRequest {
+            messages: vec![LanguageModelRequestMessage {
+                role: Role::User,
+                content: vec![
+                    MessageContent::Text("Some prompt".to_string()),
+                    MessageContent::Image(LanguageModelImage::empty()),
+                    MessageContent::Image(LanguageModelImage::empty()),
+                    MessageContent::Image(LanguageModelImage::empty()),
+                    MessageContent::Image(LanguageModelImage::empty()),
+                ],
+                cache: true,
+                reasoning_details: None,
+            }],
+            thread_id: None,
+            prompt_id: None,
+            intent: None,
+            stop: vec![],
+            temperature: None,
+            tools: vec![],
+            tool_choice: None,
+            thinking_allowed: true,
+            thinking_effort: None,
+            speed: None,
+        };
+
+        let anthropic_request = into_anthropic(
+            request,
+            "claude-3-5-sonnet".to_string(),
+            0.7,
+            4096,
+            AnthropicModelMode::Default,
+        );
+
+        assert_eq!(anthropic_request.messages.len(), 1);
+
+        let message = &anthropic_request.messages[0];
+        assert_eq!(message.content.len(), 5);
+
+        assert!(matches!(
+            message.content[0],
+            RequestContent::Text {
+                cache_control: None,
+                ..
+            }
+        ));
+        for i in 1..3 {
+            assert!(matches!(
+                message.content[i],
+                RequestContent::Image {
+                    cache_control: None,
+                    ..
+                }
+            ));
+        }
+
+        assert!(matches!(
+            message.content[4],
+            RequestContent::Image {
+                cache_control: Some(CacheControl {
+                    cache_type: CacheControlType::Ephemeral,
+                }),
+                ..
+            }
+        ));
+    }
+
+    fn request_with_assistant_content(assistant_content: Vec<MessageContent>) -> crate::Request {
+        let mut request = LanguageModelRequest {
+            messages: vec![LanguageModelRequestMessage {
+                role: Role::User,
+                content: vec![MessageContent::Text("Hello".to_string())],
+                cache: false,
+                reasoning_details: None,
+            }],
+            thinking_effort: None,
+            thread_id: None,
+            prompt_id: None,
+            intent: None,
+            stop: vec![],
+            temperature: None,
+            tools: vec![],
+            tool_choice: None,
+            thinking_allowed: true,
+            speed: None,
+        };
+        request.messages.push(LanguageModelRequestMessage {
+            role: Role::Assistant,
+            content: assistant_content,
+            cache: false,
+            reasoning_details: None,
+        });
+        into_anthropic(
+            request,
+            "claude-sonnet-4-5".to_string(),
+            1.0,
+            16000,
+            AnthropicModelMode::Thinking {
+                budget_tokens: Some(10000),
+            },
+        )
+    }
+
+    #[test]
+    fn test_unsigned_thinking_blocks_stripped() {
+        let result = request_with_assistant_content(vec![
+            MessageContent::Thinking {
+                text: "Cancelled mid-think, no signature".to_string(),
+                signature: None,
+            },
+            MessageContent::Text("Some response text".to_string()),
+        ]);
+
+        let assistant_message = result
+            .messages
+            .iter()
+            .find(|m| m.role == crate::Role::Assistant)
+            .expect("assistant message should still exist");
+
+        assert_eq!(
+            assistant_message.content.len(),
+            1,
+            "Only the text content should remain; unsigned thinking block should be stripped"
+        );
+        assert!(matches!(
+            &assistant_message.content[0],
+            RequestContent::Text { text, .. } if text == "Some response text"
+        ));
+    }
+
+    #[test]
+    fn test_signed_thinking_blocks_preserved() {
+        let result = request_with_assistant_content(vec![
+            MessageContent::Thinking {
+                text: "Completed thinking".to_string(),
+                signature: Some("valid-signature".to_string()),
+            },
+            MessageContent::Text("Response".to_string()),
+        ]);
+
+        let assistant_message = result
+            .messages
+            .iter()
+            .find(|m| m.role == crate::Role::Assistant)
+            .expect("assistant message should exist");
+
+        assert_eq!(
+            assistant_message.content.len(),
+            2,
+            "Both the signed thinking block and text should be preserved"
+        );
+        assert!(matches!(
+            &assistant_message.content[0],
+            RequestContent::Thinking { thinking, signature, .. }
+                if thinking == "Completed thinking" && signature == "valid-signature"
+        ));
+    }
+
+    #[test]
+    fn test_only_unsigned_thinking_block_omits_entire_message() {
+        let result = request_with_assistant_content(vec![MessageContent::Thinking {
+            text: "Cancelled before any text or signature".to_string(),
+            signature: None,
+        }]);
+
+        let assistant_messages: Vec<_> = result
+            .messages
+            .iter()
+            .filter(|m| m.role == crate::Role::Assistant)
+            .collect();
+
+        assert_eq!(
+            assistant_messages.len(),
+            0,
+            "An assistant message whose only content was an unsigned thinking block \
+             should be omitted entirely"
+        );
+    }
+}

crates/auto_update_ui/Cargo.toml 🔗

@@ -12,9 +12,12 @@ workspace = true
 path = "src/auto_update_ui.rs"
 
 [dependencies]
+agent_settings.workspace = true
 anyhow.workspace = true
 auto_update.workspace = true
 client.workspace = true
+db.workspace = true
+fs.workspace = true
 editor.workspace = true
 gpui.workspace = true
 markdown_preview.workspace = true
@@ -23,6 +26,8 @@ semver.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 smol.workspace = true
+telemetry.workspace = true
 ui.workspace = true
 util.workspace = true
 workspace.workspace = true
+zed_actions.workspace = true

crates/auto_update_ui/src/auto_update_ui.rs 🔗

@@ -1,5 +1,10 @@
+use std::sync::Arc;
+
+use agent_settings::{AgentSettings, WindowLayout};
 use auto_update::{AutoUpdater, release_notes_url};
+use db::kvp::Dismissable;
 use editor::{Editor, MultiBuffer};
+use fs::Fs;
 use gpui::{
     App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Window, actions, prelude::*,
 };
@@ -8,10 +13,10 @@ use release_channel::{AppVersion, ReleaseChannel};
 use semver::Version;
 use serde::Deserialize;
 use smol::io::AsyncReadExt;
-use ui::{AnnouncementToast, ListBulletItem, prelude::*};
+use ui::{AnnouncementToast, ListBulletItem, ParallelAgentsIllustration, prelude::*};
 use util::{ResultExt as _, maybe};
 use workspace::{
-    Workspace,
+    ToggleWorkspaceSidebar, Workspace,
     notifications::{
         ErrorMessagePrompt, Notification, NotificationId, SuppressEvent, show_app_notification,
         simple_message_notification::MessageNotification,
@@ -169,23 +174,52 @@ struct AnnouncementContent {
     bullet_items: Vec<SharedString>,
     primary_action_label: SharedString,
     primary_action_url: Option<SharedString>,
+    primary_action_callback: Option<Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>>,
+    secondary_action_url: Option<SharedString>,
+    on_dismiss: Option<Arc<dyn Fn(&mut App) + Send + Sync>>,
+}
+
+struct ParallelAgentAnnouncement;
+
+impl Dismissable for ParallelAgentAnnouncement {
+    const KEY: &'static str = "parallel-agent-announcement";
 }
 
-fn announcement_for_version(version: &Version) -> Option<AnnouncementContent> {
-    #[allow(clippy::match_single_binding)]
+fn announcement_for_version(version: &Version, cx: &App) -> Option<AnnouncementContent> {
     match (version.major, version.minor, version.patch) {
-        // TODO: Add real version when we have it
-        // (0, 225, 0) => Some(AnnouncementContent {
-        //     heading: "What's new in Zed 0.225".into(),
-        //     description: "This release includes some exciting improvements.".into(),
-        //     bullet_items: vec![
-        //         "Improved agent performance".into(),
-        //         "New agentic features".into(),
-        //         "Better agent capabilities".into(),
-        //     ],
-        //     primary_action_label: "Learn More".into(),
-        //     primary_action_url: Some("https://zed.dev/".into()),
-        // }),
+        (0, 232, _) => {
+            if ParallelAgentAnnouncement::dismissed(cx) {
+                None
+            } else {
+                let fs = <dyn Fs>::global(cx);
+                let already_agent_layout =
+                    matches!(AgentSettings::get_layout(cx), WindowLayout::Agent(_));
+
+                Some(AnnouncementContent {
+                    heading: "Introducing Parallel Agents".into(),
+                    description: "Run multiple agent threads simultaneously across projects."
+                        .into(),
+                    bullet_items: vec![
+                        "Mix and match Zed's agent with any ACP-compatible agent".into(),
+                        "Optional worktree isolation keeps agents from conflicting".into(),
+                        "Updated workspace layout designed for agentic workflows".into(),
+                    ],
+                    primary_action_label: "Try Now".into(),
+                    primary_action_url: None,
+                    primary_action_callback: Some(Arc::new(move |window, cx| {
+                        if !already_agent_layout {
+                            AgentSettings::set_layout(WindowLayout::Agent(None), fs.clone(), cx);
+                        }
+                        window.dispatch_action(Box::new(ToggleWorkspaceSidebar), cx);
+                        window.dispatch_action(Box::new(zed_actions::assistant::ToggleFocus), cx);
+                    })),
+                    on_dismiss: Some(Arc::new(|cx| {
+                        ParallelAgentAnnouncement::set_dismissed(true, cx)
+                    })),
+                    secondary_action_url: Some("https://zed.dev/blog/".into()),
+                })
+            }
+        }
         _ => None,
     }
 }
@@ -202,6 +236,13 @@ impl AnnouncementToastNotification {
             content,
         }
     }
+
+    fn dismiss(&mut self, cx: &mut Context<Self>) {
+        cx.emit(DismissEvent);
+        if let Some(on_dismiss) = &self.content.on_dismiss {
+            on_dismiss(cx);
+        }
+    }
 }
 
 impl Focusable for AnnouncementToastNotification {
@@ -217,6 +258,7 @@ impl Notification for AnnouncementToastNotification {}
 impl Render for AnnouncementToastNotification {
     fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         AnnouncementToast::new()
+            .illustration(ParallelAgentsIllustration::new())
             .heading(self.content.heading.clone())
             .description(self.content.description.clone())
             .bullet_items(
@@ -228,24 +270,31 @@ impl Render for AnnouncementToastNotification {
             .primary_action_label(self.content.primary_action_label.clone())
             .primary_on_click(cx.listener({
                 let url = self.content.primary_action_url.clone();
-                move |_, _, _window, cx| {
+                let callback = self.content.primary_action_callback.clone();
+                move |this, _, window, cx| {
+                    telemetry::event!("Parallel Agent Announcement Main Click");
+                    if let Some(callback) = &callback {
+                        callback(window, cx);
+                    }
                     if let Some(url) = &url {
                         cx.open_url(url);
                     }
-                    cx.emit(DismissEvent);
+                    this.dismiss(cx);
                 }
             }))
             .secondary_on_click(cx.listener({
-                let url = self.content.primary_action_url.clone();
-                move |_, _, _window, cx| {
+                let url = self.content.secondary_action_url.clone();
+                move |this, _, _window, cx| {
+                    telemetry::event!("Parallel Agent Announcement Secondary Click");
                     if let Some(url) = &url {
                         cx.open_url(url);
                     }
-                    cx.emit(DismissEvent);
+                    this.dismiss(cx);
                 }
             }))
-            .dismiss_on_click(cx.listener(|_, _, _window, cx| {
-                cx.emit(DismissEvent);
+            .dismiss_on_click(cx.listener(|this, _, _window, cx| {
+                telemetry::event!("Parallel Agent Announcement Dismiss");
+                this.dismiss(cx);
             }))
     }
 }
@@ -274,7 +323,7 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
                 version.build = semver::BuildMetadata::EMPTY;
                 let app_name = ReleaseChannel::global(cx).display_name();
 
-                if let Some(content) = announcement_for_version(&version) {
+                if let Some(content) = announcement_for_version(&version, cx) {
                     show_app_notification(
                         NotificationId::unique::<UpdateNotification>(),
                         cx,

crates/bedrock/src/models.rs 🔗

@@ -113,6 +113,10 @@ pub enum Model {
     MistralLarge3,
     #[serde(rename = "pixtral-large")]
     PixtralLarge,
+    #[serde(rename = "devstral-2-123b")]
+    Devstral2_123B,
+    #[serde(rename = "ministral-14b")]
+    Ministral14B,
 
     // Qwen models
     #[serde(rename = "qwen3-32b")]
@@ -146,9 +150,27 @@ pub enum Model {
     #[serde(rename = "gpt-oss-120b")]
     GptOss120B,
 
+    // NVIDIA Nemotron models
+    #[serde(rename = "nemotron-super-3-120b")]
+    NemotronSuper3_120B,
+    #[serde(rename = "nemotron-nano-3-30b")]
+    NemotronNano3_30B,
+
     // MiniMax models
     #[serde(rename = "minimax-m2")]
     MiniMaxM2,
+    #[serde(rename = "minimax-m2-1")]
+    MiniMaxM2_1,
+    #[serde(rename = "minimax-m2-5")]
+    MiniMaxM2_5,
+
+    // Z.AI GLM models
+    #[serde(rename = "glm-5")]
+    GLM5,
+    #[serde(rename = "glm-4-7")]
+    GLM4_7,
+    #[serde(rename = "glm-4-7-flash")]
+    GLM4_7Flash,
 
     // Moonshot models
     #[serde(rename = "kimi-k2-thinking")]
@@ -217,6 +239,8 @@ impl Model {
             Self::MagistralSmall => "magistral-small",
             Self::MistralLarge3 => "mistral-large-3",
             Self::PixtralLarge => "pixtral-large",
+            Self::Devstral2_123B => "devstral-2-123b",
+            Self::Ministral14B => "ministral-14b",
             Self::Qwen3_32B => "qwen3-32b",
             Self::Qwen3VL235B => "qwen3-vl-235b",
             Self::Qwen3_235B => "qwen3-235b",
@@ -230,7 +254,14 @@ impl Model {
             Self::Nova2Lite => "nova-2-lite",
             Self::GptOss20B => "gpt-oss-20b",
             Self::GptOss120B => "gpt-oss-120b",
+            Self::NemotronSuper3_120B => "nemotron-super-3-120b",
+            Self::NemotronNano3_30B => "nemotron-nano-3-30b",
             Self::MiniMaxM2 => "minimax-m2",
+            Self::MiniMaxM2_1 => "minimax-m2-1",
+            Self::MiniMaxM2_5 => "minimax-m2-5",
+            Self::GLM5 => "glm-5",
+            Self::GLM4_7 => "glm-4-7",
+            Self::GLM4_7Flash => "glm-4-7-flash",
             Self::KimiK2Thinking => "kimi-k2-thinking",
             Self::KimiK2_5 => "kimi-k2-5",
             Self::DeepSeekR1 => "deepseek-r1",
@@ -257,6 +288,8 @@ impl Model {
             Self::MagistralSmall => "mistral.magistral-small-2509",
             Self::MistralLarge3 => "mistral.mistral-large-3-675b-instruct",
             Self::PixtralLarge => "mistral.pixtral-large-2502-v1:0",
+            Self::Devstral2_123B => "mistral.devstral-2-123b",
+            Self::Ministral14B => "mistral.ministral-3-14b-instruct",
             Self::Qwen3VL235B => "qwen.qwen3-vl-235b-a22b",
             Self::Qwen3_32B => "qwen.qwen3-32b-v1:0",
             Self::Qwen3_235B => "qwen.qwen3-235b-a22b-2507-v1:0",
@@ -270,7 +303,14 @@ impl Model {
             Self::Nova2Lite => "amazon.nova-2-lite-v1:0",
             Self::GptOss20B => "openai.gpt-oss-20b-1:0",
             Self::GptOss120B => "openai.gpt-oss-120b-1:0",
+            Self::NemotronSuper3_120B => "nvidia.nemotron-super-3-120b",
+            Self::NemotronNano3_30B => "nvidia.nemotron-nano-3-30b",
             Self::MiniMaxM2 => "minimax.minimax-m2",
+            Self::MiniMaxM2_1 => "minimax.minimax-m2.1",
+            Self::MiniMaxM2_5 => "minimax.minimax-m2.5",
+            Self::GLM5 => "zai.glm-5",
+            Self::GLM4_7 => "zai.glm-4.7",
+            Self::GLM4_7Flash => "zai.glm-4.7-flash",
             Self::KimiK2Thinking => "moonshot.kimi-k2-thinking",
             Self::KimiK2_5 => "moonshotai.kimi-k2.5",
             Self::DeepSeekR1 => "deepseek.r1-v1:0",
@@ -297,6 +337,8 @@ impl Model {
             Self::MagistralSmall => "Magistral Small",
             Self::MistralLarge3 => "Mistral Large 3",
             Self::PixtralLarge => "Pixtral Large",
+            Self::Devstral2_123B => "Devstral 2 123B",
+            Self::Ministral14B => "Ministral 14B",
             Self::Qwen3VL235B => "Qwen3 VL 235B",
             Self::Qwen3_32B => "Qwen3 32B",
             Self::Qwen3_235B => "Qwen3 235B",
@@ -310,7 +352,14 @@ impl Model {
             Self::Nova2Lite => "Amazon Nova 2 Lite",
             Self::GptOss20B => "GPT OSS 20B",
             Self::GptOss120B => "GPT OSS 120B",
+            Self::NemotronSuper3_120B => "Nemotron Super 3 120B",
+            Self::NemotronNano3_30B => "Nemotron Nano 3 30B",
             Self::MiniMaxM2 => "MiniMax M2",
+            Self::MiniMaxM2_1 => "MiniMax M2.1",
+            Self::MiniMaxM2_5 => "MiniMax M2.5",
+            Self::GLM5 => "GLM 5",
+            Self::GLM4_7 => "GLM 4.7",
+            Self::GLM4_7Flash => "GLM 4.7 Flash",
             Self::KimiK2Thinking => "Kimi K2 Thinking",
             Self::KimiK2_5 => "Kimi K2.5",
             Self::DeepSeekR1 => "DeepSeek R1",
@@ -338,6 +387,7 @@ impl Model {
             Self::Llama4Scout17B | Self::Llama4Maverick17B => 128_000,
             Self::Gemma3_4B | Self::Gemma3_12B | Self::Gemma3_27B => 128_000,
             Self::MagistralSmall | Self::MistralLarge3 | Self::PixtralLarge => 128_000,
+            Self::Devstral2_123B | Self::Ministral14B => 256_000,
             Self::Qwen3_32B
             | Self::Qwen3VL235B
             | Self::Qwen3_235B
@@ -349,7 +399,9 @@ impl Model {
             Self::NovaPremier => 1_000_000,
             Self::Nova2Lite => 300_000,
             Self::GptOss20B | Self::GptOss120B => 128_000,
-            Self::MiniMaxM2 => 128_000,
+            Self::NemotronSuper3_120B | Self::NemotronNano3_30B => 262_000,
+            Self::MiniMaxM2 | Self::MiniMaxM2_1 | Self::MiniMaxM2_5 => 196_000,
+            Self::GLM5 | Self::GLM4_7 | Self::GLM4_7Flash => 203_000,
             Self::KimiK2Thinking | Self::KimiK2_5 => 128_000,
             Self::DeepSeekR1 | Self::DeepSeekV3_1 | Self::DeepSeekV3_2 => 128_000,
             Self::Custom { max_tokens, .. } => *max_tokens,
@@ -373,6 +425,7 @@ impl Model {
             | Self::MagistralSmall
             | Self::MistralLarge3
             | Self::PixtralLarge => 8_192,
+            Self::Devstral2_123B | Self::Ministral14B => 131_000,
             Self::Qwen3_32B
             | Self::Qwen3VL235B
             | Self::Qwen3_235B
@@ -382,7 +435,9 @@ impl Model {
             | Self::Qwen3Coder480B => 8_192,
             Self::NovaLite | Self::NovaPro | Self::NovaPremier | Self::Nova2Lite => 5_000,
             Self::GptOss20B | Self::GptOss120B => 16_000,
-            Self::MiniMaxM2 => 16_000,
+            Self::NemotronSuper3_120B | Self::NemotronNano3_30B => 131_000,
+            Self::MiniMaxM2 | Self::MiniMaxM2_1 | Self::MiniMaxM2_5 => 98_000,
+            Self::GLM5 | Self::GLM4_7 | Self::GLM4_7Flash => 101_000,
             Self::KimiK2Thinking | Self::KimiK2_5 => 16_000,
             Self::DeepSeekR1 | Self::DeepSeekV3_1 | Self::DeepSeekV3_2 => 16_000,
             Self::Custom {
@@ -419,6 +474,7 @@ impl Model {
             | Self::ClaudeSonnet4_6 => true,
             Self::NovaLite | Self::NovaPro | Self::NovaPremier | Self::Nova2Lite => true,
             Self::MistralLarge3 | Self::PixtralLarge | Self::MagistralSmall => true,
+            Self::Devstral2_123B | Self::Ministral14B => true,
             // Gemma accepts toolConfig without error but produces unreliable tool
             // calls -- malformed JSON args, hallucinated tool names, dropped calls.
             Self::Qwen3_32B
@@ -428,7 +484,9 @@ impl Model {
             | Self::Qwen3Coder30B
             | Self::Qwen3CoderNext
             | Self::Qwen3Coder480B => true,
-            Self::MiniMaxM2 => true,
+            Self::MiniMaxM2 | Self::MiniMaxM2_1 | Self::MiniMaxM2_5 => true,
+            Self::NemotronSuper3_120B | Self::NemotronNano3_30B => true,
+            Self::GLM5 | Self::GLM4_7 | Self::GLM4_7Flash => true,
             Self::KimiK2Thinking | Self::KimiK2_5 => true,
             Self::DeepSeekR1 | Self::DeepSeekV3_1 | Self::DeepSeekV3_2 => true,
             _ => false,

crates/client/Cargo.toml 🔗

@@ -36,7 +36,6 @@ gpui_tokio.workspace = true
 http_client.workspace = true
 http_client_tls.workspace = true
 httparse = "1.10"
-language_model.workspace = true
 log.workspace = true
 parking_lot.workspace = true
 paths.workspace = true

crates/client/src/client.rs 🔗

@@ -14,6 +14,7 @@ use async_tungstenite::tungstenite::{
     http::{HeaderValue, Request, StatusCode},
 };
 use clock::SystemClock;
+use cloud_api_client::LlmApiToken;
 use cloud_api_client::websocket_protocol::MessageToClient;
 use cloud_api_client::{ClientApiError, CloudApiClient};
 use cloud_api_types::OrganizationId;
@@ -26,7 +27,6 @@ use futures::{
 };
 use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
 use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env};
-use language_model::LlmApiToken;
 use parking_lot::{Mutex, RwLock};
 use postage::watch;
 use proxy::connect_proxy_stream;

crates/client/src/llm_token.rs 🔗

@@ -1,10 +1,10 @@
 use super::{Client, UserStore};
+use cloud_api_client::LlmApiToken;
 use cloud_api_types::websocket_protocol::MessageToClient;
 use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
 use gpui::{
     App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription,
 };
-use language_model::LlmApiToken;
 use std::sync::Arc;
 
 pub trait NeedsLlmTokenRefresh {

crates/cloud_api_client/Cargo.toml 🔗

@@ -20,5 +20,6 @@ gpui_tokio.workspace = true
 http_client.workspace = true
 parking_lot.workspace = true
 serde_json.workspace = true
+smol.workspace = true
 thiserror.workspace = true
 yawc.workspace = true

crates/cloud_api_client/src/cloud_api_client.rs 🔗

@@ -1,3 +1,4 @@
+mod llm_token;
 mod websocket;
 
 use std::sync::Arc;
@@ -18,6 +19,8 @@ use yawc::WebSocket;
 
 use crate::websocket::Connection;
 
+pub use llm_token::LlmApiToken;
+
 struct Credentials {
     user_id: u32,
     access_token: String,

crates/cloud_api_client/src/llm_token.rs 🔗

@@ -0,0 +1,74 @@
+use std::sync::Arc;
+
+use cloud_api_types::OrganizationId;
+use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
+
+use crate::{ClientApiError, CloudApiClient};
+
+#[derive(Clone, Default)]
+pub struct LlmApiToken(Arc<RwLock<Option<String>>>);
+
+impl LlmApiToken {
+    pub async fn acquire(
+        &self,
+        client: &CloudApiClient,
+        system_id: Option<String>,
+        organization_id: Option<OrganizationId>,
+    ) -> Result<String, ClientApiError> {
+        let lock = self.0.upgradable_read().await;
+        if let Some(token) = lock.as_ref() {
+            Ok(token.to_string())
+        } else {
+            Self::fetch(
+                RwLockUpgradableReadGuard::upgrade(lock).await,
+                client,
+                system_id,
+                organization_id,
+            )
+            .await
+        }
+    }
+
+    pub async fn refresh(
+        &self,
+        client: &CloudApiClient,
+        system_id: Option<String>,
+        organization_id: Option<OrganizationId>,
+    ) -> Result<String, ClientApiError> {
+        Self::fetch(self.0.write().await, client, system_id, organization_id).await
+    }
+
+    /// Clears the existing token before attempting to fetch a new one.
+    ///
+    /// Used when switching organizations so that a failed refresh doesn't
+    /// leave a token for the wrong organization.
+    pub async fn clear_and_refresh(
+        &self,
+        client: &CloudApiClient,
+        system_id: Option<String>,
+        organization_id: Option<OrganizationId>,
+    ) -> Result<String, ClientApiError> {
+        let mut lock = self.0.write().await;
+        *lock = None;
+        Self::fetch(lock, client, system_id, organization_id).await
+    }
+
+    async fn fetch(
+        mut lock: RwLockWriteGuard<'_, Option<String>>,
+        client: &CloudApiClient,
+        system_id: Option<String>,
+        organization_id: Option<OrganizationId>,
+    ) -> Result<String, ClientApiError> {
+        let result = client.create_llm_token(system_id, organization_id).await;
+        match result {
+            Ok(response) => {
+                *lock = Some(response.token.0.clone());
+                Ok(response.token.0)
+            }
+            Err(err) => {
+                *lock = None;
+                Err(err)
+            }
+        }
+    }
+}

crates/cloud_llm_client/Cargo.toml 🔗

@@ -7,6 +7,7 @@ license = "Apache-2.0"
 
 [features]
 test-support = []
+predict-edits = ["dep:zeta_prompt"]
 
 [lints]
 workspace = true
@@ -20,6 +21,6 @@ serde = { workspace = true, features = ["derive", "rc"] }
 serde_json.workspace = true
 strum = { workspace = true, features = ["derive"] }
 uuid = { workspace = true, features = ["serde"] }
-zeta_prompt.workspace = true
+zeta_prompt = { workspace = true, optional = true }
 
 

crates/collab/tests/integration/git_tests.rs 🔗

@@ -269,9 +269,11 @@ async fn test_remote_git_worktrees(
     cx_b.update(|cx| {
         repo_b.update(cx, |repository, _| {
             repository.create_worktree(
-                "feature-branch".to_string(),
+                git::repository::CreateWorktreeTarget::NewBranch {
+                    branch_name: "feature-branch".to_string(),
+                    base_sha: Some("abc123".to_string()),
+                },
                 worktree_directory.join("feature-branch"),
-                Some("abc123".to_string()),
             )
         })
     })
@@ -323,9 +325,11 @@ async fn test_remote_git_worktrees(
     cx_b.update(|cx| {
         repo_b.update(cx, |repository, _| {
             repository.create_worktree(
-                "bugfix-branch".to_string(),
+                git::repository::CreateWorktreeTarget::NewBranch {
+                    branch_name: "bugfix-branch".to_string(),
+                    base_sha: None,
+                },
                 worktree_directory.join("bugfix-branch"),
-                None,
             )
         })
     })

crates/collab/tests/integration/remote_editing_collaboration_tests.rs 🔗

@@ -473,9 +473,11 @@ async fn test_ssh_collaboration_git_worktrees(
     cx_b.update(|cx| {
         repo_b.update(cx, |repo, _| {
             repo.create_worktree(
-                "feature-branch".to_string(),
+                git::repository::CreateWorktreeTarget::NewBranch {
+                    branch_name: "feature-branch".to_string(),
+                    base_sha: Some("abc123".to_string()),
+                },
                 worktree_directory.join("feature-branch"),
-                Some("abc123".to_string()),
             )
         })
     })

crates/collab_ui/Cargo.toml 🔗

@@ -32,7 +32,6 @@ test-support = [
 anyhow.workspace = true
 call.workspace = true
 channel.workspace = true
-chrono.workspace = true
 client.workspace = true
 collections.workspace = true
 db.workspace = true
@@ -41,7 +40,6 @@ futures.workspace = true
 fuzzy.workspace = true
 gpui.workspace = true
 livekit_client.workspace = true
-log.workspace = true
 menu.workspace = true
 notifications.workspace = true
 picker.workspace = true
@@ -56,7 +54,6 @@ telemetry.workspace = true
 theme.workspace = true
 theme_settings.workspace = true
 time.workspace = true
-time_format.workspace = true
 title_bar.workspace = true
 ui.workspace = true
 util.workspace = true

crates/collab_ui/src/collab_panel.rs 🔗

@@ -6,7 +6,7 @@ use crate::{CollaborationPanelSettings, channel_view::ChannelView};
 use anyhow::Context as _;
 use call::ActiveCall;
 use channel::{Channel, ChannelEvent, ChannelStore};
-use client::{ChannelId, Client, Contact, User, UserStore};
+use client::{ChannelId, Client, Contact, Notification, User, UserStore};
 use collections::{HashMap, HashSet};
 use contact_finder::ContactFinder;
 use db::kvp::KeyValueStore;
@@ -21,6 +21,7 @@ use gpui::{
 };
 
 use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrevious};
+use notifications::{NotificationEntry, NotificationEvent, NotificationStore};
 use project::{Fs, Project};
 use rpc::{
     ErrorCode, ErrorExt,
@@ -29,19 +30,23 @@ use rpc::{
 use serde::{Deserialize, Serialize};
 use settings::Settings;
 use smallvec::SmallVec;
-use std::{mem, sync::Arc};
+use std::{mem, sync::Arc, time::Duration};
 use theme::ActiveTheme;
 use theme_settings::ThemeSettings;
 use ui::{
-    Avatar, AvatarAvailabilityIndicator, ContextMenu, CopyButton, Facepile, HighlightedLabel,
-    IconButtonShape, Indicator, ListHeader, ListItem, Tab, Tooltip, prelude::*, tooltip_container,
+    Avatar, AvatarAvailabilityIndicator, CollabNotification, ContextMenu, CopyButton, Facepile,
+    HighlightedLabel, IconButtonShape, Indicator, ListHeader, ListItem, Tab, Tooltip, prelude::*,
+    tooltip_container,
 };
 use util::{ResultExt, TryFutureExt, maybe};
 use workspace::{
     CopyRoomId, Deafen, LeaveCall, MultiWorkspace, Mute, OpenChannelNotes, OpenChannelNotesById,
     ScreenShare, ShareProject, Workspace,
     dock::{DockPosition, Panel, PanelEvent},
-    notifications::{DetachAndPromptErr, NotifyResultExt},
+    notifications::{
+        DetachAndPromptErr, Notification as WorkspaceNotification, NotificationId, NotifyResultExt,
+        SuppressEvent,
+    },
 };
 
 const FILTER_OCCUPIED_CHANNELS_KEY: &str = "filter_occupied_channels";
@@ -87,6 +92,7 @@ struct ChannelMoveClipboard {
 }
 
 const COLLABORATION_PANEL_KEY: &str = "CollaborationPanel";
+const TOAST_DURATION: Duration = Duration::from_secs(5);
 
 pub fn init(cx: &mut App) {
     cx.observe_new(|workspace: &mut Workspace, _, _| {
@@ -267,6 +273,9 @@ pub struct CollabPanel {
     collapsed_channels: Vec<ChannelId>,
     filter_occupied_channels: bool,
     workspace: WeakEntity<Workspace>,
+    notification_store: Entity<NotificationStore>,
+    current_notification_toast: Option<(u64, Task<()>)>,
+    mark_as_read_tasks: HashMap<u64, Task<anyhow::Result<()>>>,
 }
 
 #[derive(Serialize, Deserialize)]
@@ -394,6 +403,9 @@ impl CollabPanel {
                 channel_editing_state: None,
                 selection: None,
                 channel_store: ChannelStore::global(cx),
+                notification_store: NotificationStore::global(cx),
+                current_notification_toast: None,
+                mark_as_read_tasks: HashMap::default(),
                 user_store: workspace.user_store().clone(),
                 project: workspace.project().clone(),
                 subscriptions: Vec::default(),
@@ -437,6 +449,11 @@ impl CollabPanel {
                     }
                 },
             ));
+            this.subscriptions.push(cx.subscribe_in(
+                &this.notification_store,
+                window,
+                Self::on_notification_event,
+            ));
 
             this
         })
@@ -1181,7 +1198,7 @@ impl CollabPanel {
         .into();
 
         ListItem::new(project_id as usize)
-            .height(px(24.))
+            .height(rems_from_px(24.))
             .toggle_state(is_selected)
             .on_click(cx.listener(move |this, _, window, cx| {
                 this.workspace
@@ -1222,7 +1239,7 @@ impl CollabPanel {
         let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize);
 
         ListItem::new(("screen", id))
-            .height(px(24.))
+            .height(rems_from_px(24.))
             .toggle_state(is_selected)
             .start_slot(
                 h_flex()
@@ -1269,7 +1286,7 @@ impl CollabPanel {
         let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id);
 
         ListItem::new("channel-notes")
-            .height(px(24.))
+            .height(rems_from_px(24.))
             .toggle_state(is_selected)
             .on_click(cx.listener(move |this, _, window, cx| {
                 this.open_channel_notes(channel_id, window, cx);
@@ -2665,26 +2682,28 @@ impl CollabPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> AnyElement {
-        let entry = &self.entries[ix];
+        let entry = self.entries[ix].clone();
 
         let is_selected = self.selection == Some(ix);
         match entry {
             ListEntry::Header(section) => {
-                let is_collapsed = self.collapsed_sections.contains(section);
-                self.render_header(*section, is_selected, is_collapsed, cx)
+                let is_collapsed = self.collapsed_sections.contains(&section);
+                self.render_header(section, is_selected, is_collapsed, cx)
+                    .into_any_element()
+            }
+            ListEntry::Contact { contact, calling } => {
+                self.mark_contact_request_accepted_notifications_read(contact.user.id, cx);
+                self.render_contact(&contact, calling, is_selected, cx)
                     .into_any_element()
             }
-            ListEntry::Contact { contact, calling } => self
-                .render_contact(contact, *calling, is_selected, cx)
-                .into_any_element(),
             ListEntry::ContactPlaceholder => self
                 .render_contact_placeholder(is_selected, cx)
                 .into_any_element(),
             ListEntry::IncomingRequest(user) => self
-                .render_contact_request(user, true, is_selected, cx)
+                .render_contact_request(&user, true, is_selected, cx)
                 .into_any_element(),
             ListEntry::OutgoingRequest(user) => self
-                .render_contact_request(user, false, is_selected, cx)
+                .render_contact_request(&user, false, is_selected, cx)
                 .into_any_element(),
             ListEntry::Channel {
                 channel,
@@ -2694,9 +2713,9 @@ impl CollabPanel {
                 ..
             } => self
                 .render_channel(
-                    channel,
-                    *depth,
-                    *has_children,
+                    &channel,
+                    depth,
+                    has_children,
                     is_selected,
                     ix,
                     string_match.as_ref(),
@@ -2704,10 +2723,10 @@ impl CollabPanel {
                 )
                 .into_any_element(),
             ListEntry::ChannelEditor { depth } => self
-                .render_channel_editor(*depth, window, cx)
+                .render_channel_editor(depth, window, cx)
                 .into_any_element(),
             ListEntry::ChannelInvite(channel) => self
-                .render_channel_invite(channel, is_selected, cx)
+                .render_channel_invite(&channel, is_selected, cx)
                 .into_any_element(),
             ListEntry::CallParticipant {
                 user,
@@ -2715,7 +2734,7 @@ impl CollabPanel {
                 is_pending,
                 role,
             } => self
-                .render_call_participant(user, *peer_id, *is_pending, *role, is_selected, cx)
+                .render_call_participant(&user, peer_id, is_pending, role, is_selected, cx)
                 .into_any_element(),
             ListEntry::ParticipantProject {
                 project_id,
@@ -2724,20 +2743,20 @@ impl CollabPanel {
                 is_last,
             } => self
                 .render_participant_project(
-                    *project_id,
-                    worktree_root_names,
-                    *host_user_id,
-                    *is_last,
+                    project_id,
+                    &worktree_root_names,
+                    host_user_id,
+                    is_last,
                     is_selected,
                     window,
                     cx,
                 )
                 .into_any_element(),
             ListEntry::ParticipantScreen { peer_id, is_last } => self
-                .render_participant_screen(*peer_id, *is_last, is_selected, window, cx)
+                .render_participant_screen(peer_id, is_last, is_selected, window, cx)
                 .into_any_element(),
             ListEntry::ChannelNotes { channel_id } => self
-                .render_channel_notes(*channel_id, is_selected, window, cx)
+                .render_channel_notes(channel_id, is_selected, window, cx)
                 .into_any_element(),
         }
     }
@@ -2846,11 +2865,11 @@ impl CollabPanel {
                         }
                     };
 
-                    Some(channel.name.as_ref())
+                    Some(channel.name.clone())
                 });
 
                 if let Some(name) = channel_name {
-                    SharedString::from(name.to_string())
+                    name
                 } else {
                     SharedString::from("Current Call")
                 }
@@ -3210,7 +3229,7 @@ impl CollabPanel {
             (IconName::Star, Color::Default, "Add to Favorites")
         };
 
-        let height = px(24.);
+        let height = rems_from_px(24.);
 
         h_flex()
             .id(ix)
@@ -3397,6 +3416,178 @@ impl CollabPanel {
             item.child(self.channel_name_editor.clone())
         }
     }
+
+    /// Reacts to `NotificationStore` events: pops a toast for a newly arrived
+    /// notification, tears the toast down when that notification is read or
+    /// removed, and requests a re-render of the panel in every case.
+    fn on_notification_event(
+        &mut self,
+        _: &Entity<NotificationStore>,
+        event: &NotificationEvent,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        match event {
+            NotificationEvent::NewNotification { entry } => {
+                self.add_toast(entry, cx);
+                cx.notify();
+            }
+            NotificationEvent::NotificationRemoved { entry }
+            | NotificationEvent::NotificationRead { entry } => {
+                self.remove_toast(entry.id, cx);
+                cx.notify();
+            }
+            NotificationEvent::NotificationsUpdated { .. } => {
+                cx.notify();
+            }
+        }
+    }
+
+    /// Builds the `(actor, message)` pair used to display `entry` in a toast.
+    ///
+    /// Returns `None` when the user the notification refers to is not yet in
+    /// the `UserStore` cache (the `?` on `get_cached_user`), in which case no
+    /// toast is shown for the entry.
+    fn present_notification(
+        &self,
+        entry: &NotificationEntry,
+        cx: &App,
+    ) -> Option<(Option<Arc<User>>, String)> {
+        let user_store = self.user_store.read(cx);
+        match &entry.notification {
+            Notification::ContactRequest { sender_id } => {
+                let requester = user_store.get_cached_user(*sender_id)?;
+                Some((
+                    Some(requester.clone()),
+                    format!("{} wants to add you as a contact", requester.github_login),
+                ))
+            }
+            Notification::ContactRequestAccepted { responder_id } => {
+                let responder = user_store.get_cached_user(*responder_id)?;
+                Some((
+                    Some(responder.clone()),
+                    format!("{} accepted your contact request", responder.github_login),
+                ))
+            }
+            Notification::ChannelInvitation {
+                channel_name,
+                inviter_id,
+                ..
+            } => {
+                let inviter = user_store.get_cached_user(*inviter_id)?;
+                Some((
+                    Some(inviter.clone()),
+                    format!(
+                        "{} invited you to join the #{channel_name} channel",
+                        inviter.github_login
+                    ),
+                ))
+            }
+        }
+    }
+
+    /// Shows a transient workspace toast for `entry` and arms an auto-dismiss
+    /// timer (`TOAST_DURATION`). Silently does nothing when the notification
+    /// cannot be presented yet (actor not cached — see `present_notification`).
+    fn add_toast(&mut self, entry: &NotificationEntry, cx: &mut Context<Self>) {
+        let Some((actor, text)) = self.present_notification(entry, cx) else {
+            return;
+        };
+
+        let notification = entry.notification.clone();
+        // Only contact requests and channel invitations get Accept/Decline
+        // buttons on the toast; acceptance notifications are purely informational.
+        let needs_response = matches!(
+            notification,
+            Notification::ContactRequest { .. } | Notification::ChannelInvitation { .. }
+        );
+
+        let notification_id = entry.id;
+
+        // Assigning here replaces any previous toast's timer task; the timer
+        // removes this toast after TOAST_DURATION if the user hasn't acted.
+        self.current_notification_toast = Some((
+            notification_id,
+            cx.spawn(async move |this, cx| {
+                cx.background_executor().timer(TOAST_DURATION).await;
+                this.update(cx, |this, cx| this.remove_toast(notification_id, cx))
+                    .ok();
+            }),
+        ));
+
+        let collab_panel = cx.entity().downgrade();
+        self.workspace
+            .update(cx, |workspace, cx| {
+                let id = NotificationId::unique::<CollabNotificationToast>();
+
+                // Dismiss any toast of this type already on screen before
+                // showing the replacement.
+                workspace.dismiss_notification(&id, cx);
+                workspace.show_notification(id, cx, |cx| {
+                    let workspace = cx.entity().downgrade();
+                    cx.new(|cx| CollabNotificationToast {
+                        actor,
+                        text,
+                        // `Some` only when the toast should offer Accept/Decline.
+                        notification: needs_response.then(|| notification),
+                        workspace,
+                        collab_panel: collab_panel.clone(),
+                        focus_handle: cx.focus_handle(),
+                    })
+                })
+            })
+            .ok();
+    }
+
+    /// Sends a `MarkNotificationRead` RPC for `notification_id`.
+    ///
+    /// The `mark_as_read_tasks` entry map deduplicates requests: a second call
+    /// for the same id while a request is in flight is a no-op. The task
+    /// removes its own map entry before propagating the request result.
+    fn mark_notification_read(&mut self, notification_id: u64, cx: &mut Context<Self>) {
+        let client = self.client.clone();
+        self.mark_as_read_tasks
+            .entry(notification_id)
+            .or_insert_with(|| {
+                cx.spawn(async move |this, cx| {
+                    let request_result = client
+                        .request(proto::MarkNotificationRead { notification_id })
+                        .await;
+
+                    // Clear the dedup entry whether the request succeeded or not,
+                    // so a later retry is possible.
+                    this.update(cx, |this, _| {
+                        this.mark_as_read_tasks.remove(&notification_id);
+                    })?;
+
+                    request_result?;
+                    Ok(())
+                })
+            });
+    }
+
+    /// Marks as read every unread `ContactRequestAccepted` notification whose
+    /// responder is `contact_user_id`.
+    ///
+    /// Collects the matching ids first (immutable read of the store), then
+    /// issues the mark-as-read requests, since `mark_notification_read` needs
+    /// `&mut self`.
+    fn mark_contact_request_accepted_notifications_read(
+        &mut self,
+        contact_user_id: u64,
+        cx: &mut Context<Self>,
+    ) {
+        let notification_ids = self.notification_store.read_with(cx, |store, _| {
+            (0..store.notification_count())
+                .filter_map(|index| {
+                    let entry = store.notification_at(index)?;
+                    if entry.is_read {
+                        return None;
+                    }
+
+                    match &entry.notification {
+                        Notification::ContactRequestAccepted { responder_id }
+                            if *responder_id == contact_user_id =>
+                        {
+                            Some(entry.id)
+                        }
+                        _ => None,
+                    }
+                })
+                .collect::<Vec<_>>()
+        });
+
+        for notification_id in notification_ids {
+            self.mark_notification_read(notification_id, cx);
+        }
+    }
+
+    /// Dismisses the toast only if the one currently shown belongs to
+    /// `notification_id`; a stale id (toast already replaced) is ignored.
+    fn remove_toast(&mut self, notification_id: u64, cx: &mut Context<Self>) {
+        if let Some((current_id, _)) = &self.current_notification_toast {
+            if *current_id == notification_id {
+                self.dismiss_toast(cx);
+            }
+        }
+    }
+
+    /// Unconditionally clears the current toast state (dropping its timer
+    /// task) and dismisses the workspace notification of this toast type.
+    fn dismiss_toast(&mut self, cx: &mut Context<Self>) {
+        self.current_notification_toast.take();
+        self.workspace
+            .update(cx, |workspace, cx| {
+                let id = NotificationId::unique::<CollabNotificationToast>();
+                workspace.dismiss_notification(&id, cx)
+            })
+            .ok();
+    }
 }
 
 fn render_tree_branch(
@@ -3516,12 +3707,38 @@ impl Panel for CollabPanel {
         CollaborationPanelSettings::get_global(cx).default_width
     }
 
+    /// When the panel becomes active, a pending toast is redundant (the user
+    /// is already looking at the panel), so drop its state and dismiss the
+    /// workspace notification.
+    //
+    // NOTE(review): the workspace dismissal is deferred via `cx.defer`,
+    // presumably to avoid re-entrantly updating the workspace from within this
+    // panel callback — confirm against the call site.
+    fn set_active(&mut self, active: bool, _window: &mut Window, cx: &mut Context<Self>) {
+        if active && self.current_notification_toast.is_some() {
+            self.current_notification_toast.take();
+            let workspace = self.workspace.clone();
+            cx.defer(move |cx| {
+                workspace
+                    .update(cx, |workspace, cx| {
+                        let id = NotificationId::unique::<CollabNotificationToast>();
+                        workspace.dismiss_notification(&id, cx)
+                    })
+                    .ok();
+            });
+        }
+    }
+
     fn icon(&self, _window: &Window, cx: &App) -> Option<ui::IconName> {
         CollaborationPanelSettings::get_global(cx)
             .button
             .then_some(ui::IconName::UserGroup)
     }
 
+    /// Badge shown on the dock icon: the number of pending incoming contact
+    /// requests plus pending channel invitations, or no badge when zero.
+    fn icon_label(&self, _window: &Window, cx: &App) -> Option<String> {
+        let user_store = self.user_store.read(cx);
+        let count = user_store.incoming_contact_requests().len()
+            + self.channel_store.read(cx).channel_invitations().len();
+        if count == 0 {
+            None
+        } else {
+            Some(count.to_string())
+        }
+    }
+
     fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {
         Some("Collab Panel")
     }
@@ -3702,6 +3919,101 @@ impl Render for JoinChannelTooltip {
     }
 }
 
+/// Transient workspace toast for an incoming collab notification
+/// (contact request, accepted request, or channel invitation).
+pub struct CollabNotificationToast {
+    /// User the notification concerns (drives the avatar), if cached.
+    actor: Option<Arc<User>>,
+    /// Human-readable message displayed in the toast body.
+    text: String,
+    /// `Some` only when the toast offers Accept/Decline buttons; taken
+    /// (set to `None`) once the user has responded.
+    notification: Option<Notification>,
+    workspace: WeakEntity<Workspace>,
+    collab_panel: WeakEntity<CollabPanel>,
+    focus_handle: FocusHandle,
+}
+
+impl Focusable for CollabNotificationToast {
+    fn focus_handle(&self, _cx: &App) -> FocusHandle {
+        self.focus_handle.clone()
+    }
+}
+
+// Marker impl: lets the workspace manage this view as a notification.
+impl WorkspaceNotification for CollabNotificationToast {}
+
+impl CollabNotificationToast {
+    /// Focuses the collab panel, deferred so it runs after the current event
+    /// (e.g. the click that is dismissing this toast) has been handled.
+    fn focus_collab_panel(&self, window: &mut Window, cx: &mut Context<Self>) {
+        let workspace = self.workspace.clone();
+        window.defer(cx, move |window, cx| {
+            workspace
+                .update(cx, |workspace, cx| {
+                    workspace.focus_panel::<CollabPanel>(window, cx)
+                })
+                .ok();
+        })
+    }
+
+    /// Routes the user's Accept (`accept == true`) or Decline to the collab
+    /// panel for the stored notification, then dismisses the toast.
+    ///
+    /// `take()` ensures at most one response per toast; a toast without a
+    /// pending notification just dismisses.
+    fn respond(&mut self, accept: bool, window: &mut Window, cx: &mut Context<Self>) {
+        if let Some(notification) = self.notification.take() {
+            self.collab_panel
+                .update(cx, |collab_panel, cx| match notification {
+                    Notification::ContactRequest { sender_id } => {
+                        collab_panel.respond_to_contact_request(sender_id, accept, window, cx);
+                    }
+                    Notification::ChannelInvitation { channel_id, .. } => {
+                        collab_panel.respond_to_channel_invite(ChannelId(channel_id), accept, cx);
+                    }
+                    // Informational only — never stored as a respondable toast.
+                    Notification::ContactRequestAccepted { .. } => {}
+                })
+                .ok();
+        }
+        cx.emit(DismissEvent);
+    }
+}
+
+impl Render for CollabNotificationToast {
+    /// Renders the toast: avatar + message, plus either Accept/Decline (when
+    /// a respondable notification is pending) or Dismiss/Close buttons.
+    /// Clicking anywhere else on the toast focuses the collab panel and
+    /// dismisses the toast.
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        let needs_response = self.notification.is_some();
+
+        let accept_button = if needs_response {
+            Button::new("accept", "Accept").on_click(cx.listener(|this, _, window, cx| {
+                this.respond(true, window, cx);
+                // Keep the click from also triggering the toast's own
+                // on_click (which would focus the panel).
+                cx.stop_propagation();
+            }))
+        } else {
+            Button::new("dismiss", "Dismiss").on_click(cx.listener(|_, _, _, cx| {
+                cx.emit(DismissEvent);
+            }))
+        };
+
+        let decline_button = if needs_response {
+            Button::new("decline", "Decline").on_click(cx.listener(|this, _, window, cx| {
+                this.respond(false, window, cx);
+                cx.stop_propagation();
+            }))
+        } else {
+            Button::new("close", "Close").on_click(cx.listener(|_, _, _, cx| {
+                cx.emit(DismissEvent);
+            }))
+        };
+
+        // Empty URI when the actor isn't known; the avatar is simply blank.
+        let avatar_uri = self
+            .actor
+            .as_ref()
+            .map(|user| user.avatar_uri.clone())
+            .unwrap_or_default();
+
+        div()
+            .id("collab_notification_toast")
+            .on_click(cx.listener(|this, _, window, cx| {
+                this.focus_collab_panel(window, cx);
+                cx.emit(DismissEvent);
+            }))
+            .child(
+                CollabNotification::new(avatar_uri, accept_button, decline_button)
+                    .child(Label::new(self.text.clone())),
+            )
+    }
+}
+
+// Marker impls: the toast can emit dismiss and suppress events that the
+// workspace notification machinery listens for.
+impl EventEmitter<DismissEvent> for CollabNotificationToast {}
+impl EventEmitter<SuppressEvent> for CollabNotificationToast {}
+
 #[cfg(any(test, feature = "test-support"))]
 impl CollabPanel {
     pub fn entries_as_strings(&self) -> Vec<String> {

crates/collab_ui/src/collab_ui.rs 🔗

@@ -1,7 +1,6 @@
 mod call_stats_modal;
 pub mod channel_view;
 pub mod collab_panel;
-pub mod notification_panel;
 pub mod notifications;
 mod panel_settings;
 
@@ -12,7 +11,7 @@ use gpui::{
     App, Pixels, PlatformDisplay, Size, WindowBackgroundAppearance, WindowBounds,
     WindowDecorations, WindowKind, WindowOptions, point,
 };
-pub use panel_settings::{CollaborationPanelSettings, NotificationPanelSettings};
+pub use panel_settings::CollaborationPanelSettings;
 use release_channel::ReleaseChannel;
 use ui::px;
 use workspace::AppState;
@@ -22,7 +21,6 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
     call_stats_modal::init(cx);
     channel_view::init(cx);
     collab_panel::init(cx);
-    notification_panel::init(cx);
     notifications::init(app_state, cx);
     title_bar::init(cx);
 }

crates/collab_ui/src/notification_panel.rs 🔗

@@ -1,727 +0,0 @@
-use crate::NotificationPanelSettings;
-use anyhow::Result;
-use channel::ChannelStore;
-use client::{ChannelId, Client, Notification, User, UserStore};
-use collections::HashMap;
-use futures::StreamExt;
-use gpui::{
-    AnyElement, App, AsyncWindowContext, ClickEvent, Context, DismissEvent, Element, Entity,
-    EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, ListAlignment,
-    ListScrollEvent, ListState, ParentElement, Render, StatefulInteractiveElement, Styled, Task,
-    WeakEntity, Window, actions, div, img, list, px,
-};
-use notifications::{NotificationEntry, NotificationEvent, NotificationStore};
-use project::Fs;
-use rpc::proto;
-
-use settings::{Settings, SettingsStore};
-use std::{sync::Arc, time::Duration};
-use time::{OffsetDateTime, UtcOffset};
-use ui::{
-    Avatar, Button, Icon, IconButton, IconName, Label, Tab, Tooltip, h_flex, prelude::*, v_flex,
-};
-use util::ResultExt;
-use workspace::notifications::{
-    Notification as WorkspaceNotification, NotificationId, SuppressEvent,
-};
-use workspace::{
-    Workspace,
-    dock::{DockPosition, Panel, PanelEvent},
-};
-
-const LOADING_THRESHOLD: usize = 30;
-const MARK_AS_READ_DELAY: Duration = Duration::from_secs(1);
-const TOAST_DURATION: Duration = Duration::from_secs(5);
-const NOTIFICATION_PANEL_KEY: &str = "NotificationPanel";
-
-pub struct NotificationPanel {
-    client: Arc<Client>,
-    user_store: Entity<UserStore>,
-    channel_store: Entity<ChannelStore>,
-    notification_store: Entity<NotificationStore>,
-    fs: Arc<dyn Fs>,
-    active: bool,
-    notification_list: ListState,
-    subscriptions: Vec<gpui::Subscription>,
-    workspace: WeakEntity<Workspace>,
-    current_notification_toast: Option<(u64, Task<()>)>,
-    local_timezone: UtcOffset,
-    focus_handle: FocusHandle,
-    mark_as_read_tasks: HashMap<u64, Task<Result<()>>>,
-    unseen_notifications: Vec<NotificationEntry>,
-}
-
-#[derive(Debug)]
-pub enum Event {
-    DockPositionChanged,
-    Focus,
-    Dismissed,
-}
-
-pub struct NotificationPresenter {
-    pub actor: Option<Arc<client::User>>,
-    pub text: String,
-    pub icon: &'static str,
-    pub needs_response: bool,
-}
-
-actions!(
-    notification_panel,
-    [
-        /// Toggles the notification panel.
-        Toggle,
-        /// Toggles focus on the notification panel.
-        ToggleFocus
-    ]
-);
-
-pub fn init(cx: &mut App) {
-    cx.observe_new(|workspace: &mut Workspace, _, _| {
-        workspace.register_action(|workspace, _: &ToggleFocus, window, cx| {
-            workspace.toggle_panel_focus::<NotificationPanel>(window, cx);
-        });
-        workspace.register_action(|workspace, _: &Toggle, window, cx| {
-            if !workspace.toggle_panel_focus::<NotificationPanel>(window, cx) {
-                workspace.close_panel::<NotificationPanel>(window, cx);
-            }
-        });
-    })
-    .detach();
-}
-
-impl NotificationPanel {
-    pub fn new(
-        workspace: &mut Workspace,
-        window: &mut Window,
-        cx: &mut Context<Workspace>,
-    ) -> Entity<Self> {
-        let fs = workspace.app_state().fs.clone();
-        let client = workspace.app_state().client.clone();
-        let user_store = workspace.app_state().user_store.clone();
-        let workspace_handle = workspace.weak_handle();
-
-        cx.new(|cx| {
-            let mut status = client.status();
-            cx.spawn_in(window, async move |this, cx| {
-                while (status.next().await).is_some() {
-                    if this
-                        .update(cx, |_: &mut Self, cx| {
-                            cx.notify();
-                        })
-                        .is_err()
-                    {
-                        break;
-                    }
-                }
-            })
-            .detach();
-
-            let notification_list = ListState::new(0, ListAlignment::Top, px(1000.));
-            notification_list.set_scroll_handler(cx.listener(
-                |this, event: &ListScrollEvent, _, cx| {
-                    if event.count.saturating_sub(event.visible_range.end) < LOADING_THRESHOLD
-                        && let Some(task) = this
-                            .notification_store
-                            .update(cx, |store, cx| store.load_more_notifications(false, cx))
-                    {
-                        task.detach();
-                    }
-                },
-            ));
-
-            let local_offset = chrono::Local::now().offset().local_minus_utc();
-            let mut this = Self {
-                fs,
-                client,
-                user_store,
-                local_timezone: UtcOffset::from_whole_seconds(local_offset).unwrap(),
-                channel_store: ChannelStore::global(cx),
-                notification_store: NotificationStore::global(cx),
-                notification_list,
-                workspace: workspace_handle,
-                focus_handle: cx.focus_handle(),
-                subscriptions: Default::default(),
-                current_notification_toast: None,
-                active: false,
-                mark_as_read_tasks: Default::default(),
-                unseen_notifications: Default::default(),
-            };
-
-            let mut old_dock_position = this.position(window, cx);
-            this.subscriptions.extend([
-                cx.observe(&this.notification_store, |_, _, cx| cx.notify()),
-                cx.subscribe_in(
-                    &this.notification_store,
-                    window,
-                    Self::on_notification_event,
-                ),
-                cx.observe_global_in::<SettingsStore>(
-                    window,
-                    move |this: &mut Self, window, cx| {
-                        let new_dock_position = this.position(window, cx);
-                        if new_dock_position != old_dock_position {
-                            old_dock_position = new_dock_position;
-                            cx.emit(Event::DockPositionChanged);
-                        }
-                        cx.notify();
-                    },
-                ),
-            ]);
-            this
-        })
-    }
-
-    pub fn load(
-        workspace: WeakEntity<Workspace>,
-        cx: AsyncWindowContext,
-    ) -> Task<Result<Entity<Self>>> {
-        cx.spawn(async move |cx| {
-            workspace.update_in(cx, |workspace, window, cx| Self::new(workspace, window, cx))
-        })
-    }
-
-    fn render_notification(
-        &mut self,
-        ix: usize,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> Option<AnyElement> {
-        let entry = self.notification_store.read(cx).notification_at(ix)?;
-        let notification_id = entry.id;
-        let now = OffsetDateTime::now_utc();
-        let timestamp = entry.timestamp;
-        let NotificationPresenter {
-            actor,
-            text,
-            needs_response,
-            ..
-        } = self.present_notification(entry, cx)?;
-
-        let response = entry.response;
-        let notification = entry.notification.clone();
-
-        if self.active && !entry.is_read {
-            self.did_render_notification(notification_id, &notification, window, cx);
-        }
-
-        let relative_timestamp = time_format::format_localized_timestamp(
-            timestamp,
-            now,
-            self.local_timezone,
-            time_format::TimestampFormat::Relative,
-        );
-
-        let absolute_timestamp = time_format::format_localized_timestamp(
-            timestamp,
-            now,
-            self.local_timezone,
-            time_format::TimestampFormat::Absolute,
-        );
-
-        Some(
-            div()
-                .id(ix)
-                .flex()
-                .flex_row()
-                .size_full()
-                .px_2()
-                .py_1()
-                .gap_2()
-                .hover(|style| style.bg(cx.theme().colors().element_hover))
-                .children(actor.map(|actor| {
-                    img(actor.avatar_uri.clone())
-                        .flex_none()
-                        .w_8()
-                        .h_8()
-                        .rounded_full()
-                }))
-                .child(
-                    v_flex()
-                        .gap_1()
-                        .size_full()
-                        .overflow_hidden()
-                        .child(Label::new(text))
-                        .child(
-                            h_flex()
-                                .child(
-                                    div()
-                                        .id("notification_timestamp")
-                                        .hover(|style| {
-                                            style
-                                                .bg(cx.theme().colors().element_selected)
-                                                .rounded_sm()
-                                        })
-                                        .child(Label::new(relative_timestamp).color(Color::Muted))
-                                        .tooltip(move |_, cx| {
-                                            Tooltip::simple(absolute_timestamp.clone(), cx)
-                                        }),
-                                )
-                                .children(if let Some(is_accepted) = response {
-                                    Some(div().flex().flex_grow().justify_end().child(Label::new(
-                                        if is_accepted {
-                                            "You accepted"
-                                        } else {
-                                            "You declined"
-                                        },
-                                    )))
-                                } else if needs_response {
-                                    Some(
-                                        h_flex()
-                                            .flex_grow()
-                                            .justify_end()
-                                            .child(Button::new("decline", "Decline").on_click({
-                                                let notification = notification.clone();
-                                                let entity = cx.entity();
-                                                move |_, _, cx| {
-                                                    entity.update(cx, |this, cx| {
-                                                        this.respond_to_notification(
-                                                            notification.clone(),
-                                                            false,
-                                                            cx,
-                                                        )
-                                                    });
-                                                }
-                                            }))
-                                            .child(Button::new("accept", "Accept").on_click({
-                                                let notification = notification.clone();
-                                                let entity = cx.entity();
-                                                move |_, _, cx| {
-                                                    entity.update(cx, |this, cx| {
-                                                        this.respond_to_notification(
-                                                            notification.clone(),
-                                                            true,
-                                                            cx,
-                                                        )
-                                                    });
-                                                }
-                                            })),
-                                    )
-                                } else {
-                                    None
-                                }),
-                        ),
-                )
-                .into_any(),
-        )
-    }
-
-    fn present_notification(
-        &self,
-        entry: &NotificationEntry,
-        cx: &App,
-    ) -> Option<NotificationPresenter> {
-        let user_store = self.user_store.read(cx);
-        let channel_store = self.channel_store.read(cx);
-        match entry.notification {
-            Notification::ContactRequest { sender_id } => {
-                let requester = user_store.get_cached_user(sender_id)?;
-                Some(NotificationPresenter {
-                    icon: "icons/plus.svg",
-                    text: format!("{} wants to add you as a contact", requester.github_login),
-                    needs_response: user_store.has_incoming_contact_request(requester.id),
-                    actor: Some(requester),
-                })
-            }
-            Notification::ContactRequestAccepted { responder_id } => {
-                let responder = user_store.get_cached_user(responder_id)?;
-                Some(NotificationPresenter {
-                    icon: "icons/plus.svg",
-                    text: format!("{} accepted your contact invite", responder.github_login),
-                    needs_response: false,
-                    actor: Some(responder),
-                })
-            }
-            Notification::ChannelInvitation {
-                ref channel_name,
-                channel_id,
-                inviter_id,
-            } => {
-                let inviter = user_store.get_cached_user(inviter_id)?;
-                Some(NotificationPresenter {
-                    icon: "icons/hash.svg",
-                    text: format!(
-                        "{} invited you to join the #{channel_name} channel",
-                        inviter.github_login
-                    ),
-                    needs_response: channel_store.has_channel_invitation(ChannelId(channel_id)),
-                    actor: Some(inviter),
-                })
-            }
-        }
-    }
-
-    fn did_render_notification(
-        &mut self,
-        notification_id: u64,
-        notification: &Notification,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        let should_mark_as_read = match notification {
-            Notification::ContactRequestAccepted { .. } => true,
-            Notification::ContactRequest { .. } | Notification::ChannelInvitation { .. } => false,
-        };
-
-        if should_mark_as_read {
-            self.mark_as_read_tasks
-                .entry(notification_id)
-                .or_insert_with(|| {
-                    let client = self.client.clone();
-                    cx.spawn_in(window, async move |this, cx| {
-                        cx.background_executor().timer(MARK_AS_READ_DELAY).await;
-                        client
-                            .request(proto::MarkNotificationRead { notification_id })
-                            .await?;
-                        this.update(cx, |this, _| {
-                            this.mark_as_read_tasks.remove(&notification_id);
-                        })?;
-                        Ok(())
-                    })
-                });
-        }
-    }
-
-    fn on_notification_event(
-        &mut self,
-        _: &Entity<NotificationStore>,
-        event: &NotificationEvent,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        match event {
-            NotificationEvent::NewNotification { entry } => {
-                self.unseen_notifications.push(entry.clone());
-                self.add_toast(entry, window, cx);
-            }
-            NotificationEvent::NotificationRemoved { entry }
-            | NotificationEvent::NotificationRead { entry } => {
-                self.unseen_notifications.retain(|n| n.id != entry.id);
-                self.remove_toast(entry.id, cx);
-            }
-            NotificationEvent::NotificationsUpdated {
-                old_range,
-                new_count,
-            } => {
-                self.notification_list.splice(old_range.clone(), *new_count);
-                cx.notify();
-            }
-        }
-    }
-
-    fn add_toast(
-        &mut self,
-        entry: &NotificationEntry,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        let Some(NotificationPresenter { actor, text, .. }) = self.present_notification(entry, cx)
-        else {
-            return;
-        };
-
-        let notification_id = entry.id;
-        self.current_notification_toast = Some((
-            notification_id,
-            cx.spawn_in(window, async move |this, cx| {
-                cx.background_executor().timer(TOAST_DURATION).await;
-                this.update(cx, |this, cx| this.remove_toast(notification_id, cx))
-                    .ok();
-            }),
-        ));
-
-        self.workspace
-            .update(cx, |workspace, cx| {
-                let id = NotificationId::unique::<NotificationToast>();
-
-                workspace.dismiss_notification(&id, cx);
-                workspace.show_notification(id, cx, |cx| {
-                    let workspace = cx.entity().downgrade();
-                    cx.new(|cx| NotificationToast {
-                        actor,
-                        text,
-                        workspace,
-                        focus_handle: cx.focus_handle(),
-                    })
-                })
-            })
-            .ok();
-    }
-
-    fn remove_toast(&mut self, notification_id: u64, cx: &mut Context<Self>) {
-        if let Some((current_id, _)) = &self.current_notification_toast
-            && *current_id == notification_id
-        {
-            self.current_notification_toast.take();
-            self.workspace
-                .update(cx, |workspace, cx| {
-                    let id = NotificationId::unique::<NotificationToast>();
-                    workspace.dismiss_notification(&id, cx)
-                })
-                .ok();
-        }
-    }
-
-    fn respond_to_notification(
-        &mut self,
-        notification: Notification,
-        response: bool,
-
-        cx: &mut Context<Self>,
-    ) {
-        self.notification_store.update(cx, |store, cx| {
-            store.respond_to_notification(notification, response, cx);
-        });
-    }
-}
-
-impl Render for NotificationPanel {
-    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        v_flex()
-            .size_full()
-            .child(
-                h_flex()
-                    .justify_between()
-                    .px_2()
-                    .py_1()
-                    // Match the height of the tab bar so they line up.
-                    .h(Tab::container_height(cx))
-                    .border_b_1()
-                    .border_color(cx.theme().colors().border)
-                    .child(Label::new("Notifications"))
-                    .child(Icon::new(IconName::Envelope)),
-            )
-            .map(|this| {
-                if !self.client.status().borrow().is_connected() {
-                    this.child(
-                        v_flex()
-                            .gap_2()
-                            .p_4()
-                            .child(
-                                Button::new("connect_prompt_button", "Connect")
-                                    .start_icon(Icon::new(IconName::Github).color(Color::Muted))
-                                    .style(ButtonStyle::Filled)
-                                    .full_width()
-                                    .on_click({
-                                        let client = self.client.clone();
-                                        move |_, window, cx| {
-                                            let client = client.clone();
-                                            window
-                                                .spawn(cx, async move |cx| {
-                                                    match client.connect(true, cx).await {
-                                                        util::ConnectionResult::Timeout => {
-                                                            log::error!("Connection timeout");
-                                                        }
-                                                        util::ConnectionResult::ConnectionReset => {
-                                                            log::error!("Connection reset");
-                                                        }
-                                                        util::ConnectionResult::Result(r) => {
-                                                            r.log_err();
-                                                        }
-                                                    }
-                                                })
-                                                .detach()
-                                        }
-                                    }),
-                            )
-                            .child(
-                                div().flex().w_full().items_center().child(
-                                    Label::new("Connect to view notifications.")
-                                        .color(Color::Muted)
-                                        .size(LabelSize::Small),
-                                ),
-                            ),
-                    )
-                } else if self.notification_list.item_count() == 0 {
-                    this.child(
-                        v_flex().p_4().child(
-                            div().flex().w_full().items_center().child(
-                                Label::new("You have no notifications.")
-                                    .color(Color::Muted)
-                                    .size(LabelSize::Small),
-                            ),
-                        ),
-                    )
-                } else {
-                    this.child(
-                        list(
-                            self.notification_list.clone(),
-                            cx.processor(|this, ix, window, cx| {
-                                this.render_notification(ix, window, cx)
-                                    .unwrap_or_else(|| div().into_any())
-                            }),
-                        )
-                        .size_full(),
-                    )
-                }
-            })
-    }
-}
-
-impl Focusable for NotificationPanel {
-    fn focus_handle(&self, _: &App) -> FocusHandle {
-        self.focus_handle.clone()
-    }
-}
-
-impl EventEmitter<Event> for NotificationPanel {}
-impl EventEmitter<PanelEvent> for NotificationPanel {}
-
-impl Panel for NotificationPanel {
-    fn persistent_name() -> &'static str {
-        "NotificationPanel"
-    }
-
-    fn panel_key() -> &'static str {
-        NOTIFICATION_PANEL_KEY
-    }
-
-    fn position(&self, _: &Window, cx: &App) -> DockPosition {
-        NotificationPanelSettings::get_global(cx).dock
-    }
-
-    fn position_is_valid(&self, position: DockPosition) -> bool {
-        matches!(position, DockPosition::Left | DockPosition::Right)
-    }
-
-    fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context<Self>) {
-        settings::update_settings_file(self.fs.clone(), cx, move |settings, _| {
-            settings.notification_panel.get_or_insert_default().dock = Some(position.into())
-        });
-    }
-
-    fn default_size(&self, _: &Window, cx: &App) -> Pixels {
-        NotificationPanelSettings::get_global(cx).default_width
-    }
-
-    fn set_active(&mut self, active: bool, _: &mut Window, cx: &mut Context<Self>) {
-        self.active = active;
-
-        if self.active {
-            self.unseen_notifications = Vec::new();
-            cx.notify();
-        }
-
-        if self.notification_store.read(cx).notification_count() == 0 {
-            cx.emit(Event::Dismissed);
-        }
-    }
-
-    fn icon(&self, _: &Window, cx: &App) -> Option<IconName> {
-        let show_button = NotificationPanelSettings::get_global(cx).button;
-        if !show_button {
-            return None;
-        }
-
-        if self.unseen_notifications.is_empty() {
-            return Some(IconName::Bell);
-        }
-
-        Some(IconName::BellDot)
-    }
-
-    fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {
-        Some("Notification Panel")
-    }
-
-    fn icon_label(&self, _window: &Window, cx: &App) -> Option<String> {
-        if !NotificationPanelSettings::get_global(cx).show_count_badge {
-            return None;
-        }
-        let count = self.notification_store.read(cx).unread_notification_count();
-        if count == 0 {
-            None
-        } else {
-            Some(count.to_string())
-        }
-    }
-
-    fn toggle_action(&self) -> Box<dyn gpui::Action> {
-        Box::new(ToggleFocus)
-    }
-
-    fn activation_priority(&self) -> u32 {
-        4
-    }
-}
-
-pub struct NotificationToast {
-    actor: Option<Arc<User>>,
-    text: String,
-    workspace: WeakEntity<Workspace>,
-    focus_handle: FocusHandle,
-}
-
-impl Focusable for NotificationToast {
-    fn focus_handle(&self, _cx: &App) -> FocusHandle {
-        self.focus_handle.clone()
-    }
-}
-
-impl WorkspaceNotification for NotificationToast {}
-
-impl NotificationToast {
-    fn focus_notification_panel(&self, window: &mut Window, cx: &mut Context<Self>) {
-        let workspace = self.workspace.clone();
-        window.defer(cx, move |window, cx| {
-            workspace
-                .update(cx, |workspace, cx| {
-                    workspace.focus_panel::<NotificationPanel>(window, cx)
-                })
-                .ok();
-        })
-    }
-}
-
-impl Render for NotificationToast {
-    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let user = self.actor.clone();
-
-        let suppress = window.modifiers().shift;
-        let (close_id, close_icon) = if suppress {
-            ("suppress", IconName::Minimize)
-        } else {
-            ("close", IconName::Close)
-        };
-
-        h_flex()
-            .id("notification_panel_toast")
-            .elevation_3(cx)
-            .p_2()
-            .justify_between()
-            .children(user.map(|user| Avatar::new(user.avatar_uri.clone())))
-            .child(Label::new(self.text.clone()))
-            .on_modifiers_changed(cx.listener(|_, _, _, cx| cx.notify()))
-            .child(
-                IconButton::new(close_id, close_icon)
-                    .tooltip(move |_window, cx| {
-                        if suppress {
-                            Tooltip::for_action(
-                                "Suppress.\nClose with click.",
-                                &workspace::SuppressNotification,
-                                cx,
-                            )
-                        } else {
-                            Tooltip::for_action(
-                                "Close.\nSuppress with shift-click",
-                                &menu::Cancel,
-                                cx,
-                            )
-                        }
-                    })
-                    .on_click(cx.listener(move |_, _: &ClickEvent, _, cx| {
-                        if suppress {
-                            cx.emit(SuppressEvent);
-                        } else {
-                            cx.emit(DismissEvent);
-                        }
-                    })),
-            )
-            .on_click(cx.listener(|this, _, window, cx| {
-                this.focus_notification_panel(window, cx);
-                cx.emit(DismissEvent);
-            }))
-    }
-}
-
-impl EventEmitter<DismissEvent> for NotificationToast {}
-impl EventEmitter<SuppressEvent> for NotificationToast {}

crates/collab_ui/src/panel_settings.rs 🔗

@@ -10,14 +10,6 @@ pub struct CollaborationPanelSettings {
     pub default_width: Pixels,
 }
 
-#[derive(Debug, RegisterSetting)]
-pub struct NotificationPanelSettings {
-    pub button: bool,
-    pub dock: DockPosition,
-    pub default_width: Pixels,
-    pub show_count_badge: bool,
-}
-
 impl Settings for CollaborationPanelSettings {
     fn from_settings(content: &settings::SettingsContent) -> Self {
         let panel = content.collaboration_panel.as_ref().unwrap();
@@ -29,15 +21,3 @@ impl Settings for CollaborationPanelSettings {
         }
     }
 }
-
-impl Settings for NotificationPanelSettings {
-    fn from_settings(content: &settings::SettingsContent) -> Self {
-        let panel = content.notification_panel.as_ref().unwrap();
-        return Self {
-            button: panel.button.unwrap(),
-            dock: panel.dock.unwrap().into(),
-            default_width: panel.default_width.map(px).unwrap(),
-            show_count_badge: panel.show_count_badge.unwrap(),
-        };
-    }
-}

crates/dev_container/src/devcontainer_json.rs 🔗

@@ -217,8 +217,8 @@ pub(crate) struct DevContainer {
     pub(crate) override_feature_install_order: Option<Vec<String>>,
     pub(crate) customizations: Option<ZedCustomizationsWrapper>,
     pub(crate) build: Option<ContainerBuild>,
-    #[serde(default, deserialize_with = "deserialize_string_or_int")]
-    pub(crate) app_port: Option<String>,
+    #[serde(default, deserialize_with = "deserialize_app_port")]
+    pub(crate) app_port: Vec<String>,
     #[serde(default, deserialize_with = "deserialize_mount_definition")]
     pub(crate) workspace_mount: Option<MountDefinition>,
     pub(crate) workspace_folder: Option<String>,
@@ -517,7 +517,7 @@ where
     Ok(Some(mounts))
 }
 
-fn deserialize_string_or_int<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
+fn deserialize_app_port<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
 where
     D: serde::Deserializer<'de>,
 {
@@ -530,9 +530,29 @@ where
         Int(u32),
     }
 
-    match StringOrInt::deserialize(deserializer)? {
-        StringOrInt::String(s) => Ok(Some(s)),
-        StringOrInt::Int(b) => Ok(Some(b.to_string())),
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum AppPort {
+        Array(Vec<StringOrInt>),
+        Single(StringOrInt),
+    }
+
+    fn normalize_port(value: StringOrInt) -> String {
+        match value {
+            StringOrInt::String(s) => {
+                if s.contains(':') {
+                    s
+                } else {
+                    format!("{s}:{s}")
+                }
+            }
+            StringOrInt::Int(n) => format!("{n}:{n}"),
+        }
+    }
+
+    match AppPort::deserialize(deserializer)? {
+        AppPort::Single(value) => Ok(vec![normalize_port(value)]),
+        AppPort::Array(values) => Ok(values.into_iter().map(normalize_port).collect()),
     }
 }
 
@@ -862,7 +882,7 @@ mod test {
                     memory: Some("8gb".to_string()),
                     storage: Some("32gb".to_string()),
                 }),
-                app_port: Some("8081".to_string()),
+                app_port: vec!["8081:8081".to_string()],
                 container_env: Some(HashMap::from([
                     ("MYVAR3".to_string(), "myvar3".to_string()),
                     ("MYVAR4".to_string(), "myvar4".to_string())
@@ -1304,7 +1324,7 @@ mod test {
                     memory: Some("8gb".to_string()),
                     storage: Some("32gb".to_string()),
                 }),
-                app_port: Some("8081".to_string()),
+                app_port: vec!["8081:8081".to_string()],
                 container_env: Some(HashMap::from([
                     ("MYVAR3".to_string(), "myvar3".to_string()),
                     ("MYVAR4".to_string(), "myvar4".to_string())
@@ -1349,6 +1369,35 @@ mod test {
         assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile);
     }
 
+    #[test]
+    fn should_deserialize_app_port_array() {
+        let given_json = r#"
+            // These are some external comments. serde_lenient should handle them
+            {
+                // These are some internal comments
+                "name": "myDevContainer",
+                "remoteUser": "root",
+                "appPort": [
+                    "8081:8083",
+                    "9001",
+                ],
+                "build": {
+                   	"dockerfile": "DockerFile",
+                }
+            }
+            "#;
+
+        let result = deserialize_devcontainer_json(given_json);
+
+        assert!(result.is_ok());
+        let devcontainer = result.expect("ok");
+
+        assert_eq!(
+            devcontainer.app_port,
+            vec!["8081:8083".to_string(), "9001:9001".to_string()]
+        )
+    }
+
     #[test]
     fn mount_definition_should_use_bind_type_for_unix_absolute_paths() {
         let mount = MountDefinition {

crates/dev_container/src/devcontainer_manifest.rs 🔗

@@ -1229,35 +1229,6 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
                 }
             }
         }
-        if let Some(port) = &self.dev_container().app_port {
-            if let Some(network_service_name) = network_mode_service {
-                if let Some(service) = service_declarations.get_mut(network_service_name) {
-                    service.ports.push(DockerComposeServicePort {
-                        target: port.clone(),
-                        published: port.clone(),
-                        ..Default::default()
-                    });
-                } else {
-                    service_declarations.insert(
-                        network_service_name.to_string(),
-                        DockerComposeService {
-                            ports: vec![DockerComposeServicePort {
-                                target: port.clone(),
-                                published: port.clone(),
-                                ..Default::default()
-                            }],
-                            ..Default::default()
-                        },
-                    );
-                }
-            } else {
-                main_service.ports.push(DockerComposeServicePort {
-                    target: port.clone(),
-                    published: port.clone(),
-                    ..Default::default()
-                });
-            }
-        }
 
         service_declarations.insert(main_service_name.to_string(), main_service);
         let new_docker_compose_config = DockerComposeConfig {
@@ -1811,9 +1782,10 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
                 }
             }
         }
-        if let Some(app_port) = &self.dev_container().app_port {
+        for app_port in &self.dev_container().app_port {
             command.arg("-p");
-            command.arg(format!("{app_port}:{app_port}"));
+            // Should just implement display for an AppPort struct which takes care of this; it might be a custom map like (literally) "8081:8080"
+            command.arg(app_port);
         }
 
         command.arg("--entrypoint");
@@ -2997,7 +2969,10 @@ mod test {
                 8082,
                 8083,
               ],
-              "appPort": "8084",
+              "appPort": [
+                8084,
+                "8085:8086",
+              ],
 
               "containerEnv": {
                 "VARIABLE_VALUE": "value",
@@ -3301,6 +3276,8 @@ chmod +x ./install.sh
                 "8083:8083".to_string(),
                 "-p".to_string(),
                 "8084:8084".to_string(),
+                "-p".to_string(),
+                "8085:8086".to_string(),
                 "--entrypoint".to_string(),
                 "/bin/sh".to_string(),
                 "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105".to_string(),
@@ -3357,7 +3334,6 @@ chmod +x ./install.sh
                 "db:5432",
                 "db:1234",
               ],
-              "appPort": "8084",
 
               // Use 'postCreateCommand' to run commands after the container is created.
               // "postCreateCommand": "rustc --version",
@@ -3631,11 +3607,6 @@ ENV DOCKER_BUILDKIT=1
                                 published: "1234".to_string(),
                                 ..Default::default()
                             },
-                            DockerComposeServicePort {
-                                target: "8084".to_string(),
-                                published: "8084".to_string(),
-                                ..Default::default()
-                            },
                         ],
                         ..Default::default()
                     },

crates/edit_prediction/Cargo.toml 🔗

@@ -21,8 +21,9 @@ heapless.workspace = true
 buffer_diff.workspace = true
 client.workspace = true
 clock.workspace = true
+cloud_api_client.workspace = true
 cloud_api_types.workspace = true
-cloud_llm_client.workspace = true
+cloud_llm_client = { workspace = true, features = ["predict-edits"] }
 collections.workspace = true
 copilot.workspace = true
 copilot_ui.workspace = true

crates/edit_prediction/src/edit_prediction.rs 🔗

@@ -1,5 +1,6 @@
 use anyhow::Result;
 use client::{Client, EditPredictionUsage, NeedsLlmTokenRefresh, UserStore, global_llm_token};
+use cloud_api_client::LlmApiToken;
 use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody};
 use cloud_llm_client::predict_edits_v3::{
     PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse,
@@ -31,7 +32,6 @@ use heapless::Vec as ArrayVec;
 use language::language_settings::all_language_settings;
 use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint};
 use language::{BufferSnapshot, OffsetRangeExt};
-use language_model::LlmApiToken;
 use project::{DisableAiSettings, Project, ProjectPath, WorktreeId};
 use release_channel::AppVersion;
 use semver::Version;

crates/edit_prediction/src/ollama.rs 🔗

@@ -57,7 +57,7 @@ pub fn fetch_models(cx: &mut App) -> Vec<SharedString> {
     let mut models: Vec<SharedString> = provider
         .provided_models(cx)
         .into_iter()
-        .map(|model| SharedString::from(model.id().0.to_string()))
+        .map(|model| model.id().0)
         .collect();
     models.sort();
     models

crates/edit_prediction/src/onboarding_modal.rs 🔗

@@ -11,7 +11,7 @@ use gpui::{
 };
 use language::language_settings::EditPredictionProvider;
 use settings::update_settings_file;
-use ui::{Vector, VectorName, prelude::*};
+use ui::prelude::*;
 use workspace::{ModalView, Workspace};
 
 #[macro_export]
@@ -119,6 +119,7 @@ impl Render for ZedPredictModal {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let window_height = window.viewport_size().height;
         let max_height = window_height - px(200.);
+        let color = cx.theme().colors();
 
         v_flex()
             .id("edit-prediction-onboarding")
@@ -127,7 +128,7 @@ impl Render for ZedPredictModal {
             .w(px(550.))
             .h_full()
             .max_h(max_height)
-            .p_4()
+            .p_1()
             .gap_2()
             .elevation_3(cx)
             .track_focus(&self.focus_handle(cx))
@@ -142,32 +143,19 @@ impl Render for ZedPredictModal {
             }))
             .child(
                 div()
-                    .opacity(0.5)
-                    .absolute()
-                    .top(px(-8.0))
-                    .right_0()
-                    .w(px(400.))
-                    .h(px(92.))
-                    .child(
-                        Vector::new(VectorName::AiGrid, rems_from_px(400.), rems_from_px(92.))
-                            .color(Color::Custom(cx.theme().colors().text.alpha(0.32))),
-                    ),
-            )
-            .child(
-                div()
-                    .absolute()
-                    .top_0()
-                    .right_0()
-                    .w(px(660.))
-                    .h(px(401.))
-                    .overflow_hidden()
+                    .p_3()
+                    .size_full()
+                    .border_1()
+                    .border_color(cx.theme().colors().border)
+                    .rounded(px(5.))
                     .bg(linear_gradient(
-                        75.,
-                        linear_color_stop(cx.theme().colors().panel_background.alpha(0.01), 1.0),
-                        linear_color_stop(cx.theme().colors().panel_background, 0.45),
-                    )),
+                        360.,
+                        linear_color_stop(color.panel_background, 1.0),
+                        linear_color_stop(color.editor_background, 0.45),
+                    ))
+                    .child(self.onboarding.clone()),
             )
-            .child(h_flex().absolute().top_2().right_2().child(
+            .child(h_flex().absolute().top_3().right_3().child(
                 IconButton::new("cancel", IconName::Close).on_click(cx.listener(
                     |_, _: &ClickEvent, _window, cx| {
                         onboarding_event!("Cancelled", trigger = "X click");
@@ -175,6 +163,5 @@ impl Render for ZedPredictModal {
                     },
                 )),
             ))
-            .child(self.onboarding.clone())
     }
 }

crates/edit_prediction/src/zed_edit_prediction_delegate.rs 🔗

@@ -177,7 +177,7 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
                 BufferEditPrediction::Local { prediction } => prediction,
                 BufferEditPrediction::Jump { prediction } => {
                     return Some(edit_prediction_types::EditPrediction::Jump {
-                        id: Some(prediction.id.to_string().into()),
+                        id: Some(prediction.id.0.clone()),
                         snapshot: prediction.snapshot.clone(),
                         target: prediction.edits.first().unwrap().0.start,
                     });
@@ -228,7 +228,7 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
             }
 
             Some(edit_prediction_types::EditPrediction::Local {
-                id: Some(prediction.id.to_string().into()),
+                id: Some(prediction.id.0.clone()),
                 edits: edits[edit_start_ix..edit_end_ix].to_vec(),
                 cursor_position: prediction.cursor_position,
                 edit_preview: Some(prediction.edit_preview.clone()),

crates/edit_prediction_cli/Cargo.toml 🔗

@@ -22,7 +22,7 @@ http_client.workspace = true
 chrono.workspace = true
 clap = "4"
 client.workspace = true
-cloud_llm_client.workspace= true
+cloud_llm_client = { workspace = true, features = ["predict-edits"] }
 collections.workspace = true
 db.workspace = true
 debug_adapter_extension.workspace = true

crates/editor/src/display_map.rs 🔗

@@ -98,7 +98,7 @@ use gpui::{
     WeakEntity,
 };
 use language::{
-    Point, Subscription as BufferSubscription,
+    LanguageAwareStyling, Point, Subscription as BufferSubscription,
     language_settings::{AllLanguageSettings, LanguageSettings},
 };
 
@@ -1769,7 +1769,10 @@ impl DisplaySnapshot {
         self.block_snapshot
             .chunks(
                 BlockRow(display_row.0)..BlockRow(self.max_point().row().next_row().0),
-                false,
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
                 self.masked,
                 Highlights::default(),
             )
@@ -1783,7 +1786,10 @@ impl DisplaySnapshot {
             self.block_snapshot
                 .chunks(
                     BlockRow(row)..BlockRow(row + 1),
-                    false,
+                    LanguageAwareStyling {
+                        tree_sitter: false,
+                        diagnostics: false,
+                    },
                     self.masked,
                     Highlights::default(),
                 )
@@ -1798,7 +1804,7 @@ impl DisplaySnapshot {
     pub fn chunks(
         &self,
         display_rows: Range<DisplayRow>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         highlight_styles: HighlightStyles,
     ) -> DisplayChunks<'_> {
         self.block_snapshot.chunks(
@@ -1818,7 +1824,7 @@ impl DisplaySnapshot {
     pub fn highlighted_chunks<'a>(
         &'a self,
         display_rows: Range<DisplayRow>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         editor_style: &'a EditorStyle,
     ) -> impl Iterator<Item = HighlightedChunk<'a>> {
         self.chunks(
@@ -1910,7 +1916,10 @@ impl DisplaySnapshot {
 
         let chunks = custom_highlights::CustomHighlightsChunks::new(
             multibuffer_range,
-            true,
+            LanguageAwareStyling {
+                tree_sitter: true,
+                diagnostics: true,
+            },
             None,
             Some(&self.semantic_token_highlights),
             multibuffer,
@@ -1961,7 +1970,14 @@ impl DisplaySnapshot {
         let mut line = String::new();
 
         let range = display_row..display_row.next_row();
-        for chunk in self.highlighted_chunks(range, false, editor_style) {
+        for chunk in self.highlighted_chunks(
+            range,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
+            editor_style,
+        ) {
             line.push_str(chunk.text);
 
             let text_style = if let Some(style) = chunk.style {
@@ -3388,7 +3404,14 @@ pub mod tests {
 
         let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
         let mut chunks = Vec::<(String, Option<lsp::DiagnosticSeverity>, Rgba)>::new();
-        for chunk in snapshot.chunks(DisplayRow(0)..DisplayRow(5), true, Default::default()) {
+        for chunk in snapshot.chunks(
+            DisplayRow(0)..DisplayRow(5),
+            LanguageAwareStyling {
+                tree_sitter: true,
+                diagnostics: true,
+            },
+            Default::default(),
+        ) {
             let color = chunk
                 .highlight_style
                 .and_then(|style| style.color)
@@ -3940,7 +3963,14 @@ pub mod tests {
     ) -> Vec<(String, Option<Hsla>, Option<Hsla>)> {
         let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
         let mut chunks: Vec<(String, Option<Hsla>, Option<Hsla>)> = Vec::new();
-        for chunk in snapshot.chunks(rows, true, HighlightStyles::default()) {
+        for chunk in snapshot.chunks(
+            rows,
+            LanguageAwareStyling {
+                tree_sitter: true,
+                diagnostics: true,
+            },
+            HighlightStyles::default(),
+        ) {
             let syntax_color = chunk
                 .syntax_highlight_id
                 .and_then(|id| theme.get(id)?.color);

crates/editor/src/display_map/block_map.rs 🔗

@@ -9,7 +9,7 @@ use crate::{
 };
 use collections::{Bound, HashMap, HashSet};
 use gpui::{AnyElement, App, EntityId, Pixels, Window};
-use language::{Patch, Point};
+use language::{LanguageAwareStyling, Patch, Point};
 use multi_buffer::{
     Anchor, ExcerptBoundaryInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, MultiBufferRow,
     MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
@@ -2140,7 +2140,10 @@ impl BlockSnapshot {
     pub fn text(&self) -> String {
         self.chunks(
             BlockRow(0)..self.transforms.summary().output_rows,
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             false,
             Highlights::default(),
         )
@@ -2152,7 +2155,7 @@ impl BlockSnapshot {
     pub(crate) fn chunks<'a>(
         &'a self,
         rows: Range<BlockRow>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         masked: bool,
         highlights: Highlights<'a>,
     ) -> BlockChunks<'a> {
@@ -4300,7 +4303,10 @@ mod tests {
                 let actual_text = blocks_snapshot
                     .chunks(
                         BlockRow(start_row as u32)..BlockRow(end_row as u32),
-                        false,
+                        LanguageAwareStyling {
+                            tree_sitter: false,
+                            diagnostics: false,
+                        },
                         false,
                         Highlights::default(),
                     )

crates/editor/src/display_map/custom_highlights.rs 🔗

@@ -1,6 +1,6 @@
 use collections::BTreeMap;
 use gpui::HighlightStyle;
-use language::Chunk;
+use language::{Chunk, LanguageAwareStyling};
 use multi_buffer::{MultiBufferChunks, MultiBufferOffset, MultiBufferSnapshot, ToOffset as _};
 use std::{
     cmp,
@@ -34,7 +34,7 @@ impl<'a> CustomHighlightsChunks<'a> {
     #[ztracing::instrument(skip_all)]
     pub fn new(
         range: Range<MultiBufferOffset>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         text_highlights: Option<&'a TextHighlights>,
         semantic_token_highlights: Option<&'a SemanticTokensHighlights>,
         multibuffer_snapshot: &'a MultiBufferSnapshot,
@@ -308,7 +308,10 @@ mod tests {
         // Get all chunks and verify their bitmaps
         let chunks = CustomHighlightsChunks::new(
             MultiBufferOffset(0)..buffer_snapshot.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             None,
             None,
             &buffer_snapshot,

crates/editor/src/display_map/fold_map.rs 🔗

@@ -5,7 +5,7 @@ use super::{
     inlay_map::{InlayBufferRows, InlayChunks, InlayEdit, InlayOffset, InlayPoint, InlaySnapshot},
 };
 use gpui::{AnyElement, App, ElementId, HighlightStyle, Pixels, SharedString, Stateful, Window};
-use language::{Edit, HighlightId, Point};
+use language::{Edit, HighlightId, LanguageAwareStyling, Point};
 use multi_buffer::{
     Anchor, AnchorRangeExt, MBTextSummary, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot,
     RowInfo, ToOffset,
@@ -707,7 +707,10 @@ impl FoldSnapshot {
     pub fn text(&self) -> String {
         self.chunks(
             FoldOffset(MultiBufferOffset(0))..self.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         )
         .map(|c| c.text)
@@ -909,7 +912,7 @@ impl FoldSnapshot {
     pub(crate) fn chunks<'a>(
         &'a self,
         range: Range<FoldOffset>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         highlights: Highlights<'a>,
     ) -> FoldChunks<'a> {
         let mut transform_cursor = self
@@ -954,7 +957,10 @@ impl FoldSnapshot {
     pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator<Item = char> {
         self.chunks(
             start.to_offset(self)..self.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         )
         .flat_map(|chunk| chunk.text.chars())
@@ -964,7 +970,10 @@ impl FoldSnapshot {
     pub fn chunks_at(&self, start: FoldPoint) -> FoldChunks<'_> {
         self.chunks(
             start.to_offset(self)..self.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         )
     }
@@ -2131,7 +2140,14 @@ mod tests {
                 let text = &expected_text[start.0.0..end.0.0];
                 assert_eq!(
                     snapshot
-                        .chunks(start..end, false, Highlights::default())
+                        .chunks(
+                            start..end,
+                            LanguageAwareStyling {
+                                tree_sitter: false,
+                                diagnostics: false,
+                            },
+                            Highlights::default()
+                        )
                         .map(|c| c.text)
                         .collect::<String>(),
                     text,
@@ -2303,7 +2319,10 @@ mod tests {
         // Get all chunks and verify their bitmaps
         let chunks = snapshot.chunks(
             FoldOffset(MultiBufferOffset(0))..FoldOffset(snapshot.len().0),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         );
 

crates/editor/src/display_map/inlay_map.rs 🔗

@@ -10,7 +10,7 @@ use crate::{
     inlays::{Inlay, InlayContent},
 };
 use collections::BTreeSet;
-use language::{Chunk, Edit, Point, TextSummary};
+use language::{Chunk, Edit, LanguageAwareStyling, Point, TextSummary};
 use multi_buffer::{
     MBTextSummary, MultiBufferOffset, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot,
     RowInfo, ToOffset,
@@ -1200,7 +1200,7 @@ impl InlaySnapshot {
     pub(crate) fn chunks<'a>(
         &'a self,
         range: Range<InlayOffset>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         highlights: Highlights<'a>,
     ) -> InlayChunks<'a> {
         let mut cursor = self
@@ -1234,9 +1234,16 @@ impl InlaySnapshot {
     #[cfg(test)]
     #[ztracing::instrument(skip_all)]
     pub fn text(&self) -> String {
-        self.chunks(Default::default()..self.len(), false, Highlights::default())
-            .map(|chunk| chunk.chunk.text)
-            .collect()
+        self.chunks(
+            Default::default()..self.len(),
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
+            Highlights::default(),
+        )
+        .map(|chunk| chunk.chunk.text)
+        .collect()
     }
 
     #[ztracing::instrument(skip_all)]
@@ -1979,7 +1986,10 @@ mod tests {
                 let actual_text = inlay_snapshot
                     .chunks(
                         range,
-                        false,
+                        LanguageAwareStyling {
+                            tree_sitter: false,
+                            diagnostics: false,
+                        },
                         Highlights {
                             text_highlights: Some(&text_highlights),
                             inlay_highlights: Some(&inlay_highlights),
@@ -2158,7 +2168,10 @@ mod tests {
         // Get all chunks and verify their bitmaps
         let chunks = snapshot.chunks(
             InlayOffset(MultiBufferOffset(0))..snapshot.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         );
 
@@ -2293,7 +2306,10 @@ mod tests {
         let chunks: Vec<_> = inlay_snapshot
             .chunks(
                 InlayOffset(MultiBufferOffset(0))..inlay_snapshot.len(),
-                false,
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
                 highlights,
             )
             .collect();
@@ -2408,7 +2424,10 @@ mod tests {
             let chunks: Vec<_> = inlay_snapshot
                 .chunks(
                     InlayOffset(MultiBufferOffset(0))..inlay_snapshot.len(),
-                    false,
+                    LanguageAwareStyling {
+                        tree_sitter: false,
+                        diagnostics: false,
+                    },
                     highlights,
                 )
                 .collect();

crates/editor/src/display_map/tab_map.rs 🔗

@@ -3,7 +3,7 @@ use super::{
     fold_map::{self, Chunk, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
 };
 
-use language::Point;
+use language::{LanguageAwareStyling, Point};
 use multi_buffer::MultiBufferSnapshot;
 use std::{cmp, num::NonZeroU32, ops::Range};
 use sum_tree::Bias;
@@ -101,7 +101,10 @@ impl TabMap {
             let mut last_tab_with_changed_expansion_offset = None;
             'outer: for chunk in old_snapshot.fold_snapshot.chunks(
                 fold_edit.old.end..old_end_row_successor_offset,
-                false,
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
                 Highlights::default(),
             ) {
                 let mut remaining_tabs = chunk.tabs;
@@ -244,7 +247,14 @@ impl TabSnapshot {
             self.max_point()
         };
         let first_line_chars = self
-            .chunks(range.start..line_end, false, Highlights::default())
+            .chunks(
+                range.start..line_end,
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
+                Highlights::default(),
+            )
             .flat_map(|chunk| chunk.text.chars())
             .take_while(|&c| c != '\n')
             .count() as u32;
@@ -254,7 +264,10 @@ impl TabSnapshot {
         } else {
             self.chunks(
                 TabPoint::new(range.end.row(), 0)..range.end,
-                false,
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
                 Highlights::default(),
             )
             .flat_map(|chunk| chunk.text.chars())
@@ -274,7 +287,7 @@ impl TabSnapshot {
     pub(crate) fn chunks<'a>(
         &'a self,
         range: Range<TabPoint>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         highlights: Highlights<'a>,
     ) -> TabChunks<'a> {
         let (input_start, expanded_char_column, to_next_stop) =
@@ -324,7 +337,10 @@ impl TabSnapshot {
     pub fn text(&self) -> String {
         self.chunks(
             TabPoint::zero()..self.max_point(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         )
         .map(|chunk| chunk.text)
@@ -1170,7 +1186,10 @@ mod tests {
                 tab_snapshot
                     .chunks(
                         TabPoint::new(0, ix as u32)..tab_snapshot.max_point(),
-                        false,
+                        LanguageAwareStyling {
+                            tree_sitter: false,
+                            diagnostics: false,
+                        },
                         Highlights::default(),
                     )
                     .map(|c| c.text)
@@ -1246,8 +1265,14 @@ mod tests {
             let mut chunks = Vec::new();
             let mut was_tab = false;
             let mut text = String::new();
-            for chunk in snapshot.chunks(start..snapshot.max_point(), false, Highlights::default())
-            {
+            for chunk in snapshot.chunks(
+                start..snapshot.max_point(),
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
+                Highlights::default(),
+            ) {
                 if chunk.is_tab != was_tab {
                     if !text.is_empty() {
                         chunks.push((mem::take(&mut text), was_tab));
@@ -1296,7 +1321,14 @@ mod tests {
 
         // This should not panic.
         let result: String = tab_snapshot
-            .chunks(start..end, false, Highlights::default())
+            .chunks(
+                start..end,
+                LanguageAwareStyling {
+                    tree_sitter: false,
+                    diagnostics: false,
+                },
+                Highlights::default(),
+            )
             .map(|c| c.text)
             .collect();
         assert!(!result.is_empty());
@@ -1354,7 +1386,14 @@ mod tests {
             let expected_summary = TextSummary::from(expected_text.as_str());
             assert_eq!(
                 tabs_snapshot
-                    .chunks(start..end, false, Highlights::default())
+                    .chunks(
+                        start..end,
+                        LanguageAwareStyling {
+                            tree_sitter: false,
+                            diagnostics: false,
+                        },
+                        Highlights::default()
+                    )
                     .map(|c| c.text)
                     .collect::<String>(),
                 expected_text,
@@ -1436,7 +1475,10 @@ mod tests {
         let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
         let chunks = fold_snapshot.chunks(
             FoldOffset(MultiBufferOffset(0))..fold_snapshot.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Default::default(),
         );
         let mut cursor = TabStopCursor::new(chunks);
@@ -1598,7 +1640,10 @@ mod tests {
         let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
         let chunks = fold_snapshot.chunks(
             FoldOffset(MultiBufferOffset(0))..fold_snapshot.len(),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Default::default(),
         );
         let mut cursor = TabStopCursor::new(chunks);

crates/editor/src/display_map/wrap_map.rs 🔗

@@ -5,7 +5,7 @@ use super::{
     tab_map::{self, TabEdit, TabPoint, TabSnapshot},
 };
 use gpui::{App, AppContext as _, Context, Entity, Font, LineWrapper, Pixels, Task};
-use language::Point;
+use language::{LanguageAwareStyling, Point};
 use multi_buffer::{MultiBufferSnapshot, RowInfo};
 use smol::future::yield_now;
 use std::{cmp, collections::VecDeque, mem, ops::Range, sync::LazyLock, time::Duration};
@@ -513,7 +513,10 @@ impl WrapSnapshot {
                 let mut remaining = None;
                 let mut chunks = new_tab_snapshot.chunks(
                     TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
-                    false,
+                    LanguageAwareStyling {
+                        tree_sitter: false,
+                        diagnostics: false,
+                    },
                     Highlights::default(),
                 );
                 let mut edit_transforms = Vec::<Transform>::new();
@@ -656,7 +659,7 @@ impl WrapSnapshot {
     pub(crate) fn chunks<'a>(
         &'a self,
         rows: Range<WrapRow>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         highlights: Highlights<'a>,
     ) -> WrapChunks<'a> {
         let output_start = WrapPoint::new(rows.start, 0);
@@ -960,7 +963,10 @@ impl WrapSnapshot {
     pub fn text_chunks(&self, wrap_row: WrapRow) -> impl Iterator<Item = &str> {
         self.chunks(
             wrap_row..self.max_point().row() + WrapRow(1),
-            false,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
             Highlights::default(),
         )
         .map(|h| h.text)
@@ -1719,7 +1725,10 @@ mod tests {
                 let actual_text = self
                     .chunks(
                         WrapRow(start_row)..WrapRow(end_row),
-                        true,
+                        LanguageAwareStyling {
+                            tree_sitter: true,
+                            diagnostics: true,
+                        },
                         Highlights::default(),
                     )
                     .map(|c| c.text)

crates/editor/src/editor.rs 🔗

@@ -132,9 +132,9 @@ use language::{
     AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow,
     BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape,
     DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
-    IndentSize, Language, LanguageName, LanguageRegistry, LanguageScope, LocalFile, OffsetRangeExt,
-    OutlineItem, Point, Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions,
-    WordsQuery,
+    IndentSize, Language, LanguageAwareStyling, LanguageName, LanguageRegistry, LanguageScope,
+    LocalFile, OffsetRangeExt, OutlineItem, Point, Selection, SelectionGoal, TextObject,
+    TransactionId, TreeSitterOptions, WordsQuery,
     language_settings::{
         self, AllLanguageSettings, LanguageSettings, LspInsertMode, RewrapBehavior,
         WordsCompletionMode, all_language_settings,
@@ -1265,6 +1265,7 @@ pub struct Editor {
     >,
     use_autoclose: bool,
     use_auto_surround: bool,
+    use_selection_highlight: bool,
     auto_replace_emoji_shortcode: bool,
     jsx_tag_auto_close_enabled_in_any_buffer: bool,
     show_git_blame_gutter: bool,
@@ -2468,6 +2469,7 @@ impl Editor {
             read_only: is_minimap,
             use_autoclose: true,
             use_auto_surround: true,
+            use_selection_highlight: true,
             auto_replace_emoji_shortcode: false,
             jsx_tag_auto_close_enabled_in_any_buffer: false,
             leader_id: None,
@@ -3547,6 +3549,10 @@ impl Editor {
         self.use_autoclose = autoclose;
     }
 
+    pub fn set_use_selection_highlight(&mut self, highlight: bool) {
+        self.use_selection_highlight = highlight;
+    }
+
     pub fn set_use_auto_surround(&mut self, auto_surround: bool) {
         self.use_auto_surround = auto_surround;
     }
@@ -7699,7 +7705,7 @@ impl Editor {
         if matches!(self.mode, EditorMode::SingleLine) {
             return None;
         }
-        if !EditorSettings::get_global(cx).selection_highlight {
+        if !self.use_selection_highlight || !EditorSettings::get_global(cx).selection_highlight {
             return None;
         }
         if self.selections.count() != 1 || self.selections.line_mode() {
@@ -19147,7 +19153,13 @@ impl Editor {
                     let range = buffer.anchor_before(rename_start)..buffer.anchor_after(rename_end);
                     let mut old_highlight_id = None;
                     let old_name: Arc<str> = buffer
-                        .chunks(rename_start..rename_end, true)
+                        .chunks(
+                            rename_start..rename_end,
+                            LanguageAwareStyling {
+                                tree_sitter: true,
+                                diagnostics: true,
+                            },
+                        )
                         .map(|chunk| {
                             if old_highlight_id.is_none() {
                                 old_highlight_id = chunk.syntax_highlight_id;
@@ -25005,7 +25017,13 @@ impl Editor {
             selection.range()
         };
 
-        let chunks = snapshot.chunks(range, true);
+        let chunks = snapshot.chunks(
+            range,
+            LanguageAwareStyling {
+                tree_sitter: true,
+                diagnostics: true,
+            },
+        );
         let mut lines = Vec::new();
         let mut line: VecDeque<Chunk> = VecDeque::new();
 

crates/editor/src/element.rs 🔗

@@ -51,7 +51,10 @@ use gpui::{
     pattern_slash, point, px, quad, relative, size, solid_background, transparent_black,
 };
 use itertools::Itertools;
-use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting};
+use language::{
+    HighlightedText, IndentGuideSettings, LanguageAwareStyling,
+    language_settings::ShowWhitespaceSetting,
+};
 use markdown::Markdown;
 use multi_buffer::{
     Anchor, ExcerptBoundaryInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
@@ -3819,7 +3822,11 @@ impl EditorElement {
         } else {
             let use_tree_sitter = !snapshot.semantic_tokens_enabled
                 || snapshot.use_tree_sitter_for_syntax(rows.start, cx);
-            let chunks = snapshot.highlighted_chunks(rows.clone(), use_tree_sitter, style);
+            let language_aware = LanguageAwareStyling {
+                tree_sitter: use_tree_sitter,
+                diagnostics: true,
+            };
+            let chunks = snapshot.highlighted_chunks(rows.clone(), language_aware, style);
             LineWithInvisibles::from_chunks(
                 chunks,
                 style,
@@ -11999,7 +12006,11 @@ pub fn layout_line(
 ) -> LineWithInvisibles {
     let use_tree_sitter =
         !snapshot.semantic_tokens_enabled || snapshot.use_tree_sitter_for_syntax(row, cx);
-    let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), use_tree_sitter, style);
+    let language_aware = LanguageAwareStyling {
+        tree_sitter: use_tree_sitter,
+        diagnostics: true,
+    };
+    let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), language_aware, style);
     LineWithInvisibles::from_chunks(
         chunks,
         style,

crates/editor/src/semantic_tokens.rs 🔗

@@ -475,13 +475,17 @@ mod tests {
     use gpui::{
         AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
     };
-    use language::{Language, LanguageConfig, LanguageMatcher};
+    use language::{
+        Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageAwareStyling, LanguageConfig,
+        LanguageMatcher,
+    };
     use languages::FakeLspAdapter;
+    use lsp::LanguageServerId;
     use multi_buffer::{
         AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
     };
     use project::Project;
-    use rope::Point;
+    use rope::{Point, PointUtf16};
     use serde_json::json;
     use settings::{
         GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
@@ -2088,6 +2092,130 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_diagnostics_visible_when_semantic_token_set_to_full(cx: &mut TestAppContext) {
+        init_test(cx, |_| {});
+
+        update_test_language_settings(cx, &|language_settings| {
+            language_settings.languages.0.insert(
+                "Rust".into(),
+                LanguageSettingsContent {
+                    semantic_tokens: Some(SemanticTokens::Full),
+                    ..LanguageSettingsContent::default()
+                },
+            );
+        });
+
+        let mut cx = EditorLspTestContext::new_rust(
+            lsp::ServerCapabilities {
+                semantic_tokens_provider: Some(
+                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+                        lsp::SemanticTokensOptions {
+                            legend: lsp::SemanticTokensLegend {
+                                token_types: vec!["function".into()],
+                                token_modifiers: Vec::new(),
+                            },
+                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+                            ..lsp::SemanticTokensOptions::default()
+                        },
+                    ),
+                ),
+                ..lsp::ServerCapabilities::default()
+            },
+            cx,
+        )
+        .await;
+
+        let mut full_request = cx
+            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+                move |_, _, _| {
+                    async move {
+                        Ok(Some(lsp::SemanticTokensResult::Tokens(
+                            lsp::SemanticTokens {
+                                data: vec![
+                                    0, // delta_line
+                                    3, // delta_start
+                                    4, // length
+                                    0, // token_type
+                                    0, // token_modifiers_bitset
+                                ],
+                                result_id: Some("a".into()),
+                            },
+                        )))
+                    }
+                },
+            );
+
+        cx.set_state("ˇfn main() {}");
+        assert!(full_request.next().await.is_some());
+
+        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
+        task.await;
+
+        cx.update_buffer(|buffer, cx| {
+            buffer.update_diagnostics(
+                LanguageServerId(0),
+                DiagnosticSet::new(
+                    [DiagnosticEntry {
+                        range: PointUtf16::new(0, 3)..PointUtf16::new(0, 7),
+                        diagnostic: Diagnostic {
+                            severity: lsp::DiagnosticSeverity::ERROR,
+                            group_id: 1,
+                            message: "unused function".into(),
+                            ..Default::default()
+                        },
+                    }],
+                    buffer,
+                ),
+                cx,
+            )
+        });
+
+        cx.run_until_parked();
+        let chunks = cx.update_editor(|editor, window, cx| {
+            editor
+                .snapshot(window, cx)
+                .display_snapshot
+                .chunks(
+                    crate::display_map::DisplayRow(0)..crate::display_map::DisplayRow(1),
+                    LanguageAwareStyling {
+                        tree_sitter: false,
+                        diagnostics: true,
+                    },
+                    crate::HighlightStyles::default(),
+                )
+                .map(|chunk| {
+                    (
+                        chunk.text.to_string(),
+                        chunk.diagnostic_severity,
+                        chunk.highlight_style,
+                    )
+                })
+                .collect::<Vec<_>>()
+        });
+
+        assert_eq!(
+            extract_semantic_highlights(&cx.editor, &cx),
+            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
+        );
+
+        assert!(
+            chunks.iter().any(
+                |(text, severity, style): &(
+                    String,
+                    Option<lsp::DiagnosticSeverity>,
+                    Option<gpui::HighlightStyle>
+                )| {
+                    text == "main"
+                        && *severity == Some(lsp::DiagnosticSeverity::ERROR)
+                        && style.is_some()
+                }
+            ),
+            "expected 'main' chunk to have both diagnostic and semantic styling: {:?}",
+            chunks
+        );
+    }
+
     fn extract_semantic_highlight_styles(
         editor: &Entity<Editor>,
         cx: &TestAppContext,

crates/env_var/Cargo.toml 🔗

@@ -12,4 +12,4 @@ workspace = true
 path = "src/env_var.rs"
 
 [dependencies]
-gpui.workspace = true
+gpui_shared_string.workspace = true

crates/feature_flags/src/flags.rs 🔗

@@ -12,16 +12,6 @@ impl FeatureFlag for PanicFeatureFlag {
     const NAME: &'static str = "panic";
 }
 
-pub struct AgentV2FeatureFlag;
-
-impl FeatureFlag for AgentV2FeatureFlag {
-    const NAME: &'static str = "agent-v2";
-
-    fn enabled_for_staff() -> bool {
-        true
-    }
-}
-
 /// A feature flag for granting access to beta ACP features.
 ///
 /// We reuse this feature flag for new betas, so don't delete it if it is not currently in use.

crates/file_finder/Cargo.toml 🔗

@@ -21,6 +21,7 @@ editor.workspace = true
 file_icons.workspace = true
 futures.workspace = true
 fuzzy.workspace = true
+fuzzy_nucleo.workspace = true
 gpui.workspace = true
 menu.workspace = true
 open_path_prompt.workspace = true

crates/file_finder/src/file_finder.rs 🔗

@@ -9,7 +9,8 @@ use client::ChannelId;
 use collections::HashMap;
 use editor::Editor;
 use file_icons::FileIcons;
-use fuzzy::{CharBag, PathMatch, PathMatchCandidate, StringMatch, StringMatchCandidate};
+use fuzzy::{StringMatch, StringMatchCandidate};
+use fuzzy_nucleo::{PathMatch, PathMatchCandidate};
 use gpui::{
     Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
     KeyContext, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task, WeakEntity,
@@ -663,15 +664,6 @@ impl Matches {
 
         // For file-vs-file matches, use the existing detailed comparison.
         if let (Some(a_panel), Some(b_panel)) = (a.panel_match(), b.panel_match()) {
-            let a_in_filename = Self::is_filename_match(a_panel);
-            let b_in_filename = Self::is_filename_match(b_panel);
-
-            match (a_in_filename, b_in_filename) {
-                (true, false) => return cmp::Ordering::Greater,
-                (false, true) => return cmp::Ordering::Less,
-                _ => {}
-            }
-
             return a_panel.cmp(b_panel);
         }
 
@@ -691,32 +683,6 @@ impl Matches {
             Match::CreateNew(_) => 0.0,
         }
     }
-
-    /// Determines if the match occurred within the filename rather than in the path
-    fn is_filename_match(panel_match: &ProjectPanelOrdMatch) -> bool {
-        if panel_match.0.positions.is_empty() {
-            return false;
-        }
-
-        if let Some(filename) = panel_match.0.path.file_name() {
-            let path_str = panel_match.0.path.as_unix_str();
-
-            if let Some(filename_pos) = path_str.rfind(filename)
-                && panel_match.0.positions[0] >= filename_pos
-            {
-                let mut prev_position = panel_match.0.positions[0];
-                for p in &panel_match.0.positions[1..] {
-                    if *p != prev_position + 1 {
-                        return false;
-                    }
-                    prev_position = *p;
-                }
-                return true;
-            }
-        }
-
-        false
-    }
 }
 
 fn matching_history_items<'a>(
@@ -731,25 +697,16 @@ fn matching_history_items<'a>(
     let history_items_by_worktrees = history_items
         .into_iter()
         .chain(currently_opened)
-        .filter_map(|found_path| {
+        .map(|found_path| {
             let candidate = PathMatchCandidate {
                 is_dir: false, // You can't open directories as project items
                 path: &found_path.project.path,
                 // Only match history items names, otherwise their paths may match too many queries, producing false positives.
                 // E.g. `foo` would match both `something/foo/bar.rs` and `something/foo/foo.rs` and if the former is a history item,
                 // it would be shown first always, despite the latter being a better match.
-                char_bag: CharBag::from_iter(
-                    found_path
-                        .project
-                        .path
-                        .file_name()?
-                        .to_string()
-                        .to_lowercase()
-                        .chars(),
-                ),
             };
             candidates_paths.insert(&found_path.project, found_path);
-            Some((found_path.project.worktree_id, candidate))
+            (found_path.project.worktree_id, candidate)
         })
         .fold(
             HashMap::default(),
@@ -767,8 +724,9 @@ fn matching_history_items<'a>(
         let worktree_root_name = worktree_name_by_id
             .as_ref()
             .and_then(|w| w.get(&worktree).cloned());
+
         matching_history_paths.extend(
-            fuzzy::match_fixed_path_set(
+            fuzzy_nucleo::match_fixed_path_set(
                 candidates,
                 worktree.to_usize(),
                 worktree_root_name,
@@ -778,6 +736,18 @@ fn matching_history_items<'a>(
                 path_style,
             )
             .into_iter()
+            // filter matches where at least one matched position is in filename portion, to prevent directory matches, nucleo scores them higher as history items are matched against their full path
+            .filter(|path_match| {
+                if let Some(filename) = path_match.path.file_name() {
+                    let filename_start = path_match.path.as_unix_str().len() - filename.len();
+                    path_match
+                        .positions
+                        .iter()
+                        .any(|&pos| pos >= filename_start)
+                } else {
+                    true
+                }
+            })
             .filter_map(|path_match| {
                 candidates_paths
                     .remove_entry(&ProjectPath {
@@ -940,7 +910,7 @@ impl FileFinderDelegate {
         self.cancel_flag = Arc::new(AtomicBool::new(false));
         let cancel_flag = self.cancel_flag.clone();
         cx.spawn_in(window, async move |picker, cx| {
-            let matches = fuzzy::match_path_sets(
+            let matches = fuzzy_nucleo::match_path_sets(
                 candidate_sets.as_slice(),
                 query.path_query(),
                 &relative_to,
@@ -1452,7 +1422,6 @@ impl PickerDelegate for FileFinderDelegate {
         window: &mut Window,
         cx: &mut Context<Picker<Self>>,
     ) -> Task<()> {
-        let raw_query = raw_query.replace(' ', "");
         let raw_query = raw_query.trim();
 
         let raw_query = match &raw_query.get(0..2) {

crates/file_finder/src/file_finder_tests.rs 🔗

@@ -4161,3 +4161,233 @@ async fn test_clear_navigation_history(cx: &mut TestAppContext) {
         "Should have no history items after clearing"
     );
 }
+
// Multi-word queries are split into independent atoms, so the order of the
// words must not affect which paths match: "auth internal" and
// "internal auth" both have to resolve to the same single file.
#[gpui::test]
async fn test_order_independent_search(cx: &mut TestAppContext) {
    let app_state = init_test(cx);
    app_state
        .fs
        .as_fake()
        .insert_tree(
            "/src",
            json!({
                "internal": {
                    "auth": {
                        "login.rs": "",
                    }
                }
            }),
        )
        .await;
    let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
    let (picker, _, cx) = build_find_picker(project, cx);

    // forward order
    picker
        .update_in(cx, |picker, window, cx| {
            picker
                .delegate
                .spawn_search(test_path_position("auth internal"), window, cx)
        })
        .await;
    picker.update(cx, |picker, _| {
        let matches = collect_search_matches(picker).search_matches_only();
        assert_eq!(matches.len(), 1);
        assert_eq!(matches[0].path.as_unix_str(), "internal/auth/login.rs");
    });

    // reverse order should give same result
    picker
        .update_in(cx, |picker, window, cx| {
            picker
                .delegate
                .spawn_search(test_path_position("internal auth"), window, cx)
        })
        .await;
    picker.update(cx, |picker, _| {
        let matches = collect_search_matches(picker).search_matches_only();
        assert_eq!(matches.len(), 1);
        assert_eq!(matches[0].path.as_unix_str(), "internal/auth/login.rs");
    });
}
+
+#[gpui::test]
+async fn test_filename_preferred_over_directory_match(cx: &mut TestAppContext) {
+    let app_state = init_test(cx);
+    app_state
+        .fs
+        .as_fake()
+        .insert_tree(
+            "/src",
+            json!({
+                "crates": {
+                    "settings_ui": {
+                        "src": {
+                            "pages": {
+                                "audio_test_window.rs": "",
+                                "audio_input_output_setup.rs": "",
+                            }
+                        }
+                    },
+                    "audio": {
+                        "src": {
+                            "audio_settings.rs": "",
+                        }
+                    }
+                }
+            }),
+        )
+        .await;
+    let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+    let (picker, _, cx) = build_find_picker(project, cx);
+
+    picker
+        .update_in(cx, |picker, window, cx| {
+            picker
+                .delegate
+                .spawn_search(test_path_position("settings audio"), window, cx)
+        })
+        .await;
+    picker.update(cx, |picker, _| {
+        let matches = collect_search_matches(picker).search_matches_only();
+        assert!(!matches.is_empty(),);
+        assert_eq!(
+            matches[0].path.as_unix_str(),
+            "crates/audio/src/audio_settings.rs"
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_start_of_word_preferred_over_scattered_match(cx: &mut TestAppContext) {
+    let app_state = init_test(cx);
+    app_state
+        .fs
+        .as_fake()
+        .insert_tree(
+            "/src",
+            json!({
+                "crates": {
+                    "livekit_client": {
+                        "src": {
+                            "livekit_client": {
+                                "playback.rs": "",
+                            }
+                        }
+                    },
+                    "vim": {
+                        "test_data": {
+                            "test_record_replay_interleaved.json": "",
+                        }
+                    }
+                }
+            }),
+        )
+        .await;
+    let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+    let (picker, _, cx) = build_find_picker(project, cx);
+
+    picker
+        .update_in(cx, |picker, window, cx| {
+            picker
+                .delegate
+                .spawn_search(test_path_position("live pla"), window, cx)
+        })
+        .await;
+    picker.update(cx, |picker, _| {
+        let matches = collect_search_matches(picker).search_matches_only();
+        assert!(!matches.is_empty(),);
+        assert_eq!(
+            matches[0].path.as_unix_str(),
+            "crates/livekit_client/src/livekit_client/playback.rs",
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_exact_filename_stem_preferred(cx: &mut TestAppContext) {
+    let app_state = init_test(cx);
+    app_state
+        .fs
+        .as_fake()
+        .insert_tree(
+            "/src",
+            json!({
+                "assets": {
+                    "icons": {
+                        "file_icons": {
+                            "nix.svg": "",
+                        }
+                    }
+                },
+                "crates": {
+                    "zed": {
+                        "resources": {
+                            "app-icon-nightly@2x.png": "",
+                            "app-icon-preview@2x.png": "",
+                        }
+                    }
+                }
+            }),
+        )
+        .await;
+    let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+    let (picker, _, cx) = build_find_picker(project, cx);
+
+    picker
+        .update_in(cx, |picker, window, cx| {
+            picker
+                .delegate
+                .spawn_search(test_path_position("nix icon"), window, cx)
+        })
+        .await;
+    picker.update(cx, |picker, _| {
+        let matches = collect_search_matches(picker).search_matches_only();
+        assert!(!matches.is_empty(),);
+        assert_eq!(
+            matches[0].path.as_unix_str(),
+            "assets/icons/file_icons/nix.svg",
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_exact_filename_with_directory_token(cx: &mut TestAppContext) {
+    let app_state = init_test(cx);
+    app_state
+        .fs
+        .as_fake()
+        .insert_tree(
+            "/src",
+            json!({
+                "crates": {
+                    "agent_servers": {
+                        "src": {
+                            "acp.rs": "",
+                            "agent_server.rs": "",
+                            "custom.rs": "",
+                        }
+                    }
+                }
+            }),
+        )
+        .await;
+    let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+    let (picker, _, cx) = build_find_picker(project, cx);
+
+    picker
+        .update_in(cx, |picker, window, cx| {
+            picker
+                .delegate
+                .spawn_search(test_path_position("acp server"), window, cx)
+        })
+        .await;
+    picker.update(cx, |picker, _| {
+        let matches = collect_search_matches(picker).search_matches_only();
+        assert!(!matches.is_empty(),);
+        assert_eq!(
+            matches[0].path.as_unix_str(),
+            "crates/agent_servers/src/acp.rs",
+        );
+    });
+}

crates/fs/src/fake_git_repo.rs 🔗

@@ -6,9 +6,10 @@ use git::{
     Oid, RunHook,
     blame::Blame,
     repository::{
-        AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
-        GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
-        LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
+        AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
+        CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
+        GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
+        RepoPath, ResetMode, SearchCommitArgs, Worktree,
     },
     stash::GitStash,
     status::{
@@ -60,6 +61,7 @@ pub struct FakeGitRepositoryState {
     pub remotes: HashMap<String, String>,
     pub simulated_index_write_error_message: Option<String>,
     pub simulated_create_worktree_error: Option<String>,
+    pub simulated_graph_error: Option<String>,
     pub refs: HashMap<String, String>,
     pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
     pub stash_entries: GitStash,
@@ -77,6 +79,7 @@ impl FakeGitRepositoryState {
             branches: Default::default(),
             simulated_index_write_error_message: Default::default(),
             simulated_create_worktree_error: Default::default(),
+            simulated_graph_error: None,
             refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
             merge_base_contents: Default::default(),
             oids: Default::default(),
@@ -540,9 +543,8 @@ impl GitRepository for FakeGitRepository {
 
     fn create_worktree(
         &self,
-        branch_name: Option<String>,
+        target: CreateWorktreeTarget,
         path: PathBuf,
-        from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>> {
         let fs = self.fs.clone();
         let executor = self.executor.clone();
@@ -550,30 +552,82 @@ impl GitRepository for FakeGitRepository {
         let common_dir_path = self.common_dir_path.clone();
         async move {
             executor.simulate_random_delay().await;
-            // Check for simulated error and duplicate branch before any side effects.
-            fs.with_git_state(&dot_git_path, false, |state| {
-                if let Some(message) = &state.simulated_create_worktree_error {
-                    anyhow::bail!("{message}");
-                }
-                if let Some(ref name) = branch_name {
-                    if state.branches.contains(name) {
-                        bail!("a branch named '{}' already exists", name);
+
+            let branch_name = target.branch_name().map(ToOwned::to_owned);
+            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });
+
+            // Check for simulated error and validate branch state before any side effects.
+            fs.with_git_state(&dot_git_path, false, {
+                let branch_name = branch_name.clone();
+                move |state| {
+                    if let Some(message) = &state.simulated_create_worktree_error {
+                        anyhow::bail!("{message}");
+                    }
+
+                    match (create_branch_ref, branch_name.as_ref()) {
+                        (true, Some(branch_name)) => {
+                            if state.branches.contains(branch_name) {
+                                bail!("a branch named '{}' already exists", branch_name);
+                            }
+                        }
+                        (false, Some(branch_name)) => {
+                            if !state.branches.contains(branch_name) {
+                                bail!("no branch named '{}' exists", branch_name);
+                            }
+                        }
+                        (false, None) => {}
+                        (true, None) => bail!("branch name is required to create a branch"),
                     }
+
+                    Ok(())
                 }
-                Ok(())
             })??;
 
+            let (branch_name, sha, create_branch_ref) = match target {
+                CreateWorktreeTarget::ExistingBranch { branch_name } => {
+                    let ref_name = format!("refs/heads/{branch_name}");
+                    let sha = fs.with_git_state(&dot_git_path, false, {
+                        move |state| {
+                            Ok::<_, anyhow::Error>(
+                                state
+                                    .refs
+                                    .get(&ref_name)
+                                    .cloned()
+                                    .unwrap_or_else(|| "fake-sha".to_string()),
+                            )
+                        }
+                    })??;
+                    (Some(branch_name), sha, false)
+                }
+                CreateWorktreeTarget::NewBranch {
+                    branch_name,
+                    base_sha: start_point,
+                } => (
+                    Some(branch_name),
+                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
+                    true,
+                ),
+                CreateWorktreeTarget::Detached {
+                    base_sha: start_point,
+                } => (
+                    None,
+                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
+                    false,
+                ),
+            };
+
             // Create the worktree checkout directory.
             fs.create_dir(&path).await?;
 
             // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
-            let worktree_entry_name = branch_name
-                .as_deref()
-                .unwrap_or_else(|| path.file_name().unwrap().to_str().unwrap());
+            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
+                path.file_name()
+                    .and_then(|name| name.to_str())
+                    .unwrap_or("detached")
+            });
             let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
             fs.create_dir(&worktrees_entry_dir).await?;
 
-            let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
             let head_content = if let Some(ref branch_name) = branch_name {
                 let ref_name = format!("refs/heads/{branch_name}");
                 format!("ref: {ref_name}")
@@ -604,15 +658,22 @@ impl GitRepository for FakeGitRepository {
                 false,
             )?;
 
-            // Update git state: add ref and branch.
-            fs.with_git_state(&dot_git_path, true, move |state| {
-                if let Some(branch_name) = branch_name {
-                    let ref_name = format!("refs/heads/{branch_name}");
-                    state.refs.insert(ref_name, sha);
-                    state.branches.insert(branch_name);
-                }
-                Ok::<(), anyhow::Error>(())
-            })??;
+            // Update git state for newly created branches.
+            if create_branch_ref {
+                fs.with_git_state(&dot_git_path, true, {
+                    let branch_name = branch_name.clone();
+                    let sha = sha.clone();
+                    move |state| {
+                        if let Some(branch_name) = branch_name {
+                            let ref_name = format!("refs/heads/{branch_name}");
+                            state.refs.insert(ref_name, sha);
+                            state.branches.insert(branch_name);
+                        }
+                        Ok::<(), anyhow::Error>(())
+                    }
+                })??;
+            }
+
             Ok(())
         }
         .boxed()
@@ -1268,8 +1329,17 @@ impl GitRepository for FakeGitRepository {
         let fs = self.fs.clone();
         let dot_git_path = self.dot_git_path.clone();
         async move {
-            let graph_commits =
-                fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;
+            let (graph_commits, simulated_error) =
+                fs.with_git_state(&dot_git_path, false, |state| {
+                    (
+                        state.graph_commits.clone(),
+                        state.simulated_graph_error.clone(),
+                    )
+                })?;
+
+            if let Some(error) = simulated_error {
+                anyhow::bail!("{}", error);
+            }
 
             for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
                 request_tx.send(chunk.to_vec()).await.ok();

crates/fs/src/fs.rs 🔗

@@ -2168,6 +2168,13 @@ impl FakeFs {
         .unwrap();
     }
 
    /// Simulate (or, with `None`, clear) an error that the fake repository
    /// reports when streaming its commit graph.
    pub fn set_graph_error(&self, dot_git: &Path, error: Option<String>) {
        self.with_git_state(dot_git, true, |state| {
            state.simulated_graph_error = error;
        })
        .unwrap();
    }
+
     /// Put the given git repository into a state with the given status,
     /// by mutating the head, index, and unmerged state.
     pub fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&str, FileStatus)]) {

crates/fs/tests/integration/fake_git_repo.rs 🔗

@@ -24,9 +24,11 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
     // Create a worktree
     let worktree_1_dir = worktrees_dir.join("feature-branch");
     repo.create_worktree(
-        Some("feature-branch".to_string()),
+        git::repository::CreateWorktreeTarget::NewBranch {
+            branch_name: "feature-branch".to_string(),
+            base_sha: Some("abc123".to_string()),
+        },
         worktree_1_dir.clone(),
-        Some("abc123".to_string()),
     )
     .await
     .unwrap();
@@ -48,9 +50,11 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
     // Create a second worktree (without explicit commit)
     let worktree_2_dir = worktrees_dir.join("bugfix-branch");
     repo.create_worktree(
-        Some("bugfix-branch".to_string()),
+        git::repository::CreateWorktreeTarget::NewBranch {
+            branch_name: "bugfix-branch".to_string(),
+            base_sha: None,
+        },
         worktree_2_dir.clone(),
-        None,
     )
     .await
     .unwrap();

crates/fuzzy_nucleo/Cargo.toml 🔗

@@ -0,0 +1,21 @@
+[package]
+name = "fuzzy_nucleo"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/fuzzy_nucleo.rs"
+doctest = false
+
+[dependencies]
+nucleo.workspace = true
+gpui.workspace = true
+util.workspace = true
+
+[dev-dependencies]
util = { workspace = true, features = ["test-support"] }

crates/fuzzy_nucleo/src/matcher.rs 🔗

@@ -0,0 +1,39 @@
+use std::sync::Mutex;
+
/// Process-wide pool of reusable nucleo matchers; `get_*`/`return_*` below
/// recycle them to avoid re-allocating matcher internals per search.
static MATCHERS: Mutex<Vec<nucleo::Matcher>> = Mutex::new(Vec::new());

/// Score subtracted per byte of candidate path length, biasing shorter paths.
pub const LENGTH_PENALTY: f64 = 0.01;
+
+pub fn get_matcher(config: nucleo::Config) -> nucleo::Matcher {
+    let mut matchers = MATCHERS.lock().unwrap();
+    match matchers.pop() {
+        Some(mut matcher) => {
+            matcher.config = config;
+            matcher
+        }
+        None => nucleo::Matcher::new(config),
+    }
+}
+
/// Return a matcher to the shared pool so a later `get_matcher` call can
/// reuse its internal allocations.
pub fn return_matcher(matcher: nucleo::Matcher) {
    MATCHERS.lock().unwrap().push(matcher);
}
+
+pub fn get_matchers(n: usize, config: nucleo::Config) -> Vec<nucleo::Matcher> {
+    let mut matchers: Vec<_> = {
+        let mut pool = MATCHERS.lock().unwrap();
+        let available = pool.len().min(n);
+        pool.drain(..available)
+            .map(|mut matcher| {
+                matcher.config = config.clone();
+                matcher
+            })
+            .collect()
+    };
+    matchers.resize_with(n, || nucleo::Matcher::new(config.clone()));
+    matchers
+}
+
/// Return a batch of matchers to the shared pool for reuse by later searches.
pub fn return_matchers(mut matchers: Vec<nucleo::Matcher>) {
    MATCHERS.lock().unwrap().append(&mut matchers);
}

crates/fuzzy_nucleo/src/paths.rs 🔗

@@ -0,0 +1,352 @@
+use gpui::BackgroundExecutor;
+use std::{
+    cmp::Ordering,
+    sync::{
+        Arc,
+        atomic::{self, AtomicBool},
+    },
+};
+use util::{paths::PathStyle, rel_path::RelPath};
+
+use nucleo::Utf32Str;
+use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization};
+
+use crate::matcher::{self, LENGTH_PENALTY};
+
/// A single path offered to the fuzzy matcher, borrowed from a candidate set.
#[derive(Clone, Debug)]
pub struct PathMatchCandidate<'a> {
    // Whether the candidate is a directory rather than a file.
    pub is_dir: bool,
    // The worktree-relative path the query is matched against.
    pub path: &'a RelPath,
}

/// The result of fuzzy-matching a query against one candidate path.
#[derive(Clone, Debug)]
pub struct PathMatch {
    // Summed nucleo atom scores, plus a filename bonus, minus a length
    // penalty; higher is better.
    pub score: f64,
    // Byte offsets (into the prefix + path string) of matched characters.
    pub positions: Vec<usize>,
    pub worktree_id: usize,
    pub path: Arc<RelPath>,
    pub path_prefix: Arc<RelPath>,
    pub is_dir: bool,
    /// Number of steps removed from a shared parent with the relative path
    /// Used to order closer paths first in the search list
    pub distance_to_relative_ancestor: usize,
}
+
/// A lazily-enumerable set of path candidates (typically one worktree),
/// matched in parallel by `match_path_sets`.
pub trait PathMatchCandidateSet<'a>: Send + Sync {
    type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
    /// Stable identifier for this set; recorded as `PathMatch::worktree_id`.
    fn id(&self) -> usize;
    /// Total number of candidates; used to partition work across CPUs.
    fn len(&self) -> usize;
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// True when the worktree root is itself a single file.
    fn root_is_file(&self) -> bool;
    /// Prefix (e.g. the worktree root name) prepended to candidate paths.
    fn prefix(&self) -> Arc<RelPath>;
    /// Iterate candidates starting at index `start`.
    fn candidates(&'a self, start: usize) -> Self::Candidates;
    fn path_style(&self) -> PathStyle;
}
+
impl PartialEq for PathMatch {
    fn eq(&self, other: &Self) -> bool {
        // Equality is defined in terms of the total order below.
        self.cmp(other).is_eq()
    }
}

impl Eq for PathMatch {}

impl PartialOrd for PathMatch {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for PathMatch {
    /// Orders by ascending score (incomparable floats compare equal), then
    /// worktree id, then ancestor distance, then path. Callers sort in
    /// descending order (`b.cmp(a)`) to get best matches first.
    fn cmp(&self, other: &Self) -> Ordering {
        self.score
            .partial_cmp(&other.score)
            .unwrap_or(Ordering::Equal)
            .then_with(|| self.worktree_id.cmp(&other.worktree_id))
            .then_with(|| {
                // Reversed on purpose: smaller distances should win once the
                // caller sorts descending.
                other
                    .distance_to_relative_ancestor
                    .cmp(&self.distance_to_relative_ancestor)
            })
            .then_with(|| self.path.cmp(&other.path))
    }
}
+
+fn make_atoms(query: &str, smart_case: bool) -> Vec<Atom> {
+    let case = if smart_case {
+        CaseMatching::Smart
+    } else {
+        CaseMatching::Ignore
+    };
+    query
+        .split_whitespace()
+        .map(|word| Atom::new(word, case, Normalization::Smart, AtomKind::Fuzzy, false))
+        .collect()
+}
+
+pub(crate) fn distance_between_paths(path: &RelPath, relative_to: &RelPath) -> usize {
+    let mut path_components = path.components();
+    let mut relative_components = relative_to.components();
+
+    while path_components
+        .next()
+        .zip(relative_components.next())
+        .map(|(path_component, relative_component)| path_component == relative_component)
+        .unwrap_or_default()
+    {}
+    path_components.count() + relative_components.count() + 1
+}
+
+fn get_filename_match_bonus(
+    candidate_buf: &str,
+    query_atoms: &[Atom],
+    matcher: &mut nucleo::Matcher,
+) -> f64 {
+    let filename = match std::path::Path::new(candidate_buf).file_name() {
+        Some(f) => f.to_str().unwrap_or(""),
+        None => return 0.0,
+    };
+    if filename.is_empty() || query_atoms.is_empty() {
+        return 0.0;
+    }
+    let mut buf = Vec::new();
+    let haystack = Utf32Str::new(filename, &mut buf);
+    let mut total_score = 0u32;
+    for atom in query_atoms {
+        if let Some(score) = atom.score(haystack, matcher) {
+            total_score = total_score.saturating_add(score as u32);
+        }
+    }
+    total_score as f64 / filename.len().max(1) as f64
+}
/// Marker error indicating that matching was aborted via the cancel flag.
struct Cancelled;

/// Match `atoms` against each candidate (rendered as `<prefix><sep><path>`),
/// appending successful matches to `results`.
///
/// The string and index buffers are reused across iterations to avoid
/// per-candidate allocations. Returns `Err(Cancelled)` when `cancel_flag` is
/// observed set mid-iteration; matches pushed before that remain in `results`.
fn path_match_helper<'a>(
    matcher: &mut nucleo::Matcher,
    atoms: &[Atom],
    candidates: impl Iterator<Item = PathMatchCandidate<'a>>,
    results: &mut Vec<PathMatch>,
    worktree_id: usize,
    path_prefix: &Arc<RelPath>,
    root_is_file: bool,
    relative_to: &Option<Arc<RelPath>>,
    path_style: PathStyle,
    cancel_flag: &AtomicBool,
) -> Result<(), Cancelled> {
    // Seed the reusable buffer with "<prefix><separator>" for directory
    // roots; single-file roots are matched against the prefix itself below.
    let mut candidate_buf = if !path_prefix.is_empty() && !root_is_file {
        let mut s = path_prefix.display(path_style).to_string();
        s.push_str(path_style.primary_separator());
        s
    } else {
        String::new()
    };
    let path_prefix_len = candidate_buf.len();
    let mut buf = Vec::new();
    let mut matched_chars: Vec<u32> = Vec::new();
    let mut atom_matched_chars = Vec::new();
    for candidate in candidates {
        buf.clear();
        matched_chars.clear();
        if cancel_flag.load(atomic::Ordering::Relaxed) {
            return Err(Cancelled);
        }

        // Rebuild "<prefix><candidate path>" in place, keeping the prefix.
        candidate_buf.truncate(path_prefix_len);
        if root_is_file {
            candidate_buf.push_str(path_prefix.as_unix_str());
        } else {
            candidate_buf.push_str(candidate.path.as_unix_str());
        }

        let haystack = Utf32Str::new(&candidate_buf, &mut buf);

        let mut total_score: u32 = 0;
        let mut all_matched = true;

        // Every atom (query word) must match somewhere in the path; scores
        // are summed and matched character indices accumulated.
        for atom in atoms {
            atom_matched_chars.clear();
            if let Some(score) = atom.indices(haystack, matcher, &mut atom_matched_chars) {
                total_score = total_score.saturating_add(score as u32);
                matched_chars.extend_from_slice(&atom_matched_chars);
            } else {
                all_matched = false;
                break;
            }
        }

        if all_matched && !atoms.is_empty() {
            matched_chars.sort_unstable();
            matched_chars.dedup();

            // Penalize longer paths and reward filename hits so short paths
            // and filename matches rank above deep directory-only matches.
            let length_penalty = candidate_buf.len() as f64 * LENGTH_PENALTY;
            let filename_bonus = get_filename_match_bonus(&candidate_buf, atoms, matcher);
            let adjusted_score = total_score as f64 + filename_bonus - length_penalty;
            // Translate nucleo's character indices into byte offsets within
            // `candidate_buf`, which is what `PathMatch::positions` stores.
            let mut positions: Vec<usize> = candidate_buf
                .char_indices()
                .enumerate()
                .filter_map(|(char_offset, (byte_offset, _))| {
                    matched_chars
                        .contains(&(char_offset as u32))
                        .then_some(byte_offset)
                })
                .collect();
            positions.sort_unstable();

            results.push(PathMatch {
                score: adjusted_score,
                positions,
                worktree_id,
                path: if root_is_file {
                    Arc::clone(path_prefix)
                } else {
                    candidate.path.into()
                },
                path_prefix: if root_is_file {
                    RelPath::empty().into()
                } else {
                    Arc::clone(path_prefix)
                },
                is_dir: candidate.is_dir,
                distance_to_relative_ancestor: relative_to
                    .as_ref()
                    .map_or(usize::MAX, |relative_to| {
                        distance_between_paths(candidate.path, relative_to.as_ref())
                    }),
            });
        }
    }
    Ok(())
}
+
/// Synchronously match `query` against a small, fixed candidate list (e.g.
/// file-finder history items) belonging to `worktree_id`, returning at most
/// `max_results` matches sorted best-first.
pub fn match_fixed_path_set(
    candidates: Vec<PathMatchCandidate>,
    worktree_id: usize,
    worktree_root_name: Option<Arc<RelPath>>,
    query: &str,
    smart_case: bool,
    max_results: usize,
    path_style: PathStyle,
) -> Vec<PathMatch> {
    let mut config = nucleo::Config::DEFAULT;
    config.set_match_paths();
    let mut matcher = matcher::get_matcher(config);

    let atoms = make_atoms(query, smart_case);

    // A single-file worktree presents its root name as the only "path";
    // its candidates then all have empty relative paths.
    let root_is_file = worktree_root_name.is_some() && candidates.iter().all(|c| c.path.is_empty());

    let path_prefix = worktree_root_name.unwrap_or_else(|| RelPath::empty().into());

    let mut results = Vec::new();

    // Never cancelled: this is the synchronous, small-set entry point.
    path_match_helper(
        &mut matcher,
        &atoms,
        candidates.into_iter(),
        &mut results,
        worktree_id,
        &path_prefix,
        root_is_file,
        &None,
        path_style,
        &AtomicBool::new(false),
    )
    .ok();
    // Keep only the best `max_results` matches, ordered best-first.
    util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| b.cmp(a));
    matcher::return_matcher(matcher);
    results
}
+
/// Fuzzy-match `query` against every candidate set concurrently, splitting
/// the combined candidate space into one contiguous segment per CPU.
///
/// Returns at most `max_results` matches sorted best-first, or an empty vec
/// when there are no candidates or `cancel_flag` was set during matching.
pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
    candidate_sets: &'a [Set],
    query: &str,
    relative_to: &Option<Arc<RelPath>>,
    smart_case: bool,
    max_results: usize,
    cancel_flag: &AtomicBool,
    executor: BackgroundExecutor,
) -> Vec<PathMatch> {
    let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
    if path_count == 0 {
        return Vec::new();
    }

    let path_style = candidate_sets[0].path_style();

    // Normalize Windows separators in the query so it matches the
    // unix-style haystacks built by `path_match_helper`.
    let query = if path_style.is_windows() {
        query.replace('\\', "/")
    } else {
        query.to_owned()
    };

    let atoms = make_atoms(&query, smart_case);

    // Partition the global index range [0, path_count) into one segment per
    // CPU; each segment owns its matcher and result vector, so matching
    // needs no synchronization.
    let num_cpus = executor.num_cpus().min(path_count);
    let segment_size = path_count.div_ceil(num_cpus);
    let mut segment_results = (0..num_cpus)
        .map(|_| Vec::with_capacity(max_results))
        .collect::<Vec<_>>();
    let mut config = nucleo::Config::DEFAULT;
    config.set_match_paths();
    let mut matchers = matcher::get_matchers(num_cpus, config);
    executor
        .scoped(|scope| {
            for (segment_idx, (results, matcher)) in segment_results
                .iter_mut()
                .zip(matchers.iter_mut())
                .enumerate()
            {
                let atoms = atoms.clone();
                let relative_to = relative_to.clone();
                scope.spawn(async move {
                    let segment_start = segment_idx * segment_size;
                    let segment_end = segment_start + segment_size;

                    // Walk the sets, translating the global segment range
                    // into a local (start..end) range within each set.
                    let mut tree_start = 0;
                    for candidate_set in candidate_sets {
                        let tree_end = tree_start + candidate_set.len();

                        if tree_start < segment_end && segment_start < tree_end {
                            let start = tree_start.max(segment_start) - tree_start;
                            let end = tree_end.min(segment_end) - tree_start;
                            let candidates = candidate_set.candidates(start).take(end - start);

                            if path_match_helper(
                                matcher,
                                &atoms,
                                candidates,
                                results,
                                candidate_set.id(),
                                &candidate_set.prefix(),
                                candidate_set.root_is_file(),
                                &relative_to,
                                path_style,
                                cancel_flag,
                            )
                            .is_err()
                            {
                                // Cancelled; abandon this segment.
                                break;
                            }
                        }

                        if tree_end >= segment_end {
                            break;
                        }
                        tree_start = tree_end;
                    }
                });
            }
        })
        .await;

    matcher::return_matchers(matchers);
    // A cancelled search may have produced partial segments; discard them.
    if cancel_flag.load(atomic::Ordering::Acquire) {
        return Vec::new();
    }

    let mut results = segment_results.concat();
    util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| b.cmp(a));
    results
}

crates/git/src/repository.rs 🔗

@@ -241,20 +241,57 @@ pub struct Worktree {
     pub is_main: bool,
 }
 
+/// Describes how a new worktree should choose or create its checked-out HEAD.
+///
+/// Note: despite the field name, `base_sha` accepts any committish (a commit
+/// SHA or a ref name), not only a SHA.
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub enum CreateWorktreeTarget {
+    /// Check out an existing local branch in the new worktree.
+    ExistingBranch {
+        /// The existing local branch to check out.
+        branch_name: String,
+    },
+    /// Create a new local branch for the new worktree.
+    NewBranch {
+        /// The new local branch to create and check out.
+        branch_name: String,
+        /// The commit or ref to create the branch from. Uses `HEAD` when `None`.
+        base_sha: Option<String>,
+    },
+    /// Check out a commit or ref in detached HEAD state.
+    Detached {
+        /// The commit or ref to check out. Uses `HEAD` when `None`.
+        base_sha: Option<String>,
+    },
+}
+
+impl CreateWorktreeTarget {
+    /// Returns the branch name for targets that reference a branch
+    /// (`ExistingBranch` or `NewBranch`); `None` for `Detached`.
+    pub fn branch_name(&self) -> Option<&str> {
+        match self {
+            Self::ExistingBranch { branch_name } | Self::NewBranch { branch_name, .. } => {
+                Some(branch_name)
+            }
+            Self::Detached { .. } => None,
+        }
+    }
+}
+
 impl Worktree {
+    /// Returns the branch name if the worktree is attached to a branch.
+    ///
+    /// Strips the `refs/heads/` or `refs/remotes/` prefix from the stored ref
+    /// name; returns `None` when HEAD is detached (no `ref_name`).
+    pub fn branch_name(&self) -> Option<&str> {
+        self.ref_name.as_ref().map(|ref_name| {
+            ref_name
+                .strip_prefix("refs/heads/")
+                .or_else(|| ref_name.strip_prefix("refs/remotes/"))
+                .unwrap_or(ref_name)
+        })
+    }
+
     /// Returns a display name for the worktree, suitable for use in the UI.
     ///
     /// If the worktree is attached to a branch, returns the branch name.
     /// Otherwise, returns the short SHA of the worktree's HEAD commit.
     pub fn display_name(&self) -> &str {
-        match self.ref_name {
-            Some(ref ref_name) => ref_name
-                .strip_prefix("refs/heads/")
-                .or_else(|| ref_name.strip_prefix("refs/remotes/"))
-                .unwrap_or(ref_name),
-            // Detached HEAD — show the short SHA as a fallback.
-            None => &self.sha[..self.sha.len().min(SHORT_SHA_LENGTH)],
-        }
+        self.branch_name()
+            // Detached HEAD — show the short SHA as a fallback.
+            .unwrap_or(&self.sha[..self.sha.len().min(SHORT_SHA_LENGTH)])
     }
 }
 
@@ -716,9 +753,8 @@ pub trait GitRepository: Send + Sync {
 
     fn create_worktree(
         &self,
-        branch_name: Option<String>,
+        target: CreateWorktreeTarget,
         path: PathBuf,
-        from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>>;
 
     fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>>;
@@ -1669,24 +1705,36 @@ impl GitRepository for RealGitRepository {
 
     fn create_worktree(
         &self,
-        branch_name: Option<String>,
+        target: CreateWorktreeTarget,
         path: PathBuf,
-        from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>> {
         let git_binary = self.git_binary();
         let mut args = vec![OsString::from("worktree"), OsString::from("add")];
-        if let Some(branch_name) = &branch_name {
-            args.push(OsString::from("-b"));
-            args.push(OsString::from(branch_name.as_str()));
-        } else {
-            args.push(OsString::from("--detach"));
-        }
-        args.push(OsString::from("--"));
-        args.push(OsString::from(path.as_os_str()));
-        if let Some(from_commit) = from_commit {
-            args.push(OsString::from(from_commit));
-        } else {
-            args.push(OsString::from("HEAD"));
+
+        match &target {
+            CreateWorktreeTarget::ExistingBranch { branch_name } => {
+                args.push(OsString::from("--"));
+                args.push(OsString::from(path.as_os_str()));
+                args.push(OsString::from(branch_name));
+            }
+            CreateWorktreeTarget::NewBranch {
+                branch_name,
+                base_sha: start_point,
+            } => {
+                args.push(OsString::from("-b"));
+                args.push(OsString::from(branch_name));
+                args.push(OsString::from("--"));
+                args.push(OsString::from(path.as_os_str()));
+                args.push(OsString::from(start_point.as_deref().unwrap_or("HEAD")));
+            }
+            CreateWorktreeTarget::Detached {
+                base_sha: start_point,
+            } => {
+                args.push(OsString::from("--detach"));
+                args.push(OsString::from("--"));
+                args.push(OsString::from(path.as_os_str()));
+                args.push(OsString::from(start_point.as_deref().unwrap_or("HEAD")));
+            }
         }
 
         self.executor
@@ -2750,10 +2798,11 @@ impl GitRepository for RealGitRepository {
                 log_source.get_arg()?,
             ]);
             command.stdout(Stdio::piped());
-            command.stderr(Stdio::null());
+            command.stderr(Stdio::piped());
 
             let mut child = command.spawn()?;
             let stdout = child.stdout.take().context("failed to get stdout")?;
+            let stderr = child.stderr.take().context("failed to get stderr")?;
             let mut reader = BufReader::new(stdout);
 
             let mut line_buffer = String::new();
@@ -2788,7 +2837,20 @@ impl GitRepository for RealGitRepository {
                 }
             }
 
-            child.status().await?;
+            let status = child.status().await?;
+            if !status.success() {
+                let mut stderr_output = String::new();
+                BufReader::new(stderr)
+                    .read_to_string(&mut stderr_output)
+                    .await
+                    .log_err();
+
+                if stderr_output.is_empty() {
+                    anyhow::bail!("git log command failed with {}", status);
+                } else {
+                    anyhow::bail!("git log command failed with {}: {}", status, stderr_output);
+                }
+            }
             Ok(())
         }
         .boxed()
@@ -4068,9 +4130,11 @@ mod tests {
 
         // Create a new worktree
         repo.create_worktree(
-            Some("test-branch".to_string()),
+            CreateWorktreeTarget::NewBranch {
+                branch_name: "test-branch".to_string(),
+                base_sha: Some("HEAD".to_string()),
+            },
             worktree_path.clone(),
-            Some("HEAD".to_string()),
         )
         .await
         .unwrap();
@@ -4127,9 +4191,11 @@ mod tests {
         // Create a worktree
         let worktree_path = worktrees_dir.join("worktree-to-remove");
         repo.create_worktree(
-            Some("to-remove".to_string()),
+            CreateWorktreeTarget::NewBranch {
+                branch_name: "to-remove".to_string(),
+                base_sha: Some("HEAD".to_string()),
+            },
             worktree_path.clone(),
-            Some("HEAD".to_string()),
         )
         .await
         .unwrap();
@@ -4151,9 +4217,11 @@ mod tests {
         // Create a worktree
         let worktree_path = worktrees_dir.join("dirty-wt");
         repo.create_worktree(
-            Some("dirty-wt".to_string()),
+            CreateWorktreeTarget::NewBranch {
+                branch_name: "dirty-wt".to_string(),
+                base_sha: Some("HEAD".to_string()),
+            },
             worktree_path.clone(),
-            Some("HEAD".to_string()),
         )
         .await
         .unwrap();
@@ -4221,9 +4289,11 @@ mod tests {
         // Create a worktree
         let old_path = worktrees_dir.join("old-worktree-name");
         repo.create_worktree(
-            Some("old-name".to_string()),
+            CreateWorktreeTarget::NewBranch {
+                branch_name: "old-name".to_string(),
+                base_sha: Some("HEAD".to_string()),
+            },
             old_path.clone(),
-            Some("HEAD".to_string()),
         )
         .await
         .unwrap();

crates/git_graph/src/git_graph.rs 🔗

@@ -2536,11 +2536,19 @@ impl Render for GitGraph {
             }
         };
 
+        let error = self.get_repository(cx).and_then(|repo| {
+            repo.read(cx)
+                .get_graph_data(self.log_source.clone(), self.log_order)
+                .and_then(|data| data.error.clone())
+        });
+
         let content = if commit_count == 0 {
-            let message = if is_loading {
-                "Loading"
+            let message = if let Some(error) = &error {
+                format!("Error loading: {}", error)
+            } else if is_loading {
+                "Loading".to_string()
             } else {
-                "No commits found"
+                "No commits found".to_string()
             };
             let label = Label::new(message)
                 .color(Color::Muted)
@@ -2552,7 +2560,7 @@ impl Render for GitGraph {
                 .items_center()
                 .justify_center()
                 .child(label)
-                .when(is_loading, |this| {
+                .when(is_loading && error.is_none(), |this| {
                     this.child(self.render_loading_spinner(cx))
                 })
         } else {
@@ -3757,6 +3765,61 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_initial_graph_data_propagates_error(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        // Set up a fake project with a .git directory so a repository is detected.
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(
+            Path::new("/project"),
+            json!({
+                ".git": {},
+                "file.txt": "content",
+            }),
+        )
+        .await;
+
+        // Rig the fake git backend so loading graph data fails with a git error.
+        fs.set_graph_error(
+            Path::new("/project/.git"),
+            Some("fatal: bad default revision 'HEAD'".to_string()),
+        );
+
+        let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+
+        let repository = project.read_with(cx, |project, cx| {
+            project
+                .active_repository(cx)
+                .expect("should have a repository")
+        });
+
+        // Trigger the initial graph-data load for the full commit range.
+        repository.update(cx, |repo, cx| {
+            repo.graph_data(
+                crate::LogSource::default(),
+                crate::LogOrder::default(),
+                0..usize::MAX,
+                cx,
+            );
+        });
+
+        cx.run_until_parked();
+
+        let error = repository.read_with(cx, |repo, _| {
+            repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default())
+                .and_then(|data| data.error.clone())
+        });
+
+        // The backend failure must surface on the cached graph data, not be dropped.
+        assert!(
+            error.is_some(),
+            "graph data should contain an error after initial_graph_data fails"
+        );
+        let error_message = error.unwrap();
+        assert!(
+            error_message.contains("bad default revision"),
+            "error should contain the git error message, got: {}",
+            error_message
+        );
+    }
+
+
     #[gpui::test]
     async fn test_graph_data_repopulated_from_cache_after_repo_switch(cx: &mut TestAppContext) {
         init_test(cx);

crates/git_ui/src/branch_picker.rs 🔗

@@ -1906,7 +1906,7 @@ mod tests {
         assert_eq!(
             remotes,
             vec![Remote {
-                name: SharedString::from("my_new_remote".to_string())
+                name: SharedString::from("my_new_remote")
             }]
         );
     }

crates/git_ui/src/worktree_picker.rs 🔗

@@ -318,8 +318,13 @@ impl WorktreeListDelegate {
                     .clone();
                 let new_worktree_path =
                     repo.path_for_new_linked_worktree(&branch, &worktree_directory_setting)?;
-                let receiver =
-                    repo.create_worktree(branch.clone(), new_worktree_path.clone(), commit);
+                let receiver = repo.create_worktree(
+                    git::repository::CreateWorktreeTarget::NewBranch {
+                        branch_name: branch.clone(),
+                        base_sha: commit,
+                    },
+                    new_worktree_path.clone(),
+                );
                 anyhow::Ok((receiver, new_worktree_path))
             })?;
             receiver.await??;

crates/google_ai/Cargo.toml 🔗

@@ -18,8 +18,10 @@ schemars = ["dep:schemars"]
 anyhow.workspace = true
 futures.workspace = true
 http_client.workspace = true
+language_model_core.workspace = true
+log.workspace = true
 schemars = { workspace = true, optional = true }
 serde.workspace = true
 serde_json.workspace = true
-settings.workspace = true
 strum.workspace = true
+tiktoken-rs.workspace = true

crates/google_ai/src/completion.rs 🔗

@@ -0,0 +1,492 @@
+use anyhow::Result;
+use futures::{Stream, StreamExt};
+use language_model_core::{
+    LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRequest,
+    LanguageModelToolChoice, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, Role,
+    StopReason, TokenUsage,
+};
+use std::pin::Pin;
+use std::sync::Arc;
+use std::sync::atomic::{self, AtomicU64};
+
+use crate::{
+    Content, FunctionCallingConfig, FunctionCallingMode, FunctionDeclaration,
+    GenerateContentResponse, GenerationConfig, GenerativeContentBlob, GoogleModelMode,
+    InlineDataPart, ModelName, Part, SystemInstruction, TextPart, ThinkingConfig, ToolConfig,
+    UsageMetadata,
+};
+
+/// Converts a provider-agnostic `LanguageModelRequest` into a Google
+/// `GenerateContentRequest` for the given model id and mode.
+///
+/// A leading `System` message is lifted into Google's dedicated
+/// `system_instruction` field; remaining messages become `contents`, and
+/// tools/tool-choice are mapped to their Google equivalents.
+pub fn into_google(
+    mut request: LanguageModelRequest,
+    model_id: String,
+    mode: GoogleModelMode,
+) -> crate::GenerateContentRequest {
+    // Maps message content into Google `Part`s. Content with no Google
+    // representation (empty text, unsigned thinking, redacted thinking)
+    // maps to no parts at all.
+    fn map_content(content: Vec<MessageContent>) -> Vec<Part> {
+        content
+            .into_iter()
+            .flat_map(|content| match content {
+                MessageContent::Text(text) => {
+                    if !text.is_empty() {
+                        vec![Part::TextPart(TextPart { text })]
+                    } else {
+                        vec![]
+                    }
+                }
+                MessageContent::Thinking {
+                    text: _,
+                    signature: Some(signature),
+                } => {
+                    if !signature.is_empty() {
+                        vec![Part::ThoughtPart(crate::ThoughtPart {
+                            thought: true,
+                            thought_signature: signature,
+                        })]
+                    } else {
+                        vec![]
+                    }
+                }
+                MessageContent::Thinking { .. } => {
+                    vec![]
+                }
+                MessageContent::RedactedThinking(_) => vec![],
+                MessageContent::Image(image) => {
+                    vec![Part::InlineDataPart(InlineDataPart {
+                        inline_data: GenerativeContentBlob {
+                            mime_type: "image/png".to_string(),
+                            data: image.source.to_string(),
+                        },
+                    })]
+                }
+                MessageContent::ToolUse(tool_use) => {
+                    // Normalize empty string signatures to None
+                    let thought_signature = tool_use.thought_signature.filter(|s| !s.is_empty());
+
+                    vec![Part::FunctionCallPart(crate::FunctionCallPart {
+                        function_call: crate::FunctionCall {
+                            name: tool_use.name.to_string(),
+                            args: tool_use.input,
+                        },
+                        thought_signature,
+                    })]
+                }
+                MessageContent::ToolResult(tool_result) => {
+                    match tool_result.content {
+                        language_model_core::LanguageModelToolResultContent::Text(text) => {
+                            vec![Part::FunctionResponsePart(crate::FunctionResponsePart {
+                                function_response: crate::FunctionResponse {
+                                    name: tool_result.tool_name.to_string(),
+                                    // The API expects a valid JSON object
+                                    response: serde_json::json!({
+                                        "output": text
+                                    }),
+                                },
+                            })]
+                        }
+                        language_model_core::LanguageModelToolResultContent::Image(image) => {
+                            // Image results become a placeholder function response
+                            // plus the image itself as inline data.
+                            vec![
+                                Part::FunctionResponsePart(crate::FunctionResponsePart {
+                                    function_response: crate::FunctionResponse {
+                                        name: tool_result.tool_name.to_string(),
+                                        // The API expects a valid JSON object
+                                        response: serde_json::json!({
+                                            "output": "Tool responded with an image"
+                                        }),
+                                    },
+                                }),
+                                Part::InlineDataPart(InlineDataPart {
+                                    inline_data: GenerativeContentBlob {
+                                        mime_type: "image/png".to_string(),
+                                        data: image.source.to_string(),
+                                    },
+                                }),
+                            ]
+                        }
+                    }
+                }
+            })
+            .collect()
+    }
+
+    // Google has a dedicated system-instruction field; lift a leading system
+    // message out of the message list if present.
+    let system_instructions = if request
+        .messages
+        .first()
+        .is_some_and(|msg| matches!(msg.role, Role::System))
+    {
+        let message = request.messages.remove(0);
+        Some(SystemInstruction {
+            parts: map_content(message.content),
+        })
+    } else {
+        None
+    };
+
+    crate::GenerateContentRequest {
+        model: ModelName { model_id },
+        system_instruction: system_instructions,
+        contents: request
+            .messages
+            .into_iter()
+            .filter_map(|message| {
+                // Drop messages whose content maps to no Google parts.
+                let parts = map_content(message.content);
+                if parts.is_empty() {
+                    None
+                } else {
+                    Some(Content {
+                        parts,
+                        role: match message.role {
+                            Role::User => crate::Role::User,
+                            Role::Assistant => crate::Role::Model,
+                            Role::System => crate::Role::User, // Google AI doesn't have a system role
+                        },
+                    })
+                }
+            })
+            .collect(),
+        generation_config: Some(GenerationConfig {
+            candidate_count: Some(1),
+            stop_sequences: Some(request.stop),
+            max_output_tokens: None,
+            // Default the temperature to 1.0 when the request leaves it unset.
+            temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
+            // A thinking budget is only forwarded when the request allows
+            // thinking AND the model mode is `Thinking` with a budget set.
+            thinking_config: match (request.thinking_allowed, mode) {
+                (true, GoogleModelMode::Thinking { budget_tokens }) => {
+                    budget_tokens.map(|thinking_budget| ThinkingConfig { thinking_budget })
+                }
+                _ => None,
+            },
+            top_p: None,
+            top_k: None,
+        }),
+        safety_settings: None,
+        tools: (!request.tools.is_empty()).then(|| {
+            vec![crate::Tool {
+                function_declarations: request
+                    .tools
+                    .into_iter()
+                    .map(|tool| FunctionDeclaration {
+                        name: tool.name,
+                        description: tool.description,
+                        parameters: tool.input_schema,
+                    })
+                    .collect(),
+            }]
+        }),
+        tool_config: request.tool_choice.map(|choice| ToolConfig {
+            function_calling_config: FunctionCallingConfig {
+                mode: match choice {
+                    LanguageModelToolChoice::Auto => FunctionCallingMode::Auto,
+                    LanguageModelToolChoice::Any => FunctionCallingMode::Any,
+                    LanguageModelToolChoice::None => FunctionCallingMode::None,
+                },
+                allowed_function_names: None,
+            },
+        }),
+    }
+}
+
+/// Accumulates per-stream state (token usage and the latest stop reason)
+/// while mapping Google `GenerateContentResponse` events into
+/// `LanguageModelCompletionEvent`s.
+pub struct GoogleEventMapper {
+    // Running token-usage totals, merged across stream events.
+    usage: UsageMetadata,
+    // Most recently observed stop reason; emitted when the stream ends.
+    stop_reason: StopReason,
+}
+
+impl GoogleEventMapper {
+    /// Creates a mapper with empty usage and a default stop reason of `EndTurn`.
+    pub fn new() -> Self {
+        Self {
+            usage: UsageMetadata::default(),
+            stop_reason: StopReason::EndTurn,
+        }
+    }
+
+    /// Consumes the mapper and the raw response stream, yielding completion
+    /// events. When the input stream ends, a final `Stop` event carrying the
+    /// last observed stop reason is appended.
+    pub fn map_stream(
+        mut self,
+        events: Pin<Box<dyn Send + Stream<Item = Result<GenerateContentResponse>>>>,
+    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+    {
+        events
+            .map(Some)
+            // A trailing `None` marks end-of-stream so the closure below can
+            // emit the final Stop event.
+            .chain(futures::stream::once(async { None }))
+            .flat_map(move |event| {
+                futures::stream::iter(match event {
+                    Some(Ok(event)) => self.map_event(event),
+                    Some(Err(error)) => {
+                        vec![Err(LanguageModelCompletionError::from(error))]
+                    }
+                    None => vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))],
+                })
+            })
+    }
+
+    /// Maps a single response event into zero or more completion events,
+    /// updating the running usage totals and stop reason as a side effect.
+    ///
+    /// A blocked prompt short-circuits with a `Refusal` stop; a function call
+    /// in any candidate forces a `ToolUse` stop at the end.
+    pub fn map_event(
+        &mut self,
+        event: GenerateContentResponse,
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        // Process-wide counter used to synthesize unique tool-use ids, since
+        // Google function calls carry no id of their own.
+        static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
+
+        let mut events: Vec<_> = Vec::new();
+        let mut wants_to_use_tool = false;
+        if let Some(usage_metadata) = event.usage_metadata {
+            update_usage(&mut self.usage, &usage_metadata);
+            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
+                convert_usage(&self.usage),
+            )))
+        }
+
+        if let Some(prompt_feedback) = event.prompt_feedback
+            && let Some(block_reason) = prompt_feedback.block_reason.as_deref()
+        {
+            self.stop_reason = match block_reason {
+                "SAFETY" | "OTHER" | "BLOCKLIST" | "PROHIBITED_CONTENT" | "IMAGE_SAFETY" => {
+                    StopReason::Refusal
+                }
+                // Unknown block reasons are logged but still treated as refusals.
+                _ => {
+                    log::error!("Unexpected Google block_reason: {block_reason}");
+                    StopReason::Refusal
+                }
+            };
+            events.push(Ok(LanguageModelCompletionEvent::Stop(self.stop_reason)));
+
+            return events;
+        }
+
+        if let Some(candidates) = event.candidates {
+            for candidate in candidates {
+                if let Some(finish_reason) = candidate.finish_reason.as_deref() {
+                    self.stop_reason = match finish_reason {
+                        "STOP" => StopReason::EndTurn,
+                        "MAX_TOKENS" => StopReason::MaxTokens,
+                        _ => {
+                            log::error!("Unexpected google finish_reason: {finish_reason}");
+                            StopReason::EndTurn
+                        }
+                    };
+                }
+                candidate
+                    .content
+                    .parts
+                    .into_iter()
+                    .for_each(|part| match part {
+                        Part::TextPart(text_part) => {
+                            events.push(Ok(LanguageModelCompletionEvent::Text(text_part.text)))
+                        }
+                        Part::InlineDataPart(_) => {}
+                        Part::FunctionCallPart(function_call_part) => {
+                            wants_to_use_tool = true;
+                            let name: Arc<str> = function_call_part.function_call.name.into();
+                            let next_tool_id =
+                                TOOL_CALL_COUNTER.fetch_add(1, atomic::Ordering::SeqCst);
+                            let id: LanguageModelToolUseId =
+                                format!("{}-{}", name, next_tool_id).into();
+
+                            // Normalize empty string signatures to None
+                            let thought_signature = function_call_part
+                                .thought_signature
+                                .filter(|s| !s.is_empty());
+
+                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                                LanguageModelToolUse {
+                                    id,
+                                    name,
+                                    is_input_complete: true,
+                                    raw_input: function_call_part.function_call.args.to_string(),
+                                    input: function_call_part.function_call.args,
+                                    thought_signature,
+                                },
+                            )));
+                        }
+                        Part::FunctionResponsePart(_) => {}
+                        Part::ThoughtPart(part) => {
+                            events.push(Ok(LanguageModelCompletionEvent::Thinking {
+                                text: "(Encrypted thought)".to_string(), // TODO: Can we populate this from thought summaries?
+                                signature: Some(part.thought_signature),
+                            }));
+                        }
+                    });
+            }
+        }
+
+        // Even when Gemini wants to use a Tool, the API
+        // responds with `finish_reason: STOP`
+        if wants_to_use_tool {
+            self.stop_reason = StopReason::ToolUse;
+            events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
+        }
+        events
+    }
+}
+
+/// Count tokens for a Google AI model using tiktoken. This is synchronous;
+/// callers should spawn it on a background thread if needed.
+///
+/// # Errors
+/// Propagates any error from tiktoken's message token counting.
+pub fn count_google_tokens(request: LanguageModelRequest) -> Result<u64> {
+    let messages = request
+        .messages
+        .into_iter()
+        .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
+            role: match message.role {
+                Role::User => "user".into(),
+                Role::Assistant => "assistant".into(),
+                Role::System => "system".into(),
+            },
+            content: Some(message.string_contents()),
+            name: None,
+            function_call: None,
+        })
+        .collect::<Vec<_>>();
+
+    // Tiktoken doesn't yet support these models, so we manually use the
+    // same tokenizer as GPT-4 — counts are therefore approximate for Gemini.
+    tiktoken_rs::num_tokens_from_messages("gpt-4", &messages).map(|tokens| tokens as u64)
+}
+
+/// Folds newly-reported token counts into the running `usage` totals.
+///
+/// Each field of `new` overwrites the corresponding field of `usage` only
+/// when it is `Some`; fields the update omits keep their previous value.
+fn update_usage(usage: &mut UsageMetadata, new: &UsageMetadata) {
+    // Overwrite `dst` only when the incoming value is present.
+    fn overwrite_if_present<T: Copy>(dst: &mut Option<T>, src: Option<T>) {
+        if src.is_some() {
+            *dst = src;
+        }
+    }
+
+    overwrite_if_present(&mut usage.prompt_token_count, new.prompt_token_count);
+    overwrite_if_present(
+        &mut usage.cached_content_token_count,
+        new.cached_content_token_count,
+    );
+    overwrite_if_present(&mut usage.candidates_token_count, new.candidates_token_count);
+    overwrite_if_present(
+        &mut usage.tool_use_prompt_token_count,
+        new.tool_use_prompt_token_count,
+    );
+    overwrite_if_present(&mut usage.thoughts_token_count, new.thoughts_token_count);
+    overwrite_if_present(&mut usage.total_token_count, new.total_token_count);
+}
+
+/// Converts accumulated Google `UsageMetadata` into a provider-agnostic
+/// `TokenUsage`.
+///
+/// Google reports cached tokens as part of the prompt token count, so the
+/// non-cached input count is the difference between the two. The counts are
+/// merged independently across stream events by `update_usage`, so an
+/// intermediate state can briefly report `cached > prompt`; `saturating_sub`
+/// clamps that to zero instead of panicking (debug) or wrapping (release).
+fn convert_usage(usage: &UsageMetadata) -> TokenUsage {
+    let prompt_tokens = usage.prompt_token_count.unwrap_or(0);
+    let cached_tokens = usage.cached_content_token_count.unwrap_or(0);
+    let input_tokens = prompt_tokens.saturating_sub(cached_tokens);
+    let output_tokens = usage.candidates_token_count.unwrap_or(0);
+
+    TokenUsage {
+        input_tokens,
+        output_tokens,
+        cache_read_input_tokens: cached_tokens,
+        cache_creation_input_tokens: 0,
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    // Tests for mapping Google function-call parts into tool-use events,
+    // focusing on thought-signature propagation and normalization.
+    use super::*;
+    use crate::{
+        Content, FunctionCall, FunctionCallPart, GenerateContentCandidate, GenerateContentResponse,
+        Part, Role as GoogleRole,
+    };
+    use serde_json::json;
+
+    #[test]
+    fn test_function_call_with_signature_creates_tool_use_with_signature() {
+        let mut mapper = GoogleEventMapper::new();
+
+        let response = GenerateContentResponse {
+            candidates: Some(vec![GenerateContentCandidate {
+                index: Some(0),
+                content: Content {
+                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
+                        function_call: FunctionCall {
+                            name: "test_function".to_string(),
+                            args: json!({"arg": "value"}),
+                        },
+                        thought_signature: Some("test_signature_123".to_string()),
+                    })],
+                    role: GoogleRole::Model,
+                },
+                finish_reason: None,
+                finish_message: None,
+                safety_ratings: None,
+                citation_metadata: None,
+            }]),
+            prompt_feedback: None,
+            usage_metadata: None,
+        };
+
+        // Expect a ToolUse event followed by the ToolUse stop event.
+        let events = mapper.map_event(response);
+        assert_eq!(events.len(), 2);
+
+        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
+            assert_eq!(tool_use.name.as_ref(), "test_function");
+            assert_eq!(
+                tool_use.thought_signature.as_deref(),
+                Some("test_signature_123")
+            );
+        } else {
+            panic!("Expected ToolUse event");
+        }
+    }
+
+    #[test]
+    fn test_function_call_without_signature_has_none() {
+        let mut mapper = GoogleEventMapper::new();
+
+        let response = GenerateContentResponse {
+            candidates: Some(vec![GenerateContentCandidate {
+                index: Some(0),
+                content: Content {
+                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
+                        function_call: FunctionCall {
+                            name: "test_function".to_string(),
+                            args: json!({"arg": "value"}),
+                        },
+                        thought_signature: None,
+                    })],
+                    role: GoogleRole::Model,
+                },
+                finish_reason: None,
+                finish_message: None,
+                safety_ratings: None,
+                citation_metadata: None,
+            }]),
+            prompt_feedback: None,
+            usage_metadata: None,
+        };
+
+        let events = mapper.map_event(response);
+        assert_eq!(events.len(), 2);
+
+        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
+            assert!(tool_use.thought_signature.is_none());
+        } else {
+            panic!("Expected ToolUse event");
+        }
+    }
+
+    #[test]
+    fn test_empty_string_signature_normalized_to_none() {
+        let mut mapper = GoogleEventMapper::new();
+
+        let response = GenerateContentResponse {
+            candidates: Some(vec![GenerateContentCandidate {
+                index: Some(0),
+                content: Content {
+                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
+                        function_call: FunctionCall {
+                            name: "test_function".to_string(),
+                            args: json!({"arg": "value"}),
+                        },
+                        // An empty-string signature must be treated as absent.
+                        thought_signature: Some("".to_string()),
+                    })],
+                    role: GoogleRole::Model,
+                },
+                finish_reason: None,
+                finish_message: None,
+                safety_ratings: None,
+                citation_metadata: None,
+            }]),
+            prompt_feedback: None,
+            usage_metadata: None,
+        };
+
+        let events = mapper.map_event(response);
+        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
+            assert!(tool_use.thought_signature.is_none());
+        } else {
+            panic!("Expected ToolUse event");
+        }
+    }
+}

crates/google_ai/src/google_ai.rs 🔗

@@ -3,8 +3,9 @@ use std::mem;
 use anyhow::{Result, anyhow, bail};
 use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
 use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
+pub use language_model_core::ModelMode as GoogleModelMode;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
-pub use settings::ModelMode as GoogleModelMode;
+pub mod completion;
 
 pub const API_URL: &str = "https://generativelanguage.googleapis.com";
 

crates/gpui/Cargo.toml 🔗

@@ -56,6 +56,7 @@ etagere = "0.2"
 futures.workspace = true
 futures-concurrency.workspace = true
 gpui_macros.workspace = true
+gpui_shared_string.workspace = true
 http_client.workspace = true
 image.workspace = true
 inventory.workspace = true

crates/gpui/src/gpui.rs 🔗

@@ -39,7 +39,6 @@ pub mod profiler;
 #[expect(missing_docs)]
 pub mod queue;
 mod scene;
-mod shared_string;
 mod shared_uri;
 mod style;
 mod styled;
@@ -92,6 +91,7 @@ pub use global::*;
 pub use gpui_macros::{
     AppContext, IntoElement, Render, VisualContext, property_test, register_action, test,
 };
+pub use gpui_shared_string::*;
 pub use gpui_util::arc_cow::ArcCow;
 pub use http_client;
 pub use input::*;
@@ -106,7 +106,6 @@ pub use profiler::*;
 pub use queue::{PriorityQueueReceiver, PriorityQueueSender};
 pub use refineable::*;
 pub use scene::*;
-pub use shared_string::*;
 pub use shared_uri::*;
 use std::{any::Any, future::Future};
 pub use style::*;

crates/gpui/src/svg_renderer.rs 🔗

@@ -105,18 +105,36 @@ pub enum SvgSize {
 impl SvgRenderer {
     /// Creates a new SVG renderer with the provided asset source.
     pub fn new(asset_source: Arc<dyn AssetSource>) -> Self {
-        static FONT_DB: LazyLock<Arc<usvg::fontdb::Database>> = LazyLock::new(|| {
+        static SYSTEM_FONT_DB: LazyLock<Arc<usvg::fontdb::Database>> = LazyLock::new(|| {
             let mut db = usvg::fontdb::Database::new();
             db.load_system_fonts();
             Arc::new(db)
         });
+
+        let fontdb = {
+            let mut db = (**SYSTEM_FONT_DB).clone();
+            load_bundled_fonts(&*asset_source, &mut db);
+            fix_generic_font_families(&mut db);
+            Arc::new(db)
+        };
+
         let default_font_resolver = usvg::FontResolver::default_font_selector();
         let font_resolver = Box::new(
             move |font: &usvg::Font, db: &mut Arc<usvg::fontdb::Database>| {
                 if db.is_empty() {
-                    *db = FONT_DB.clone();
+                    *db = fontdb.clone();
+                }
+                if let Some(id) = default_font_resolver(font, db) {
+                    return Some(id);
                 }
-                default_font_resolver(font, db)
+                // fontdb doesn't recognize CSS system font keywords like "system-ui"
+                // or "ui-sans-serif", so fall back to sans-serif before any face.
+                let sans_query = usvg::fontdb::Query {
+                    families: &[usvg::fontdb::Family::SansSerif],
+                    ..Default::default()
+                };
+                db.query(&sans_query)
+                    .or_else(|| db.faces().next().map(|f| f.id))
             },
         );
         let default_fallback_selection = usvg::FontResolver::default_fallback_selector();
@@ -226,14 +244,69 @@ impl SvgRenderer {
     }
 }
 
+fn load_bundled_fonts(asset_source: &dyn AssetSource, db: &mut usvg::fontdb::Database) {
+    let font_paths = [
+        "fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf",
+        "fonts/lilex/Lilex-Regular.ttf",
+    ];
+    for path in font_paths {
+        match asset_source.load(path) {
+            Ok(Some(data)) => db.load_font_data(data.into_owned()),
+            Ok(None) => log::warn!("Bundled font not found: {path}"),
+            Err(error) => log::warn!("Failed to load bundled font {path}: {error}"),
+        }
+    }
+}
+
+// fontdb defaults generic families to Microsoft fonts ("Arial", "Times New Roman")
+// which aren't installed on most Linux systems. fontconfig normally overrides these,
+// but when it fails the defaults remain and all generic family queries return None.
+fn fix_generic_font_families(db: &mut usvg::fontdb::Database) {
+    use usvg::fontdb::{Family, Query};
+
+    let families_and_fallbacks: &[(Family<'_>, &str)] = &[
+        (Family::SansSerif, "IBM Plex Sans"),
+        // No serif font bundled; use sans-serif as best available fallback.
+        (Family::Serif, "IBM Plex Sans"),
+        (Family::Monospace, "Lilex"),
+        (Family::Cursive, "IBM Plex Sans"),
+        (Family::Fantasy, "IBM Plex Sans"),
+    ];
+
+    for (family, fallback_name) in families_and_fallbacks {
+        let query = Query {
+            families: &[*family],
+            ..Default::default()
+        };
+        if db.query(&query).is_none() {
+            match family {
+                Family::SansSerif => db.set_sans_serif_family(*fallback_name),
+                Family::Serif => db.set_serif_family(*fallback_name),
+                Family::Monospace => db.set_monospace_family(*fallback_name),
+                Family::Cursive => db.set_cursive_family(*fallback_name),
+                Family::Fantasy => db.set_fantasy_family(*fallback_name),
+                _ => {}
+            }
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
+    use usvg::fontdb::{Database, Family, Query};
 
     const IBM_PLEX_REGULAR: &[u8] =
         include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf");
     const LILEX_REGULAR: &[u8] = include_bytes!("../../../assets/fonts/lilex/Lilex-Regular.ttf");
 
+    fn db_with_bundled_fonts() -> Database {
+        let mut db = Database::new();
+        db.load_font_data(IBM_PLEX_REGULAR.to_vec());
+        db.load_font_data(LILEX_REGULAR.to_vec());
+        db
+    }
+
     #[test]
     fn test_is_emoji_presentation() {
         let cases = [
@@ -266,11 +339,33 @@ mod tests {
     }
 
     #[test]
-    fn test_select_emoji_font_skips_family_without_glyph() {
-        let mut db = usvg::fontdb::Database::new();
+    fn fix_generic_font_families_sets_all_families() {
+        let mut db = db_with_bundled_fonts();
+        fix_generic_font_families(&mut db);
+
+        let families = [
+            Family::SansSerif,
+            Family::Serif,
+            Family::Monospace,
+            Family::Cursive,
+            Family::Fantasy,
+        ];
 
-        db.load_font_data(IBM_PLEX_REGULAR.to_vec());
-        db.load_font_data(LILEX_REGULAR.to_vec());
+        for family in families {
+            let query = Query {
+                families: &[family],
+                ..Default::default()
+            };
+            assert!(
+                db.query(&query).is_some(),
+                "Expected generic family {family:?} to resolve after fix_generic_font_families"
+            );
+        }
+    }
+
+    #[test]
+    fn test_select_emoji_font_skips_family_without_glyph() {
+        let mut db = db_with_bundled_fonts();
 
         let ibm_plex_sans = db
             .query(&usvg::fontdb::Query {
@@ -294,4 +389,22 @@ mod tests {
         assert!(!font_has_char(&db, ibm_plex_sans, '│'));
         assert!(font_has_char(&db, selected, '│'));
     }
+
+    #[test]
+    fn fix_generic_font_families_monospace_resolves_to_lilex() {
+        let mut db = db_with_bundled_fonts();
+        fix_generic_font_families(&mut db);
+
+        let query = Query {
+            families: &[Family::Monospace],
+            ..Default::default()
+        };
+        let id = db.query(&query).expect("Monospace should resolve");
+        let face = db.face(id).expect("Face should exist");
+        assert!(
+            face.families.iter().any(|(name, _)| name.contains("Lilex")),
+            "Monospace should map to Lilex, got {:?}",
+            face.families
+        );
+    }
 }

crates/gpui/src/text_system/line.rs 🔗

@@ -882,7 +882,7 @@ mod tests {
                 ],
                 len: 6,
             }),
-            text: SharedString::new("abcdef".to_string()),
+            text: "abcdef".into(),
             decoration_runs: SmallVec::new(),
         };
 

crates/gpui/src/window.rs 🔗

@@ -61,7 +61,7 @@ use crate::util::atomic_incr_if_not_zero;
 pub use prompts::*;
 
 /// Default window size used when no explicit size is provided.
-pub const DEFAULT_WINDOW_SIZE: Size<Pixels> = size(px(1536.), px(864.));
+pub const DEFAULT_WINDOW_SIZE: Size<Pixels> = size(px(1536.), px(1095.));
 
 /// A 6:5 aspect ratio minimum window size to be used for functional,
 /// additional-to-main-Zed windows, like the settings and rules library windows.

crates/gpui_shared_string/Cargo.toml 🔗

@@ -0,0 +1,17 @@
+[package]
+name = "gpui_shared_string"
+version = "0.1.0"
+publish.workspace = true
+edition.workspace = true
+
+[lib]
+path = "gpui_shared_string.rs"
+
+[dependencies]
+derive_more.workspace = true
+gpui_util.workspace = true
+schemars.workspace = true
+serde.workspace = true
+
+[lints]
+workspace = true

crates/http_client/src/github_download.rs 🔗

@@ -207,11 +207,7 @@ async fn extract_tar_gz(
     from: impl AsyncRead + Unpin,
 ) -> Result<(), anyhow::Error> {
     let decompressed_bytes = GzipDecoder::new(BufReader::new(from));
-    let archive = async_tar::Archive::new(decompressed_bytes);
-    archive
-        .unpack(&destination_path)
-        .await
-        .with_context(|| format!("extracting {url} to {destination_path:?}"))?;
+    unpack_tar_archive(destination_path, url, decompressed_bytes).await?;
     Ok(())
 }
 
@@ -221,7 +217,21 @@ async fn extract_tar_bz2(
     from: impl AsyncRead + Unpin,
 ) -> Result<(), anyhow::Error> {
     let decompressed_bytes = BzDecoder::new(BufReader::new(from));
-    let archive = async_tar::Archive::new(decompressed_bytes);
+    unpack_tar_archive(destination_path, url, decompressed_bytes).await?;
+    Ok(())
+}
+
+async fn unpack_tar_archive(
+    destination_path: &Path,
+    url: &str,
+    archive_bytes: impl AsyncRead + Unpin,
+) -> Result<(), anyhow::Error> {
+    // We don't need to set the modified time. It's irrelevant to downloaded
+    // archive verification, and some filesystems return errors when asked to
+    // apply it after extraction.
+    let archive = async_tar::ArchiveBuilder::new(archive_bytes)
+        .set_preserve_mtime(false)
+        .build();
     archive
         .unpack(&destination_path)
         .await

crates/icons/src/icons.rs 🔗

@@ -134,7 +134,7 @@ pub enum IconName {
     Flame,
     Folder,
     FolderOpen,
-    FolderPlus,
+    FolderOpenAdd,
     FolderSearch,
     Font,
     FontSize,
@@ -184,6 +184,7 @@ pub enum IconName {
     NewThread,
     Notepad,
     OpenFolder,
+    OpenNewWindow,
     Option,
     PageDown,
     PageUp,

crates/language/src/buffer.rs 🔗

@@ -3733,16 +3733,24 @@ impl BufferSnapshot {
     /// returned in chunks where each chunk has a single syntax highlighting style and
     /// diagnostic status.
     #[ztracing::instrument(skip_all)]
-    pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
+    pub fn chunks<T: ToOffset>(
+        &self,
+        range: Range<T>,
+        language_aware: LanguageAwareStyling,
+    ) -> BufferChunks<'_> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
 
         let mut syntax = None;
-        if language_aware {
+        if language_aware.tree_sitter {
             syntax = Some(self.get_highlights(range.clone()));
         }
-        // We want to look at diagnostic spans only when iterating over language-annotated chunks.
-        let diagnostics = language_aware;
-        BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
+        BufferChunks::new(
+            self.text.as_rope(),
+            range,
+            syntax,
+            language_aware.diagnostics,
+            Some(self),
+        )
     }
 
     pub fn highlighted_text_for_range<T: ToOffset>(
@@ -4477,7 +4485,13 @@ impl BufferSnapshot {
         let mut text = String::new();
         let mut highlight_ranges = Vec::new();
         let mut name_ranges = Vec::new();
-        let mut chunks = self.chunks(source_range_for_text.clone(), true);
+        let mut chunks = self.chunks(
+            source_range_for_text.clone(),
+            LanguageAwareStyling {
+                tree_sitter: true,
+                diagnostics: true,
+            },
+        );
         let mut last_buffer_range_end = 0;
         for (buffer_range, is_name) in buffer_ranges {
             let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
@@ -5402,7 +5416,13 @@ impl BufferSnapshot {
         let mut words = BTreeMap::default();
         let mut current_word_start_ix = None;
         let mut chunk_ix = query.range.start;
-        for chunk in self.chunks(query.range, false) {
+        for chunk in self.chunks(
+            query.range,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
+        ) {
             for (i, c) in chunk.text.char_indices() {
                 let ix = chunk_ix + i;
                 if classifier.is_word(c) {
@@ -5441,6 +5461,15 @@ impl BufferSnapshot {
     }
 }
 
+/// A configuration to use when producing styled text chunks.
+#[derive(Clone, Copy)]
+pub struct LanguageAwareStyling {
+    /// Whether to highlight text chunks using tree-sitter.
+    pub tree_sitter: bool,
+    /// Whether to highlight text chunks based on the diagnostics data.
+    pub diagnostics: bool,
+}
+
 pub struct WordsQuery<'a> {
     /// Only returns words with all chars from the fuzzy string in them.
     pub fuzzy_contents: Option<&'a str>,

crates/language/src/buffer_tests.rs 🔗

@@ -4102,7 +4102,13 @@ fn test_random_chunk_bitmaps(cx: &mut App, mut rng: StdRng) {
     let snapshot = buffer.read(cx).snapshot();
 
     // Get all chunks and verify their bitmaps
-    let chunks = snapshot.chunks(0..snapshot.len(), false);
+    let chunks = snapshot.chunks(
+        0..snapshot.len(),
+        LanguageAwareStyling {
+            tree_sitter: false,
+            diagnostics: false,
+        },
+    );
 
     for chunk in chunks {
         let chunk_text = chunk.text;

crates/language_core/Cargo.toml 🔗

@@ -10,7 +10,7 @@ path = "src/language_core.rs"
 [dependencies]
 anyhow.workspace = true
 collections.workspace = true
-gpui.workspace = true
+gpui_shared_string.workspace = true
 log.workspace = true
 lsp.workspace = true
 parking_lot.workspace = true
@@ -22,8 +22,6 @@ toml.workspace = true
 tree-sitter.workspace = true
 util.workspace = true
 
-[dev-dependencies]
-gpui = { workspace = true, features = ["test-support"] }
 
 [features]
 test-support = []

crates/language_core/src/diagnostic.rs 🔗

@@ -1,4 +1,4 @@
-use gpui::SharedString;
+use gpui_shared_string::SharedString;
 use lsp::{DiagnosticSeverity, NumberOrString};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;

crates/language_core/src/grammar.rs 🔗

@@ -4,7 +4,7 @@ use crate::{
 };
 use anyhow::{Context as _, Result};
 use collections::HashMap;
-use gpui::SharedString;
+use gpui_shared_string::SharedString;
 use lsp::LanguageServerName;
 use parking_lot::Mutex;
 use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

crates/language_core/src/language_config.rs 🔗

@@ -1,6 +1,6 @@
 use crate::LanguageName;
 use collections::{HashMap, HashSet, IndexSet};
-use gpui::SharedString;
+use gpui_shared_string::SharedString;
 use lsp::LanguageServerName;
 use regex::Regex;
 use schemars::{JsonSchema, SchemaGenerator, json_schema};

crates/language_core/src/manifest.rs 🔗

@@ -1,6 +1,6 @@
 use std::borrow::Borrow;
 
-use gpui::SharedString;
+use gpui_shared_string::SharedString;
 
 #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct ManifestName(SharedString);

crates/language_core/src/toolchain.rs 🔗

@@ -6,7 +6,7 @@
 
 use std::{path::Path, sync::Arc};
 
-use gpui::SharedString;
+use gpui_shared_string::SharedString;
 use util::rel_path::RelPath;
 
 use crate::{LanguageName, ManifestName};

crates/language_model/Cargo.toml 🔗

@@ -16,13 +16,9 @@ doctest = false
 test-support = []
 
 [dependencies]
-anthropic = { workspace = true, features = ["schemars"] }
 anyhow.workspace = true
 credentials_provider.workspace = true
 base64.workspace = true
-cloud_api_client.workspace = true
-cloud_api_types.workspace = true
-cloud_llm_client.workspace = true
 collections.workspace = true
 env_var.workspace = true
 futures.workspace = true
@@ -30,14 +26,11 @@ gpui.workspace = true
 http_client.workspace = true
 icons.workspace = true
 image.workspace = true
+language_model_core.workspace = true
 log.workspace = true
-open_ai = { workspace = true, features = ["schemars"] }
-open_router.workspace = true
 parking_lot.workspace = true
-schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
-smol.workspace = true
 thiserror.workspace = true
 util.workspace = true
 

crates/language_model/src/fake_provider.rs 🔗

@@ -5,11 +5,10 @@ use crate::{
     LanguageModelRequest, LanguageModelToolChoice,
 };
 use anyhow::anyhow;
-use futures::{FutureExt, channel::mpsc, future::BoxFuture, stream::BoxStream};
+use futures::{FutureExt, channel::mpsc, future::BoxFuture, stream::BoxStream, stream::StreamExt};
 use gpui::{AnyView, App, AsyncApp, Entity, Task, Window};
 use http_client::Result;
 use parking_lot::Mutex;
-use smol::stream::StreamExt;
 use std::sync::{
     Arc,
     atomic::{AtomicBool, Ordering::SeqCst},

crates/language_model/src/language_model.rs 🔗

@@ -1,380 +1,31 @@
 mod api_key;
 mod model;
-mod provider;
-mod rate_limiter;
 mod registry;
 mod request;
-mod role;
-pub mod tool_schema;
 
 #[cfg(any(test, feature = "test-support"))]
 pub mod fake_provider;
 
-use anyhow::{Result, anyhow};
-use cloud_llm_client::CompletionRequestStatus;
+pub use language_model_core::*;
+
+use anyhow::Result;
 use futures::FutureExt;
 use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
-use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window};
-use http_client::{StatusCode, http};
+use gpui::{AnyView, App, AsyncApp, Task, Window};
 use icons::IconName;
 use parking_lot::Mutex;
-use serde::{Deserialize, Serialize};
-use std::ops::{Add, Sub};
-use std::str::FromStr;
 use std::sync::Arc;
-use std::time::Duration;
-use std::{fmt, io};
-use thiserror::Error;
-use util::serde::is_default;
 
 pub use crate::api_key::{ApiKey, ApiKeyState};
 pub use crate::model::*;
-pub use crate::rate_limiter::*;
 pub use crate::registry::*;
-pub use crate::request::*;
-pub use crate::role::*;
-pub use crate::tool_schema::LanguageModelToolSchemaFormat;
+pub use crate::request::{LanguageModelImageExt, gpui_size_to_image_size, image_size_to_gpui};
 pub use env_var::{EnvVar, env_var};
-pub use provider::*;
 
 pub fn init(cx: &mut App) {
     registry::init(cx);
 }
 
-#[derive(Clone, Debug)]
-pub struct LanguageModelCacheConfiguration {
-    pub max_cache_anchors: usize,
-    pub should_speculate: bool,
-    pub min_total_token: u64,
-}
-
-/// A completion event from a language model.
-#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
-pub enum LanguageModelCompletionEvent {
-    Queued {
-        position: usize,
-    },
-    Started,
-    Stop(StopReason),
-    Text(String),
-    Thinking {
-        text: String,
-        signature: Option<String>,
-    },
-    RedactedThinking {
-        data: String,
-    },
-    ToolUse(LanguageModelToolUse),
-    ToolUseJsonParseError {
-        id: LanguageModelToolUseId,
-        tool_name: Arc<str>,
-        raw_input: Arc<str>,
-        json_parse_error: String,
-    },
-    StartMessage {
-        message_id: String,
-    },
-    ReasoningDetails(serde_json::Value),
-    UsageUpdate(TokenUsage),
-}
-
-impl LanguageModelCompletionEvent {
-    pub fn from_completion_request_status(
-        status: CompletionRequestStatus,
-        upstream_provider: LanguageModelProviderName,
-    ) -> Result<Option<Self>, LanguageModelCompletionError> {
-        match status {
-            CompletionRequestStatus::Queued { position } => {
-                Ok(Some(LanguageModelCompletionEvent::Queued { position }))
-            }
-            CompletionRequestStatus::Started => Ok(Some(LanguageModelCompletionEvent::Started)),
-            CompletionRequestStatus::Unknown | CompletionRequestStatus::StreamEnded => Ok(None),
-            CompletionRequestStatus::Failed {
-                code,
-                message,
-                request_id: _,
-                retry_after,
-            } => Err(LanguageModelCompletionError::from_cloud_failure(
-                upstream_provider,
-                code,
-                message,
-                retry_after.map(Duration::from_secs_f64),
-            )),
-        }
-    }
-}
-
-#[derive(Error, Debug)]
-pub enum LanguageModelCompletionError {
-    #[error("prompt too large for context window")]
-    PromptTooLarge { tokens: Option<u64> },
-    #[error("missing {provider} API key")]
-    NoApiKey { provider: LanguageModelProviderName },
-    #[error("{provider}'s API rate limit exceeded")]
-    RateLimitExceeded {
-        provider: LanguageModelProviderName,
-        retry_after: Option<Duration>,
-    },
-    #[error("{provider}'s API servers are overloaded right now")]
-    ServerOverloaded {
-        provider: LanguageModelProviderName,
-        retry_after: Option<Duration>,
-    },
-    #[error("{provider}'s API server reported an internal server error: {message}")]
-    ApiInternalServerError {
-        provider: LanguageModelProviderName,
-        message: String,
-    },
-    #[error("{message}")]
-    UpstreamProviderError {
-        message: String,
-        status: StatusCode,
-        retry_after: Option<Duration>,
-    },
-    #[error("HTTP response error from {provider}'s API: status {status_code} - {message:?}")]
-    HttpResponseError {
-        provider: LanguageModelProviderName,
-        status_code: StatusCode,
-        message: String,
-    },
-
-    // Client errors
-    #[error("invalid request format to {provider}'s API: {message}")]
-    BadRequestFormat {
-        provider: LanguageModelProviderName,
-        message: String,
-    },
-    #[error("authentication error with {provider}'s API: {message}")]
-    AuthenticationError {
-        provider: LanguageModelProviderName,
-        message: String,
-    },
-    #[error("Permission error with {provider}'s API: {message}")]
-    PermissionError {
-        provider: LanguageModelProviderName,
-        message: String,
-    },
-    #[error("language model provider API endpoint not found")]
-    ApiEndpointNotFound { provider: LanguageModelProviderName },
-    #[error("I/O error reading response from {provider}'s API")]
-    ApiReadResponseError {
-        provider: LanguageModelProviderName,
-        #[source]
-        error: io::Error,
-    },
-    #[error("error serializing request to {provider} API")]
-    SerializeRequest {
-        provider: LanguageModelProviderName,
-        #[source]
-        error: serde_json::Error,
-    },
-    #[error("error building request body to {provider} API")]
-    BuildRequestBody {
-        provider: LanguageModelProviderName,
-        #[source]
-        error: http::Error,
-    },
-    #[error("error sending HTTP request to {provider} API")]
-    HttpSend {
-        provider: LanguageModelProviderName,
-        #[source]
-        error: anyhow::Error,
-    },
-    #[error("error deserializing {provider} API response")]
-    DeserializeResponse {
-        provider: LanguageModelProviderName,
-        #[source]
-        error: serde_json::Error,
-    },
-
-    #[error("stream from {provider} ended unexpectedly")]
-    StreamEndedUnexpectedly { provider: LanguageModelProviderName },
-
-    // TODO: Ideally this would be removed in favor of having a comprehensive list of errors.
-    #[error(transparent)]
-    Other(#[from] anyhow::Error),
-}
-
-impl LanguageModelCompletionError {
-    fn parse_upstream_error_json(message: &str) -> Option<(StatusCode, String)> {
-        let error_json = serde_json::from_str::<serde_json::Value>(message).ok()?;
-        let upstream_status = error_json
-            .get("upstream_status")
-            .and_then(|v| v.as_u64())
-            .and_then(|status| u16::try_from(status).ok())
-            .and_then(|status| StatusCode::from_u16(status).ok())?;
-        let inner_message = error_json
-            .get("message")
-            .and_then(|v| v.as_str())
-            .unwrap_or(message)
-            .to_string();
-        Some((upstream_status, inner_message))
-    }
-
-    pub fn from_cloud_failure(
-        upstream_provider: LanguageModelProviderName,
-        code: String,
-        message: String,
-        retry_after: Option<Duration>,
-    ) -> Self {
-        if let Some(tokens) = parse_prompt_too_long(&message) {
-            // TODO: currently Anthropic PAYLOAD_TOO_LARGE response may cause INTERNAL_SERVER_ERROR
-            // to be reported. This is a temporary workaround to handle this in the case where the
-            // token limit has been exceeded.
-            Self::PromptTooLarge {
-                tokens: Some(tokens),
-            }
-        } else if code == "upstream_http_error" {
-            if let Some((upstream_status, inner_message)) =
-                Self::parse_upstream_error_json(&message)
-            {
-                return Self::from_http_status(
-                    upstream_provider,
-                    upstream_status,
-                    inner_message,
-                    retry_after,
-                );
-            }
-            anyhow!("completion request failed, code: {code}, message: {message}").into()
-        } else if let Some(status_code) = code
-            .strip_prefix("upstream_http_")
-            .and_then(|code| StatusCode::from_str(code).ok())
-        {
-            Self::from_http_status(upstream_provider, status_code, message, retry_after)
-        } else if let Some(status_code) = code
-            .strip_prefix("http_")
-            .and_then(|code| StatusCode::from_str(code).ok())
-        {
-            Self::from_http_status(ZED_CLOUD_PROVIDER_NAME, status_code, message, retry_after)
-        } else {
-            anyhow!("completion request failed, code: {code}, message: {message}").into()
-        }
-    }
-
-    pub fn from_http_status(
-        provider: LanguageModelProviderName,
-        status_code: StatusCode,
-        message: String,
-        retry_after: Option<Duration>,
-    ) -> Self {
-        match status_code {
-            StatusCode::BAD_REQUEST => Self::BadRequestFormat { provider, message },
-            StatusCode::UNAUTHORIZED => Self::AuthenticationError { provider, message },
-            StatusCode::FORBIDDEN => Self::PermissionError { provider, message },
-            StatusCode::NOT_FOUND => Self::ApiEndpointNotFound { provider },
-            StatusCode::PAYLOAD_TOO_LARGE => Self::PromptTooLarge {
-                tokens: parse_prompt_too_long(&message),
-            },
-            StatusCode::TOO_MANY_REQUESTS => Self::RateLimitExceeded {
-                provider,
-                retry_after,
-            },
-            StatusCode::INTERNAL_SERVER_ERROR => Self::ApiInternalServerError { provider, message },
-            StatusCode::SERVICE_UNAVAILABLE => Self::ServerOverloaded {
-                provider,
-                retry_after,
-            },
-            _ if status_code.as_u16() == 529 => Self::ServerOverloaded {
-                provider,
-                retry_after,
-            },
-            _ => Self::HttpResponseError {
-                provider,
-                status_code,
-                message,
-            },
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum StopReason {
-    EndTurn,
-    MaxTokens,
-    ToolUse,
-    Refusal,
-}
-
-#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize, Default)]
-pub struct TokenUsage {
-    #[serde(default, skip_serializing_if = "is_default")]
-    pub input_tokens: u64,
-    #[serde(default, skip_serializing_if = "is_default")]
-    pub output_tokens: u64,
-    #[serde(default, skip_serializing_if = "is_default")]
-    pub cache_creation_input_tokens: u64,
-    #[serde(default, skip_serializing_if = "is_default")]
-    pub cache_read_input_tokens: u64,
-}
-
-impl TokenUsage {
-    pub fn total_tokens(&self) -> u64 {
-        self.input_tokens
-            + self.output_tokens
-            + self.cache_read_input_tokens
-            + self.cache_creation_input_tokens
-    }
-}
-
-impl Add<TokenUsage> for TokenUsage {
-    type Output = Self;
-
-    fn add(self, other: Self) -> Self {
-        Self {
-            input_tokens: self.input_tokens + other.input_tokens,
-            output_tokens: self.output_tokens + other.output_tokens,
-            cache_creation_input_tokens: self.cache_creation_input_tokens
-                + other.cache_creation_input_tokens,
-            cache_read_input_tokens: self.cache_read_input_tokens + other.cache_read_input_tokens,
-        }
-    }
-}
-
-impl Sub<TokenUsage> for TokenUsage {
-    type Output = Self;
-
-    fn sub(self, other: Self) -> Self {
-        Self {
-            input_tokens: self.input_tokens - other.input_tokens,
-            output_tokens: self.output_tokens - other.output_tokens,
-            cache_creation_input_tokens: self.cache_creation_input_tokens
-                - other.cache_creation_input_tokens,
-            cache_read_input_tokens: self.cache_read_input_tokens - other.cache_read_input_tokens,
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
-pub struct LanguageModelToolUseId(Arc<str>);
-
-impl fmt::Display for LanguageModelToolUseId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.0)
-    }
-}
-
-impl<T> From<T> for LanguageModelToolUseId
-where
-    T: Into<Arc<str>>,
-{
-    fn from(value: T) -> Self {
-        Self(value.into())
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
-pub struct LanguageModelToolUse {
-    pub id: LanguageModelToolUseId,
-    pub name: Arc<str>,
-    pub raw_input: String,
-    pub input: serde_json::Value,
-    pub is_input_complete: bool,
-    /// Thought signature the model sent us. Some models require that this
-    /// signature be preserved and sent back in conversation history for validation.
-    pub thought_signature: Option<String>,
-}
-
 pub struct LanguageModelTextStream {
     pub message_id: Option<String>,
     pub stream: BoxStream<'static, Result<String, LanguageModelCompletionError>>,
@@ -392,13 +43,6 @@ impl Default for LanguageModelTextStream {
     }
 }
 
-#[derive(Debug, Clone)]
-pub struct LanguageModelEffortLevel {
-    pub name: SharedString,
-    pub value: SharedString,
-    pub is_default: bool,
-}
-
 pub trait LanguageModel: Send + Sync {
     fn id(&self) -> LanguageModelId;
     fn name(&self) -> LanguageModelName;
@@ -605,7 +249,7 @@ pub trait LanguageModel: Send + Sync {
 }
 
 impl std::fmt::Debug for dyn LanguageModel {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("<dyn LanguageModel>")
             .field("id", &self.id())
             .field("name", &self.name())
@@ -619,17 +263,6 @@ impl std::fmt::Debug for dyn LanguageModel {
     }
 }
 
-/// An error that occurred when trying to authenticate the language model provider.
-#[derive(Debug, Error)]
-pub enum AuthenticateError {
-    #[error("connection refused")]
-    ConnectionRefused,
-    #[error("credentials not found")]
-    CredentialsNotFound,
-    #[error(transparent)]
-    Other(#[from] anyhow::Error),
-}
-
 /// Either a built-in icon name or a path to an external SVG.
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum IconOrSvg {
@@ -692,18 +325,6 @@ pub trait LanguageModelProviderState: 'static {
     }
 }
 
-#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd, Serialize, Deserialize)]
-pub struct LanguageModelId(pub SharedString);
-
-#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
-pub struct LanguageModelName(pub SharedString);
-
-#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
-pub struct LanguageModelProviderId(pub SharedString);
-
-#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
-pub struct LanguageModelProviderName(pub SharedString);
-
 #[derive(Clone, Debug, PartialEq)]
 pub enum LanguageModelCostInfo {
     /// Cost per 1,000 input and output tokens
@@ -741,245 +362,3 @@ impl LanguageModelCostInfo {
         }
     }
 }
-
-impl LanguageModelProviderId {
-    pub const fn new(id: &'static str) -> Self {
-        Self(SharedString::new_static(id))
-    }
-}
-
-impl LanguageModelProviderName {
-    pub const fn new(id: &'static str) -> Self {
-        Self(SharedString::new_static(id))
-    }
-}
-
-impl fmt::Display for LanguageModelProviderId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.0)
-    }
-}
-
-impl fmt::Display for LanguageModelProviderName {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.0)
-    }
-}
-
-impl From<String> for LanguageModelId {
-    fn from(value: String) -> Self {
-        Self(SharedString::from(value))
-    }
-}
-
-impl From<String> for LanguageModelName {
-    fn from(value: String) -> Self {
-        Self(SharedString::from(value))
-    }
-}
-
-impl From<String> for LanguageModelProviderId {
-    fn from(value: String) -> Self {
-        Self(SharedString::from(value))
-    }
-}
-
-impl From<String> for LanguageModelProviderName {
-    fn from(value: String) -> Self {
-        Self(SharedString::from(value))
-    }
-}
-
-impl From<Arc<str>> for LanguageModelProviderId {
-    fn from(value: Arc<str>) -> Self {
-        Self(SharedString::from(value))
-    }
-}
-
-impl From<Arc<str>> for LanguageModelProviderName {
-    fn from(value: Arc<str>) -> Self {
-        Self(SharedString::from(value))
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_from_cloud_failure_with_upstream_http_error() {
-        let error = LanguageModelCompletionError::from_cloud_failure(
-            String::from("anthropic").into(),
-            "upstream_http_error".to_string(),
-            r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout","upstream_status":503}"#.to_string(),
-            None,
-        );
-
-        match error {
-            LanguageModelCompletionError::ServerOverloaded { provider, .. } => {
-                assert_eq!(provider.0, "anthropic");
-            }
-            _ => panic!(
-                "Expected ServerOverloaded error for 503 status, got: {:?}",
-                error
-            ),
-        }
-
-        let error = LanguageModelCompletionError::from_cloud_failure(
-            String::from("anthropic").into(),
-            "upstream_http_error".to_string(),
-            r#"{"code":"upstream_http_error","message":"Internal server error","upstream_status":500}"#.to_string(),
-            None,
-        );
-
-        match error {
-            LanguageModelCompletionError::ApiInternalServerError { provider, message } => {
-                assert_eq!(provider.0, "anthropic");
-                assert_eq!(message, "Internal server error");
-            }
-            _ => panic!(
-                "Expected ApiInternalServerError for 500 status, got: {:?}",
-                error
-            ),
-        }
-    }
-
-    #[test]
-    fn test_from_cloud_failure_with_standard_format() {
-        let error = LanguageModelCompletionError::from_cloud_failure(
-            String::from("anthropic").into(),
-            "upstream_http_503".to_string(),
-            "Service unavailable".to_string(),
-            None,
-        );
-
-        match error {
-            LanguageModelCompletionError::ServerOverloaded { provider, .. } => {
-                assert_eq!(provider.0, "anthropic");
-            }
-            _ => panic!("Expected ServerOverloaded error for upstream_http_503"),
-        }
-    }
-
-    #[test]
-    fn test_upstream_http_error_connection_timeout() {
-        let error = LanguageModelCompletionError::from_cloud_failure(
-            String::from("anthropic").into(),
-            "upstream_http_error".to_string(),
-            r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout","upstream_status":503}"#.to_string(),
-            None,
-        );
-
-        match error {
-            LanguageModelCompletionError::ServerOverloaded { provider, .. } => {
-                assert_eq!(provider.0, "anthropic");
-            }
-            _ => panic!(
-                "Expected ServerOverloaded error for connection timeout with 503 status, got: {:?}",
-                error
-            ),
-        }
-
-        let error = LanguageModelCompletionError::from_cloud_failure(
-            String::from("anthropic").into(),
-            "upstream_http_error".to_string(),
-            r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout","upstream_status":500}"#.to_string(),
-            None,
-        );
-
-        match error {
-            LanguageModelCompletionError::ApiInternalServerError { provider, message } => {
-                assert_eq!(provider.0, "anthropic");
-                assert_eq!(
-                    message,
-                    "Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout"
-                );
-            }
-            _ => panic!(
-                "Expected ApiInternalServerError for connection timeout with 500 status, got: {:?}",
-                error
-            ),
-        }
-    }
-
-    #[test]
-    fn test_language_model_tool_use_serializes_with_signature() {
-        use serde_json::json;
-
-        let tool_use = LanguageModelToolUse {
-            id: LanguageModelToolUseId::from("test_id"),
-            name: "test_tool".into(),
-            raw_input: json!({"arg": "value"}).to_string(),
-            input: json!({"arg": "value"}),
-            is_input_complete: true,
-            thought_signature: Some("test_signature".to_string()),
-        };
-
-        let serialized = serde_json::to_value(&tool_use).unwrap();
-
-        assert_eq!(serialized["id"], "test_id");
-        assert_eq!(serialized["name"], "test_tool");
-        assert_eq!(serialized["thought_signature"], "test_signature");
-    }
-
-    #[test]
-    fn test_language_model_tool_use_deserializes_with_missing_signature() {
-        use serde_json::json;
-
-        let json = json!({
-            "id": "test_id",
-            "name": "test_tool",
-            "raw_input": "{\"arg\":\"value\"}",
-            "input": {"arg": "value"},
-            "is_input_complete": true
-        });
-
-        let tool_use: LanguageModelToolUse = serde_json::from_value(json).unwrap();
-
-        assert_eq!(tool_use.id, LanguageModelToolUseId::from("test_id"));
-        assert_eq!(tool_use.name.as_ref(), "test_tool");
-        assert_eq!(tool_use.thought_signature, None);
-    }
-
-    #[test]
-    fn test_language_model_tool_use_round_trip_with_signature() {
-        use serde_json::json;
-
-        let original = LanguageModelToolUse {
-            id: LanguageModelToolUseId::from("round_trip_id"),
-            name: "round_trip_tool".into(),
-            raw_input: json!({"key": "value"}).to_string(),
-            input: json!({"key": "value"}),
-            is_input_complete: true,
-            thought_signature: Some("round_trip_sig".to_string()),
-        };
-
-        let serialized = serde_json::to_value(&original).unwrap();
-        let deserialized: LanguageModelToolUse = serde_json::from_value(serialized).unwrap();
-
-        assert_eq!(deserialized.id, original.id);
-        assert_eq!(deserialized.name, original.name);
-        assert_eq!(deserialized.thought_signature, original.thought_signature);
-    }
-
-    #[test]
-    fn test_language_model_tool_use_round_trip_without_signature() {
-        use serde_json::json;
-
-        let original = LanguageModelToolUse {
-            id: LanguageModelToolUseId::from("no_sig_id"),
-            name: "no_sig_tool".into(),
-            raw_input: json!({"arg": "value"}).to_string(),
-            input: json!({"arg": "value"}),
-            is_input_complete: true,
-            thought_signature: None,
-        };
-
-        let serialized = serde_json::to_value(&original).unwrap();
-        let deserialized: LanguageModelToolUse = serde_json::from_value(serialized).unwrap();
-
-        assert_eq!(deserialized.id, original.id);
-        assert_eq!(deserialized.name, original.name);
-        assert_eq!(deserialized.thought_signature, None);
-    }
-}

crates/language_model/src/model/cloud_model.rs 🔗

@@ -1,10 +1,5 @@
 use std::fmt;
-use std::sync::Arc;
 
-use cloud_api_client::ClientApiError;
-use cloud_api_client::CloudApiClient;
-use cloud_api_types::OrganizationId;
-use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
 use thiserror::Error;
 
 #[derive(Error, Debug)]
@@ -18,71 +13,3 @@ impl fmt::Display for PaymentRequiredError {
         )
     }
 }
-
-#[derive(Clone, Default)]
-pub struct LlmApiToken(Arc<RwLock<Option<String>>>);
-
-impl LlmApiToken {
-    pub async fn acquire(
-        &self,
-        client: &CloudApiClient,
-        system_id: Option<String>,
-        organization_id: Option<OrganizationId>,
-    ) -> Result<String, ClientApiError> {
-        let lock = self.0.upgradable_read().await;
-        if let Some(token) = lock.as_ref() {
-            Ok(token.to_string())
-        } else {
-            Self::fetch(
-                RwLockUpgradableReadGuard::upgrade(lock).await,
-                client,
-                system_id,
-                organization_id,
-            )
-            .await
-        }
-    }
-
-    pub async fn refresh(
-        &self,
-        client: &CloudApiClient,
-        system_id: Option<String>,
-        organization_id: Option<OrganizationId>,
-    ) -> Result<String, ClientApiError> {
-        Self::fetch(self.0.write().await, client, system_id, organization_id).await
-    }
-
-    /// Clears the existing token before attempting to fetch a new one.
-    ///
-    /// Used when switching organizations so that a failed refresh doesn't
-    /// leave a token for the wrong organization.
-    pub async fn clear_and_refresh(
-        &self,
-        client: &CloudApiClient,
-        system_id: Option<String>,
-        organization_id: Option<OrganizationId>,
-    ) -> Result<String, ClientApiError> {
-        let mut lock = self.0.write().await;
-        *lock = None;
-        Self::fetch(lock, client, system_id, organization_id).await
-    }
-
-    async fn fetch(
-        mut lock: RwLockWriteGuard<'_, Option<String>>,
-        client: &CloudApiClient,
-        system_id: Option<String>,
-        organization_id: Option<OrganizationId>,
-    ) -> Result<String, ClientApiError> {
-        let result = client.create_llm_token(system_id, organization_id).await;
-        match result {
-            Ok(response) => {
-                *lock = Some(response.token.0.clone());
-                Ok(response.token.0)
-            }
-            Err(err) => {
-                *lock = None;
-                Err(err)
-            }
-        }
-    }
-}

crates/language_model/src/provider.rs 🔗

@@ -1,12 +0,0 @@
-pub mod anthropic;
-pub mod google;
-pub mod open_ai;
-pub mod open_router;
-pub mod x_ai;
-pub mod zed;
-
-pub use anthropic::*;
-pub use google::*;
-pub use open_ai::*;
-pub use x_ai::*;
-pub use zed::*;

crates/language_model/src/provider/anthropic.rs 🔗

@@ -1,80 +0,0 @@
-use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName};
-use anthropic::AnthropicError;
-pub use anthropic::parse_prompt_too_long;
-
-pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId =
-    LanguageModelProviderId::new("anthropic");
-pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("Anthropic");
-
-impl From<AnthropicError> for LanguageModelCompletionError {
-    fn from(error: AnthropicError) -> Self {
-        let provider = ANTHROPIC_PROVIDER_NAME;
-        match error {
-            AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
-            AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
-            AnthropicError::HttpSend(error) => Self::HttpSend { provider, error },
-            AnthropicError::DeserializeResponse(error) => {
-                Self::DeserializeResponse { provider, error }
-            }
-            AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
-            AnthropicError::HttpResponseError {
-                status_code,
-                message,
-            } => Self::HttpResponseError {
-                provider,
-                status_code,
-                message,
-            },
-            AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded {
-                provider,
-                retry_after: Some(retry_after),
-            },
-            AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
-                provider,
-                retry_after,
-            },
-            AnthropicError::ApiError(api_error) => api_error.into(),
-        }
-    }
-}
-
-impl From<anthropic::ApiError> for LanguageModelCompletionError {
-    fn from(error: anthropic::ApiError) -> Self {
-        use anthropic::ApiErrorCode::*;
-        let provider = ANTHROPIC_PROVIDER_NAME;
-        match error.code() {
-            Some(code) => match code {
-                InvalidRequestError => Self::BadRequestFormat {
-                    provider,
-                    message: error.message,
-                },
-                AuthenticationError => Self::AuthenticationError {
-                    provider,
-                    message: error.message,
-                },
-                PermissionError => Self::PermissionError {
-                    provider,
-                    message: error.message,
-                },
-                NotFoundError => Self::ApiEndpointNotFound { provider },
-                RequestTooLarge => Self::PromptTooLarge {
-                    tokens: parse_prompt_too_long(&error.message),
-                },
-                RateLimitError => Self::RateLimitExceeded {
-                    provider,
-                    retry_after: None,
-                },
-                ApiError => Self::ApiInternalServerError {
-                    provider,
-                    message: error.message,
-                },
-                OverloadedError => Self::ServerOverloaded {
-                    provider,
-                    retry_after: None,
-                },
-            },
-            None => Self::Other(error.into()),
-        }
-    }
-}

crates/language_model/src/provider/google.rs 🔗

@@ -1,5 +0,0 @@
-use crate::{LanguageModelProviderId, LanguageModelProviderName};
-
-pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google");
-pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("Google AI");

crates/language_model/src/provider/open_ai.rs 🔗

@@ -1,28 +0,0 @@
-use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName};
-use http_client::http;
-use std::time::Duration;
-
-pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai");
-pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("OpenAI");
-
-impl From<open_ai::RequestError> for LanguageModelCompletionError {
-    fn from(error: open_ai::RequestError) -> Self {
-        match error {
-            open_ai::RequestError::HttpResponseError {
-                provider,
-                status_code,
-                body,
-                headers,
-            } => {
-                let retry_after = headers
-                    .get(http::header::RETRY_AFTER)
-                    .and_then(|val| val.to_str().ok()?.parse::<u64>().ok())
-                    .map(Duration::from_secs);
-
-                Self::from_http_status(provider.into(), status_code, body, retry_after)
-            }
-            open_ai::RequestError::Other(e) => Self::Other(e),
-        }
-    }
-}

crates/language_model/src/provider/open_router.rs 🔗

@@ -1,69 +0,0 @@
-use crate::{LanguageModelCompletionError, LanguageModelProviderName};
-use http_client::StatusCode;
-use open_router::OpenRouterError;
-
-impl From<OpenRouterError> for LanguageModelCompletionError {
-    fn from(error: OpenRouterError) -> Self {
-        let provider = LanguageModelProviderName::new("OpenRouter");
-        match error {
-            OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
-            OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
-            OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error },
-            OpenRouterError::DeserializeResponse(error) => {
-                Self::DeserializeResponse { provider, error }
-            }
-            OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
-            OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded {
-                provider,
-                retry_after: Some(retry_after),
-            },
-            OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
-                provider,
-                retry_after,
-            },
-            OpenRouterError::ApiError(api_error) => api_error.into(),
-        }
-    }
-}
-
-impl From<open_router::ApiError> for LanguageModelCompletionError {
-    fn from(error: open_router::ApiError) -> Self {
-        use open_router::ApiErrorCode::*;
-        let provider = LanguageModelProviderName::new("OpenRouter");
-        match error.code {
-            InvalidRequestError => Self::BadRequestFormat {
-                provider,
-                message: error.message,
-            },
-            AuthenticationError => Self::AuthenticationError {
-                provider,
-                message: error.message,
-            },
-            PaymentRequiredError => Self::AuthenticationError {
-                provider,
-                message: format!("Payment required: {}", error.message),
-            },
-            PermissionError => Self::PermissionError {
-                provider,
-                message: error.message,
-            },
-            RequestTimedOut => Self::HttpResponseError {
-                provider,
-                status_code: StatusCode::REQUEST_TIMEOUT,
-                message: error.message,
-            },
-            RateLimitError => Self::RateLimitExceeded {
-                provider,
-                retry_after: None,
-            },
-            ApiError => Self::ApiInternalServerError {
-                provider,
-                message: error.message,
-            },
-            OverloadedError => Self::ServerOverloaded {
-                provider,
-                retry_after: None,
-            },
-        }
-    }
-}

crates/language_model/src/provider/x_ai.rs 🔗

@@ -1,4 +0,0 @@
-use crate::{LanguageModelProviderId, LanguageModelProviderName};
-
-pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai");
-pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI");

crates/language_model/src/provider/zed.rs 🔗

@@ -1,5 +0,0 @@
-use crate::{LanguageModelProviderId, LanguageModelProviderName};
-
-pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev");
-pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("Zed");

crates/language_model/src/registry.rs 🔗

@@ -1,6 +1,6 @@
 use crate::{
     LanguageModel, LanguageModelId, LanguageModelProvider, LanguageModelProviderId,
-    LanguageModelProviderState,
+    LanguageModelProviderState, ZED_CLOUD_PROVIDER_ID,
 };
 use collections::{BTreeMap, HashSet};
 use gpui::{App, Context, Entity, EventEmitter, Global, prelude::*};
@@ -101,7 +101,7 @@ impl ConfiguredModel {
     }
 
     pub fn is_provided_by_zed(&self) -> bool {
-        self.provider.id() == crate::provider::ZED_CLOUD_PROVIDER_ID
+        self.provider.id() == ZED_CLOUD_PROVIDER_ID
     }
 }
 

crates/language_model/src/request.rs 🔗

@@ -4,78 +4,13 @@ use std::sync::Arc;
 use anyhow::Result;
 use base64::write::EncoderWriter;
 use gpui::{
-    App, AppContext as _, DevicePixels, Image, ImageFormat, ObjectFit, SharedString, Size, Task,
-    point, px, size,
+    App, AppContext as _, DevicePixels, Image, ImageFormat, ObjectFit, Size, Task, point, px, size,
 };
 use image::GenericImageView as _;
 use image::codecs::png::PngEncoder;
-use serde::{Deserialize, Serialize};
 use util::ResultExt;
 
-use crate::role::Role;
-use crate::{LanguageModelToolUse, LanguageModelToolUseId};
-
-#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
-pub struct LanguageModelImage {
-    /// A base64-encoded PNG image.
-    pub source: SharedString,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub size: Option<Size<DevicePixels>>,
-}
-
-impl LanguageModelImage {
-    pub fn len(&self) -> usize {
-        self.source.len()
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.source.is_empty()
-    }
-
-    // Parse Self from a JSON object with case-insensitive field names
-    pub fn from_json(obj: &serde_json::Map<String, serde_json::Value>) -> Option<Self> {
-        let mut source = None;
-        let mut size_obj = None;
-
-        // Find source and size fields (case-insensitive)
-        for (k, v) in obj.iter() {
-            match k.to_lowercase().as_str() {
-                "source" => source = v.as_str(),
-                "size" => size_obj = v.as_object(),
-                _ => {}
-            }
-        }
-
-        let source = source?;
-        let size_obj = size_obj?;
-
-        let mut width = None;
-        let mut height = None;
-
-        // Find width and height in size object (case-insensitive)
-        for (k, v) in size_obj.iter() {
-            match k.to_lowercase().as_str() {
-                "width" => width = v.as_i64().map(|w| w as i32),
-                "height" => height = v.as_i64().map(|h| h as i32),
-                _ => {}
-            }
-        }
-
-        Some(Self {
-            size: Some(size(DevicePixels(width?), DevicePixels(height?))),
-            source: SharedString::from(source.to_string()),
-        })
-    }
-}
-
-impl std::fmt::Debug for LanguageModelImage {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("LanguageModelImage")
-            .field("source", &format!("<{} bytes>", self.source.len()))
-            .field("size", &self.size)
-            .finish()
-    }
-}
+use language_model_core::{ImageSize, LanguageModelImage};
 
 /// Anthropic wants uploaded images to be smaller than this in both dimensions.
 const ANTHROPIC_SIZE_LIMIT: f32 = 1568.;
@@ -90,18 +25,16 @@ const DEFAULT_IMAGE_MAX_BYTES: usize = 5 * 1024 * 1024;
 /// `DEFAULT_IMAGE_MAX_BYTES`.
 const MAX_IMAGE_DOWNSCALE_PASSES: usize = 8;
 
-impl LanguageModelImage {
-    // All language model images are encoded as PNGs.
-    pub const FORMAT: ImageFormat = ImageFormat::Png;
+/// Extension trait for `LanguageModelImage` that provides GPUI-dependent functionality.
+pub trait LanguageModelImageExt {
+    const FORMAT: ImageFormat;
+    fn from_image(data: Arc<Image>, cx: &mut App) -> Task<Option<LanguageModelImage>>;
+}
 
-    pub fn empty() -> Self {
-        Self {
-            source: "".into(),
-            size: None,
-        }
-    }
+impl LanguageModelImageExt for LanguageModelImage {
+    const FORMAT: ImageFormat = ImageFormat::Png;
 
-    pub fn from_image(data: Arc<Image>, cx: &mut App) -> Task<Option<Self>> {
+    fn from_image(data: Arc<Image>, cx: &mut App) -> Task<Option<LanguageModelImage>> {
         cx.background_spawn(async move {
             let image_bytes = Cursor::new(data.bytes());
             let dynamic_image = match data.format() {
@@ -186,28 +119,14 @@ impl LanguageModelImage {
             let source = unsafe { String::from_utf8_unchecked(base64_image) };
 
             Some(LanguageModelImage {
-                size: Some(image_size),
+                size: Some(ImageSize {
+                    width: width as i32,
+                    height: height as i32,
+                }),
                 source: source.into(),
             })
         })
     }
-
-    pub fn estimate_tokens(&self) -> usize {
-        let Some(size) = self.size.as_ref() else {
-            return 0;
-        };
-        let width = size.width.0.unsigned_abs() as usize;
-        let height = size.height.0.unsigned_abs() as usize;
-
-        // From: https://docs.anthropic.com/en/docs/build-with-claude/vision#calculate-image-costs
-        // Note that are a lot of conditions on Anthropic's API, and OpenAI doesn't use this,
-        // so this method is more of a rough guess.
-        (width * height) / 750
-    }
-
-    pub fn to_base64_url(&self) -> String {
-        format!("data:image/png;base64,{}", self.source)
-    }
 }
 
 fn encode_png_bytes(image: &image::DynamicImage) -> Result<Vec<u8>> {
@@ -228,512 +147,85 @@ fn encode_bytes_as_base64(bytes: &[u8]) -> Result<Vec<u8>> {
     Ok(base64_image)
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
-pub struct LanguageModelToolResult {
-    pub tool_use_id: LanguageModelToolUseId,
-    pub tool_name: Arc<str>,
-    pub is_error: bool,
-    /// The tool output formatted for presenting to the model
-    pub content: LanguageModelToolResultContent,
-    /// The raw tool output, if available, often for debugging or extra state for replay
-    pub output: Option<serde_json::Value>,
-}
-
-#[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)]
-pub enum LanguageModelToolResultContent {
-    Text(Arc<str>),
-    Image(LanguageModelImage),
-}
-
-impl<'de> Deserialize<'de> for LanguageModelToolResultContent {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        use serde::de::Error;
-
-        let value = serde_json::Value::deserialize(deserializer)?;
-
-        // Models can provide these responses in several styles. Try each in order.
-
-        // 1. Try as plain string
-        if let Ok(text) = serde_json::from_value::<String>(value.clone()) {
-            return Ok(Self::Text(Arc::from(text)));
-        }
-
-        // 2. Try as object
-        if let Some(obj) = value.as_object() {
-            // get a JSON field case-insensitively
-            fn get_field<'a>(
-                obj: &'a serde_json::Map<String, serde_json::Value>,
-                field: &str,
-            ) -> Option<&'a serde_json::Value> {
-                obj.iter()
-                    .find(|(k, _)| k.to_lowercase() == field.to_lowercase())
-                    .map(|(_, v)| v)
-            }
-
-            // Accept wrapped text format: { "type": "text", "text": "..." }
-            if let (Some(type_value), Some(text_value)) =
-                (get_field(obj, "type"), get_field(obj, "text"))
-                && let Some(type_str) = type_value.as_str()
-                && type_str.to_lowercase() == "text"
-                && let Some(text) = text_value.as_str()
-            {
-                return Ok(Self::Text(Arc::from(text)));
-            }
-
-            // Check for wrapped Text variant: { "text": "..." }
-            if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "text")
-                && obj.len() == 1
-            {
-                // Only one field, and it's "text" (case-insensitive)
-                if let Some(text) = value.as_str() {
-                    return Ok(Self::Text(Arc::from(text)));
-                }
-            }
-
-            // Check for wrapped Image variant: { "image": { "source": "...", "size": ... } }
-            if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "image")
-                && obj.len() == 1
-            {
-                // Only one field, and it's "image" (case-insensitive)
-                // Try to parse the nested image object
-                if let Some(image_obj) = value.as_object()
-                    && let Some(image) = LanguageModelImage::from_json(image_obj)
-                {
-                    return Ok(Self::Image(image));
-                }
-            }
-
-            // Try as direct Image (object with "source" and "size" fields)
-            if let Some(image) = LanguageModelImage::from_json(obj) {
-                return Ok(Self::Image(image));
-            }
-        }
-
-        // If none of the variants match, return an error with the problematic JSON
-        Err(D::Error::custom(format!(
-            "data did not match any variant of LanguageModelToolResultContent. Expected either a string, \
-             an object with 'type': 'text', a wrapped variant like {{\"Text\": \"...\"}}, or an image object. Got: {}",
-            serde_json::to_string_pretty(&value).unwrap_or_else(|_| value.to_string())
-        )))
-    }
-}
-
-impl LanguageModelToolResultContent {
-    pub fn to_str(&self) -> Option<&str> {
-        match self {
-            Self::Text(text) => Some(text),
-            Self::Image(_) => None,
-        }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        match self {
-            Self::Text(text) => text.chars().all(|c| c.is_whitespace()),
-            Self::Image(_) => false,
-        }
-    }
-}
-
-impl From<&str> for LanguageModelToolResultContent {
-    fn from(value: &str) -> Self {
-        Self::Text(Arc::from(value))
-    }
-}
-
-impl From<String> for LanguageModelToolResultContent {
-    fn from(value: String) -> Self {
-        Self::Text(Arc::from(value))
-    }
-}
-
-impl From<LanguageModelImage> for LanguageModelToolResultContent {
-    fn from(image: LanguageModelImage) -> Self {
-        Self::Image(image)
-    }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
-pub enum MessageContent {
-    Text(String),
-    Thinking {
-        text: String,
-        signature: Option<String>,
-    },
-    RedactedThinking(String),
-    Image(LanguageModelImage),
-    ToolUse(LanguageModelToolUse),
-    ToolResult(LanguageModelToolResult),
-}
-
-impl MessageContent {
-    pub fn to_str(&self) -> Option<&str> {
-        match self {
-            MessageContent::Text(text) => Some(text.as_str()),
-            MessageContent::Thinking { text, .. } => Some(text.as_str()),
-            MessageContent::RedactedThinking(_) => None,
-            MessageContent::ToolResult(tool_result) => tool_result.content.to_str(),
-            MessageContent::ToolUse(_) | MessageContent::Image(_) => None,
-        }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        match self {
-            MessageContent::Text(text) => text.chars().all(|c| c.is_whitespace()),
-            MessageContent::Thinking { text, .. } => text.chars().all(|c| c.is_whitespace()),
-            MessageContent::ToolResult(tool_result) => tool_result.content.is_empty(),
-            MessageContent::RedactedThinking(_)
-            | MessageContent::ToolUse(_)
-            | MessageContent::Image(_) => false,
-        }
-    }
-}
-
-impl From<String> for MessageContent {
-    fn from(value: String) -> Self {
-        MessageContent::Text(value)
-    }
-}
-
-impl From<&str> for MessageContent {
-    fn from(value: &str) -> Self {
-        MessageContent::Text(value.to_string())
-    }
-}
-
-#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Hash)]
-pub struct LanguageModelRequestMessage {
-    pub role: Role,
-    pub content: Vec<MessageContent>,
-    pub cache: bool,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub reasoning_details: Option<serde_json::Value>,
-}
-
-impl LanguageModelRequestMessage {
-    pub fn string_contents(&self) -> String {
-        let mut buffer = String::new();
-        for string in self.content.iter().filter_map(|content| content.to_str()) {
-            buffer.push_str(string);
-        }
-
-        buffer
-    }
-
-    pub fn contents_empty(&self) -> bool {
-        self.content.iter().all(|content| content.is_empty())
-    }
-}
-
-#[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
-pub struct LanguageModelRequestTool {
-    pub name: String,
-    pub description: String,
-    pub input_schema: serde_json::Value,
-    pub use_input_streaming: bool,
-}
-
-#[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
-pub enum LanguageModelToolChoice {
-    Auto,
-    Any,
-    None,
-}
-
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum CompletionIntent {
-    UserPrompt,
-    Subagent,
-    ToolResults,
-    ThreadSummarization,
-    ThreadContextSummarization,
-    CreateFile,
-    EditFile,
-    InlineAssist,
-    TerminalInlineAssist,
-    GenerateGitCommitMessage,
-}
-
-#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
-pub struct LanguageModelRequest {
-    pub thread_id: Option<String>,
-    pub prompt_id: Option<String>,
-    pub intent: Option<CompletionIntent>,
-    pub messages: Vec<LanguageModelRequestMessage>,
-    pub tools: Vec<LanguageModelRequestTool>,
-    pub tool_choice: Option<LanguageModelToolChoice>,
-    pub stop: Vec<String>,
-    pub temperature: Option<f32>,
-    pub thinking_allowed: bool,
-    pub thinking_effort: Option<String>,
-    pub speed: Option<Speed>,
-}
-
-#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
-#[serde(rename_all = "snake_case")]
-pub enum Speed {
-    #[default]
-    Standard,
-    Fast,
-}
-
-impl Speed {
-    pub fn toggle(self) -> Self {
-        match self {
-            Speed::Standard => Speed::Fast,
-            Speed::Fast => Speed::Standard,
-        }
+/// Convert a core `ImageSize` to a gpui `Size<DevicePixels>`.
+pub fn image_size_to_gpui(size: ImageSize) -> Size<DevicePixels> {
+    Size {
+        width: DevicePixels(size.width),
+        height: DevicePixels(size.height),
     }
 }
 
-impl From<Speed> for anthropic::Speed {
-    fn from(speed: Speed) -> Self {
-        match speed {
-            Speed::Standard => anthropic::Speed::Standard,
-            Speed::Fast => anthropic::Speed::Fast,
-        }
+/// Convert a gpui `Size<DevicePixels>` to a core `ImageSize`.
+pub fn gpui_size_to_image_size(size: Size<DevicePixels>) -> ImageSize {
+    ImageSize {
+        width: size.width.0,
+        height: size.height.0,
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
-pub struct LanguageModelResponseMessage {
-    pub role: Option<Role>,
-    pub content: Option<String>,
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
     use base64::Engine as _;
     use gpui::TestAppContext;
-    use image::ImageDecoder as _;
 
-    fn base64_to_png_bytes(base64_png: &str) -> Vec<u8> {
+    fn base64_to_png_bytes(base64: &str) -> Vec<u8> {
         base64::engine::general_purpose::STANDARD
-            .decode(base64_png.as_bytes())
-            .expect("base64 should decode")
+            .decode(base64)
+            .expect("valid base64")
     }
 
     fn png_dimensions(png_bytes: &[u8]) -> (u32, u32) {
-        let decoder =
-            image::codecs::png::PngDecoder::new(Cursor::new(png_bytes)).expect("png should decode");
-        decoder.dimensions()
+        let img = image::load_from_memory(png_bytes).expect("valid png");
+        (img.width(), img.height())
     }
 
     fn make_noisy_png_bytes(width: u32, height: u32) -> Vec<u8> {
-        // Create an RGBA image with per-pixel variance to avoid PNG compressing too well.
-        let mut img = image::RgbaImage::new(width, height);
-        for y in 0..height {
-            for x in 0..width {
-                let r = ((x ^ y) & 0xFF) as u8;
-                let g = ((x.wrapping_mul(31) ^ y.wrapping_mul(17)) & 0xFF) as u8;
-                let b = ((x.wrapping_mul(131) ^ y.wrapping_mul(7)) & 0xFF) as u8;
-                img.put_pixel(x, y, image::Rgba([r, g, b, 0xFF]));
-            }
-        }
+        use image::{ImageBuffer, Rgba};
+        use std::hash::{Hash, Hasher};
+
+        let img = ImageBuffer::from_fn(width, height, |x, y| {
+            let mut hasher = std::hash::DefaultHasher::new();
+            (x, y, width, height).hash(&mut hasher);
+            let h = hasher.finish();
+            Rgba([h as u8, (h >> 8) as u8, (h >> 16) as u8, 255])
+        });
 
-        let mut out = Vec::new();
-        image::DynamicImage::ImageRgba8(img)
-            .write_with_encoder(PngEncoder::new(&mut out))
-            .expect("png encoding should succeed");
-        out
+        let mut buf = Cursor::new(Vec::new());
+        img.write_with_encoder(PngEncoder::new(&mut buf))
+            .expect("encode");
+        buf.into_inner()
     }
 
     #[gpui::test]
     async fn test_from_image_downscales_to_default_5mb_limit(cx: &mut TestAppContext) {
-        // Pick a size that reliably produces a PNG > 5MB when filled with noise.
-        // If this fails (image is too small), bump dimensions.
-        let original_png = make_noisy_png_bytes(4096, 4096);
+        let raw_png = make_noisy_png_bytes(4096, 4096);
         assert!(
-            original_png.len() > DEFAULT_IMAGE_MAX_BYTES,
-            "precondition failed: noisy PNG must exceed DEFAULT_IMAGE_MAX_BYTES"
+            raw_png.len() > DEFAULT_IMAGE_MAX_BYTES,
+            "Test image should exceed the 5 MB limit (actual: {} bytes)",
+            raw_png.len()
         );
 
-        let image = gpui::Image::from_bytes(ImageFormat::Png, original_png);
+        let image = Arc::new(gpui::Image::from_bytes(ImageFormat::Png, raw_png));
         let lm_image = cx
-            .update(|cx| LanguageModelImage::from_image(Arc::new(image), cx))
+            .update(|cx| LanguageModelImage::from_image(Arc::clone(&image), cx))
             .await
-            .expect("image conversion should succeed");
+            .expect("from_image should succeed");
 
-        let encoded_png = base64_to_png_bytes(lm_image.source.as_ref());
+        let decoded_png = base64_to_png_bytes(lm_image.source.as_ref());
         assert!(
-            encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES,
-            "expected encoded PNG <= DEFAULT_IMAGE_MAX_BYTES, got {} bytes",
-            encoded_png.len()
+            decoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES,
+            "Encoded PNG should be ≤ {} bytes after downscale, but was {} bytes",
+            DEFAULT_IMAGE_MAX_BYTES,
+            decoded_png.len()
         );
 
-        // Ensure we actually downscaled in pixels (not just re-encoded).
-        let (w, h) = png_dimensions(&encoded_png);
+        let (w, h) = png_dimensions(&decoded_png);
         assert!(
-            w < 4096 || h < 4096,
-            "expected image to be downscaled in at least one dimension; got {w}x{h}"
-        );
-    }
-
-    #[test]
-    fn test_language_model_tool_result_content_deserialization() {
-        let json = r#""This is plain text""#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(
-            result,
-            LanguageModelToolResultContent::Text("This is plain text".into())
-        );
-
-        let json = r#"{"type": "text", "text": "This is wrapped text"}"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(
-            result,
-            LanguageModelToolResultContent::Text("This is wrapped text".into())
-        );
-
-        let json = r#"{"Type": "TEXT", "TEXT": "Case insensitive"}"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(
-            result,
-            LanguageModelToolResultContent::Text("Case insensitive".into())
-        );
-
-        let json = r#"{"Text": "Wrapped variant"}"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(
-            result,
-            LanguageModelToolResultContent::Text("Wrapped variant".into())
-        );
-
-        let json = r#"{"text": "Lowercase wrapped"}"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(
-            result,
-            LanguageModelToolResultContent::Text("Lowercase wrapped".into())
+            w < 4096 && h < 4096,
+            "Dimensions should have shrunk: got {}×{}",
+            w,
+            h
         );
-
-        // Test image deserialization
-        let json = r#"{
-            "source": "base64encodedimagedata",
-            "size": {
-                "width": 100,
-                "height": 200
-            }
-        }"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        match result {
-            LanguageModelToolResultContent::Image(image) => {
-                assert_eq!(image.source.as_ref(), "base64encodedimagedata");
-                let size = image.size.expect("size");
-                assert_eq!(size.width.0, 100);
-                assert_eq!(size.height.0, 200);
-            }
-            _ => panic!("Expected Image variant"),
-        }
-
-        // Test wrapped Image variant
-        let json = r#"{
-            "Image": {
-                "source": "wrappedimagedata",
-                "size": {
-                    "width": 50,
-                    "height": 75
-                }
-            }
-        }"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        match result {
-            LanguageModelToolResultContent::Image(image) => {
-                assert_eq!(image.source.as_ref(), "wrappedimagedata");
-                let size = image.size.expect("size");
-                assert_eq!(size.width.0, 50);
-                assert_eq!(size.height.0, 75);
-            }
-            _ => panic!("Expected Image variant"),
-        }
-
-        // Test wrapped Image variant with case insensitive
-        let json = r#"{
-            "image": {
-                "Source": "caseinsensitive",
-                "SIZE": {
-                    "width": 30,
-                    "height": 40
-                }
-            }
-        }"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        match result {
-            LanguageModelToolResultContent::Image(image) => {
-                assert_eq!(image.source.as_ref(), "caseinsensitive");
-                let size = image.size.expect("size");
-                assert_eq!(size.width.0, 30);
-                assert_eq!(size.height.0, 40);
-            }
-            _ => panic!("Expected Image variant"),
-        }
-
-        // Test that wrapped text with wrong type fails
-        let json = r#"{"type": "blahblah", "text": "This should fail"}"#;
-        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
-        assert!(result.is_err());
-
-        // Test that malformed JSON fails
-        let json = r#"{"invalid": "structure"}"#;
-        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
-        assert!(result.is_err());
-
-        // Test edge cases
-        let json = r#""""#; // Empty string
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(result, LanguageModelToolResultContent::Text("".into()));
-
-        // Test with extra fields in wrapped text (should be ignored)
-        let json = r#"{"type": "text", "text": "Hello", "extra": "field"}"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(result, LanguageModelToolResultContent::Text("Hello".into()));
-
-        // Test direct image with case-insensitive fields
-        let json = r#"{
-            "SOURCE": "directimage",
-            "Size": {
-                "width": 200,
-                "height": 300
-            }
-        }"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        match result {
-            LanguageModelToolResultContent::Image(image) => {
-                assert_eq!(image.source.as_ref(), "directimage");
-                let size = image.size.expect("size");
-                assert_eq!(size.width.0, 200);
-                assert_eq!(size.height.0, 300);
-            }
-            _ => panic!("Expected Image variant"),
-        }
-
-        // Test that multiple fields prevent wrapped variant interpretation
-        let json = r#"{"Text": "not wrapped", "extra": "field"}"#;
-        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
-        assert!(result.is_err());
-
-        // Test wrapped text with uppercase TEXT variant
-        let json = r#"{"TEXT": "Uppercase variant"}"#;
-        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
-        assert_eq!(
-            result,
-            LanguageModelToolResultContent::Text("Uppercase variant".into())
-        );
-
-        // Test that numbers and other JSON values fail gracefully
-        let json = r#"123"#;
-        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
-        assert!(result.is_err());
-
-        let json = r#"null"#;
-        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
-        assert!(result.is_err());
-
-        let json = r#"[1, 2, 3]"#;
-        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
-        assert!(result.is_err());
     }
 }

crates/language_model_core/Cargo.toml 🔗

@@ -0,0 +1,27 @@
+[package]
+name = "language_model_core"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/language_model_core.rs"
+doctest = false
+
+[dependencies]
+anyhow.workspace = true
+cloud_llm_client.workspace = true
+futures.workspace = true
+gpui_shared_string.workspace = true
+http_client.workspace = true
+partial-json-fixer.workspace = true
+schemars.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+smol.workspace = true
+strum.workspace = true
+thiserror.workspace = true

crates/language_model_core/src/language_model_core.rs 🔗

@@ -0,0 +1,658 @@
+mod provider;
+mod rate_limiter;
+mod request;
+mod role;
+pub mod tool_schema;
+pub mod util;
+
+use anyhow::{Result, anyhow};
+use cloud_llm_client::CompletionRequestStatus;
+use http_client::{StatusCode, http};
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use std::ops::{Add, Sub};
+use std::str::FromStr;
+use std::sync::Arc;
+use std::time::Duration;
+use std::{fmt, io};
+use thiserror::Error;
+fn is_default<T: Default + PartialEq>(value: &T) -> bool {
+    *value == T::default()
+}
+
+pub use crate::provider::*;
+pub use crate::rate_limiter::*;
+pub use crate::request::*;
+pub use crate::role::*;
+pub use crate::tool_schema::LanguageModelToolSchemaFormat;
+pub use crate::util::{fix_streamed_json, parse_prompt_too_long, parse_tool_arguments};
+pub use gpui_shared_string::SharedString;
+
+#[derive(Clone, Debug)]
+pub struct LanguageModelCacheConfiguration {
+    pub max_cache_anchors: usize,
+    pub should_speculate: bool,
+    pub min_total_token: u64,
+}
+
+/// A completion event from a language model.
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+pub enum LanguageModelCompletionEvent {
+    Queued {
+        position: usize,
+    },
+    Started,
+    Stop(StopReason),
+    Text(String),
+    Thinking {
+        text: String,
+        signature: Option<String>,
+    },
+    RedactedThinking {
+        data: String,
+    },
+    ToolUse(LanguageModelToolUse),
+    ToolUseJsonParseError {
+        id: LanguageModelToolUseId,
+        tool_name: Arc<str>,
+        raw_input: Arc<str>,
+        json_parse_error: String,
+    },
+    StartMessage {
+        message_id: String,
+    },
+    ReasoningDetails(serde_json::Value),
+    UsageUpdate(TokenUsage),
+}
+
+impl LanguageModelCompletionEvent {
+    pub fn from_completion_request_status(
+        status: CompletionRequestStatus,
+        upstream_provider: LanguageModelProviderName,
+    ) -> Result<Option<Self>, LanguageModelCompletionError> {
+        match status {
+            CompletionRequestStatus::Queued { position } => {
+                Ok(Some(LanguageModelCompletionEvent::Queued { position }))
+            }
+            CompletionRequestStatus::Started => Ok(Some(LanguageModelCompletionEvent::Started)),
+            CompletionRequestStatus::Unknown | CompletionRequestStatus::StreamEnded => Ok(None),
+            CompletionRequestStatus::Failed {
+                code,
+                message,
+                request_id: _,
+                retry_after,
+            } => Err(LanguageModelCompletionError::from_cloud_failure(
+                upstream_provider,
+                code,
+                message,
+                retry_after.map(Duration::from_secs_f64),
+            )),
+        }
+    }
+}
+
+#[derive(Error, Debug)]
+pub enum LanguageModelCompletionError {
+    #[error("prompt too large for context window")]
+    PromptTooLarge { tokens: Option<u64> },
+    #[error("missing {provider} API key")]
+    NoApiKey { provider: LanguageModelProviderName },
+    #[error("{provider}'s API rate limit exceeded")]
+    RateLimitExceeded {
+        provider: LanguageModelProviderName,
+        retry_after: Option<Duration>,
+    },
+    #[error("{provider}'s API servers are overloaded right now")]
+    ServerOverloaded {
+        provider: LanguageModelProviderName,
+        retry_after: Option<Duration>,
+    },
+    #[error("{provider}'s API server reported an internal server error: {message}")]
+    ApiInternalServerError {
+        provider: LanguageModelProviderName,
+        message: String,
+    },
+    #[error("{message}")]
+    UpstreamProviderError {
+        message: String,
+        status: StatusCode,
+        retry_after: Option<Duration>,
+    },
+    #[error("HTTP response error from {provider}'s API: status {status_code} - {message:?}")]
+    HttpResponseError {
+        provider: LanguageModelProviderName,
+        status_code: StatusCode,
+        message: String,
+    },
+    #[error("invalid request format to {provider}'s API: {message}")]
+    BadRequestFormat {
+        provider: LanguageModelProviderName,
+        message: String,
+    },
+    #[error("authentication error with {provider}'s API: {message}")]
+    AuthenticationError {
+        provider: LanguageModelProviderName,
+        message: String,
+    },
+    #[error("Permission error with {provider}'s API: {message}")]
+    PermissionError {
+        provider: LanguageModelProviderName,
+        message: String,
+    },
+    #[error("language model provider API endpoint not found")]
+    ApiEndpointNotFound { provider: LanguageModelProviderName },
+    #[error("I/O error reading response from {provider}'s API")]
+    ApiReadResponseError {
+        provider: LanguageModelProviderName,
+        #[source]
+        error: io::Error,
+    },
+    #[error("error serializing request to {provider} API")]
+    SerializeRequest {
+        provider: LanguageModelProviderName,
+        #[source]
+        error: serde_json::Error,
+    },
+    #[error("error building request body to {provider} API")]
+    BuildRequestBody {
+        provider: LanguageModelProviderName,
+        #[source]
+        error: http::Error,
+    },
+    #[error("error sending HTTP request to {provider} API")]
+    HttpSend {
+        provider: LanguageModelProviderName,
+        #[source]
+        error: anyhow::Error,
+    },
+    #[error("error deserializing {provider} API response")]
+    DeserializeResponse {
+        provider: LanguageModelProviderName,
+        #[source]
+        error: serde_json::Error,
+    },
+    #[error("stream from {provider} ended unexpectedly")]
+    StreamEndedUnexpectedly { provider: LanguageModelProviderName },
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+impl LanguageModelCompletionError {
+    fn parse_upstream_error_json(message: &str) -> Option<(StatusCode, String)> {
+        let error_json = serde_json::from_str::<serde_json::Value>(message).ok()?;
+        let upstream_status = error_json
+            .get("upstream_status")
+            .and_then(|v| v.as_u64())
+            .and_then(|status| u16::try_from(status).ok())
+            .and_then(|status| StatusCode::from_u16(status).ok())?;
+        let inner_message = error_json
+            .get("message")
+            .and_then(|v| v.as_str())
+            .unwrap_or(message)
+            .to_string();
+        Some((upstream_status, inner_message))
+    }
+
+    pub fn from_cloud_failure(
+        upstream_provider: LanguageModelProviderName,
+        code: String,
+        message: String,
+        retry_after: Option<Duration>,
+    ) -> Self {
+        if let Some(tokens) = parse_prompt_too_long(&message) {
+            Self::PromptTooLarge {
+                tokens: Some(tokens),
+            }
+        } else if code == "upstream_http_error" {
+            if let Some((upstream_status, inner_message)) =
+                Self::parse_upstream_error_json(&message)
+            {
+                return Self::from_http_status(
+                    upstream_provider,
+                    upstream_status,
+                    inner_message,
+                    retry_after,
+                );
+            }
+            anyhow!("completion request failed, code: {code}, message: {message}").into()
+        } else if let Some(status_code) = code
+            .strip_prefix("upstream_http_")
+            .and_then(|code| StatusCode::from_str(code).ok())
+        {
+            Self::from_http_status(upstream_provider, status_code, message, retry_after)
+        } else if let Some(status_code) = code
+            .strip_prefix("http_")
+            .and_then(|code| StatusCode::from_str(code).ok())
+        {
+            Self::from_http_status(ZED_CLOUD_PROVIDER_NAME, status_code, message, retry_after)
+        } else {
+            anyhow!("completion request failed, code: {code}, message: {message}").into()
+        }
+    }
+
+    pub fn from_http_status(
+        provider: LanguageModelProviderName,
+        status_code: StatusCode,
+        message: String,
+        retry_after: Option<Duration>,
+    ) -> Self {
+        match status_code {
+            StatusCode::BAD_REQUEST => Self::BadRequestFormat { provider, message },
+            StatusCode::UNAUTHORIZED => Self::AuthenticationError { provider, message },
+            StatusCode::FORBIDDEN => Self::PermissionError { provider, message },
+            StatusCode::NOT_FOUND => Self::ApiEndpointNotFound { provider },
+            StatusCode::PAYLOAD_TOO_LARGE => Self::PromptTooLarge {
+                tokens: parse_prompt_too_long(&message),
+            },
+            StatusCode::TOO_MANY_REQUESTS => Self::RateLimitExceeded {
+                provider,
+                retry_after,
+            },
+            StatusCode::INTERNAL_SERVER_ERROR => Self::ApiInternalServerError { provider, message },
+            StatusCode::SERVICE_UNAVAILABLE => Self::ServerOverloaded {
+                provider,
+                retry_after,
+            },
+            _ if status_code.as_u16() == 529 => Self::ServerOverloaded {
+                provider,
+                retry_after,
+            },
+            _ => Self::HttpResponseError {
+                provider,
+                status_code,
+                message,
+            },
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum StopReason {
+    EndTurn,
+    MaxTokens,
+    ToolUse,
+    Refusal,
+}
+
+#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize, Default)]
+pub struct TokenUsage {
+    #[serde(default, skip_serializing_if = "is_default")]
+    pub input_tokens: u64,
+    #[serde(default, skip_serializing_if = "is_default")]
+    pub output_tokens: u64,
+    #[serde(default, skip_serializing_if = "is_default")]
+    pub cache_creation_input_tokens: u64,
+    #[serde(default, skip_serializing_if = "is_default")]
+    pub cache_read_input_tokens: u64,
+}
+
+impl TokenUsage {
+    pub fn total_tokens(&self) -> u64 {
+        self.input_tokens
+            + self.output_tokens
+            + self.cache_read_input_tokens
+            + self.cache_creation_input_tokens
+    }
+}
+
impl Add<TokenUsage> for TokenUsage {
    type Output = Self;

    /// Component-wise sum of two usage records.
    fn add(self, other: Self) -> Self {
        Self {
            input_tokens: self.input_tokens + other.input_tokens,
            output_tokens: self.output_tokens + other.output_tokens,
            cache_creation_input_tokens: self.cache_creation_input_tokens
                + other.cache_creation_input_tokens,
            cache_read_input_tokens: self.cache_read_input_tokens + other.cache_read_input_tokens,
        }
    }
}
+
+impl Sub<TokenUsage> for TokenUsage {
+    type Output = Self;
+
+    fn sub(self, other: Self) -> Self {
+        Self {
+            input_tokens: self.input_tokens - other.input_tokens,
+            output_tokens: self.output_tokens - other.output_tokens,
+            cache_creation_input_tokens: self.cache_creation_input_tokens
+                - other.cache_creation_input_tokens,
+            cache_read_input_tokens: self.cache_read_input_tokens - other.cache_read_input_tokens,
+        }
+    }
+}
+
/// Identifier of a single tool invocation within a conversation.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelToolUseId(Arc<str>);

impl fmt::Display for LanguageModelToolUseId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

// Blanket conversion from anything convertible to `Arc<str>` (`&str`, `String`, ...).
impl<T> From<T> for LanguageModelToolUseId
where
    T: Into<Arc<str>>,
{
    fn from(value: T) -> Self {
        Self(value.into())
    }
}

/// A tool call requested by the model.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelToolUse {
    pub id: LanguageModelToolUseId,
    /// Name of the tool being invoked.
    pub name: Arc<str>,
    // Raw input text for the call; presumably the unparsed form of `input`
    // as streamed from the provider — TODO(review) confirm against callers.
    pub raw_input: String,
    /// Parsed JSON input for the tool.
    pub input: serde_json::Value,
    /// Whether `input` reflects the complete (fully streamed) tool input.
    pub is_input_complete: bool,
    /// Thought signature the model sent us. Some models require that this
    /// signature be preserved and sent back in conversation history for validation.
    pub thought_signature: Option<String>,
}

/// An effort level a model exposes for selection.
#[derive(Debug, Clone)]
pub struct LanguageModelEffortLevel {
    /// Display name of the level.
    pub name: SharedString,
    /// Value sent to the provider for this level.
    pub value: SharedString,
    /// Whether this level is the model's default.
    pub is_default: bool,
}

/// An error that occurred when trying to authenticate the language model provider.
#[derive(Debug, Error)]
pub enum AuthenticateError {
    #[error("connection refused")]
    ConnectionRefused,
    #[error("credentials not found")]
    CredentialsNotFound,
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}

/// Machine-readable identifier of a model.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd, Serialize, Deserialize)]
pub struct LanguageModelId(pub SharedString);

/// Human-readable name of a model.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelName(pub SharedString);

/// Machine-readable identifier of a provider (e.g. "anthropic").
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderId(pub SharedString);

/// Human-readable name of a provider (e.g. "Anthropic").
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderName(pub SharedString);

impl LanguageModelProviderId {
    /// Const constructor from a static string, usable in `const` items.
    pub const fn new(id: &'static str) -> Self {
        Self(SharedString::new_static(id))
    }
}

impl LanguageModelProviderName {
    /// Const constructor from a static string, usable in `const` items.
    pub const fn new(id: &'static str) -> Self {
        Self(SharedString::new_static(id))
    }
}

impl fmt::Display for LanguageModelProviderId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl fmt::Display for LanguageModelProviderName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
+
+impl From<String> for LanguageModelId {
+    fn from(value: String) -> Self {
+        Self(SharedString::from(value))
+    }
+}
+
+impl From<String> for LanguageModelName {
+    fn from(value: String) -> Self {
+        Self(SharedString::from(value))
+    }
+}
+
+impl From<String> for LanguageModelProviderId {
+    fn from(value: String) -> Self {
+        Self(SharedString::from(value))
+    }
+}
+
+impl From<String> for LanguageModelProviderName {
+    fn from(value: String) -> Self {
+        Self(SharedString::from(value))
+    }
+}
+
+impl From<Arc<str>> for LanguageModelProviderId {
+    fn from(value: Arc<str>) -> Self {
+        Self(SharedString::from(value))
+    }
+}
+
+impl From<Arc<str>> for LanguageModelProviderName {
+    fn from(value: Arc<str>) -> Self {
+        Self(SharedString::from(value))
+    }
+}
+
/// Settings-layer–free model mode enum.
///
/// Mirrors the shape of `settings_content::ModelMode` but lives here so that
/// crates below the settings layer can reference it.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    /// Plain completion mode.
    #[default]
    Default,
    /// Extended "thinking" mode; `budget_tokens` optionally caps the thinking
    /// token budget.
    Thinking {
        budget_tokens: Option<u32>,
    },
}

/// Settings-layer–free reasoning-effort enum.
///
/// Mirrors the shape of `settings_content::OpenAiReasoningEffort` but lives
/// here so that crates below the settings layer can reference it.
///
/// Serializes in lowercase both via serde and via `strum` (`FromStr`).
#[derive(
    Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, strum::EnumString,
)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum ReasoningEffort {
    Minimal,
    Low,
    Medium,
    High,
    XHigh,
}
+
#[cfg(test)]
mod tests {
    use super::*;

    // `upstream_http_error` payloads embed the real status in an
    // `upstream_status` JSON field: 503 must map to ServerOverloaded and 500
    // to ApiInternalServerError (with the inner `message` extracted).
    #[test]
    fn test_from_cloud_failure_with_upstream_http_error() {
        let error = LanguageModelCompletionError::from_cloud_failure(
            String::from("anthropic").into(),
            "upstream_http_error".to_string(),
            r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout","upstream_status":503}"#.to_string(),
            None,
        );

        match error {
            LanguageModelCompletionError::ServerOverloaded { provider, .. } => {
                assert_eq!(provider.0, "anthropic");
            }
            _ => panic!(
                "Expected ServerOverloaded error for 503 status, got: {:?}",
                error
            ),
        }

        let error = LanguageModelCompletionError::from_cloud_failure(
            String::from("anthropic").into(),
            "upstream_http_error".to_string(),
            r#"{"code":"upstream_http_error","message":"Internal server error","upstream_status":500}"#.to_string(),
            None,
        );

        match error {
            LanguageModelCompletionError::ApiInternalServerError { provider, message } => {
                assert_eq!(provider.0, "anthropic");
                assert_eq!(message, "Internal server error");
            }
            _ => panic!(
                "Expected ApiInternalServerError for 500 status, got: {:?}",
                error
            ),
        }
    }

    // The "upstream_http_<status>" code format (status in the code itself,
    // not in a JSON payload) should map the same way.
    #[test]
    fn test_from_cloud_failure_with_standard_format() {
        let error = LanguageModelCompletionError::from_cloud_failure(
            String::from("anthropic").into(),
            "upstream_http_503".to_string(),
            "Service unavailable".to_string(),
            None,
        );

        match error {
            LanguageModelCompletionError::ServerOverloaded { provider, .. } => {
                assert_eq!(provider.0, "anthropic");
            }
            _ => panic!("Expected ServerOverloaded error for upstream_http_503"),
        }
    }

    // NOTE(review): the 503 half of this test duplicates
    // test_from_cloud_failure_with_upstream_http_error above; consider
    // consolidating. The 500 half additionally checks that the full
    // connection-timeout message is preserved verbatim.
    #[test]
    fn test_upstream_http_error_connection_timeout() {
        let error = LanguageModelCompletionError::from_cloud_failure(
            String::from("anthropic").into(),
            "upstream_http_error".to_string(),
            r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout","upstream_status":503}"#.to_string(),
            None,
        );

        match error {
            LanguageModelCompletionError::ServerOverloaded { provider, .. } => {
                assert_eq!(provider.0, "anthropic");
            }
            _ => panic!(
                "Expected ServerOverloaded error for connection timeout with 503 status, got: {:?}",
                error
            ),
        }

        let error = LanguageModelCompletionError::from_cloud_failure(
            String::from("anthropic").into(),
            "upstream_http_error".to_string(),
            r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout","upstream_status":500}"#.to_string(),
            None,
        );

        match error {
            LanguageModelCompletionError::ApiInternalServerError { provider, message } => {
                assert_eq!(provider.0, "anthropic");
                assert_eq!(
                    message,
                    "Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers. reset reason: connection timeout"
                );
            }
            _ => panic!(
                "Expected ApiInternalServerError for connection timeout with 500 status, got: {:?}",
                error
            ),
        }
    }

    // `thought_signature` must be written out during serialization so it can
    // be replayed to providers that validate it.
    #[test]
    fn test_language_model_tool_use_serializes_with_signature() {
        use serde_json::json;

        let tool_use = LanguageModelToolUse {
            id: LanguageModelToolUseId::from("test_id"),
            name: "test_tool".into(),
            raw_input: json!({"arg": "value"}).to_string(),
            input: json!({"arg": "value"}),
            is_input_complete: true,
            thought_signature: Some("test_signature".to_string()),
        };

        let serialized = serde_json::to_value(&tool_use).unwrap();

        assert_eq!(serialized["id"], "test_id");
        assert_eq!(serialized["name"], "test_tool");
        assert_eq!(serialized["thought_signature"], "test_signature");
    }

    // A payload with no `thought_signature` field must deserialize with the
    // field defaulting to None (serde's implicit Option default).
    #[test]
    fn test_language_model_tool_use_deserializes_with_missing_signature() {
        use serde_json::json;

        let json = json!({
            "id": "test_id",
            "name": "test_tool",
            "raw_input": "{\"arg\":\"value\"}",
            "input": {"arg": "value"},
            "is_input_complete": true
        });

        let tool_use: LanguageModelToolUse = serde_json::from_value(json).unwrap();

        assert_eq!(tool_use.id, LanguageModelToolUseId::from("test_id"));
        assert_eq!(tool_use.name.as_ref(), "test_tool");
        assert_eq!(tool_use.thought_signature, None);
    }

    // Serialize → deserialize must preserve a present signature...
    #[test]
    fn test_language_model_tool_use_round_trip_with_signature() {
        use serde_json::json;

        let original = LanguageModelToolUse {
            id: LanguageModelToolUseId::from("round_trip_id"),
            name: "round_trip_tool".into(),
            raw_input: json!({"key": "value"}).to_string(),
            input: json!({"key": "value"}),
            is_input_complete: true,
            thought_signature: Some("round_trip_sig".to_string()),
        };

        let serialized = serde_json::to_value(&original).unwrap();
        let deserialized: LanguageModelToolUse = serde_json::from_value(serialized).unwrap();

        assert_eq!(deserialized.id, original.id);
        assert_eq!(deserialized.name, original.name);
        assert_eq!(deserialized.thought_signature, original.thought_signature);
    }

    // ...and the absence of one.
    #[test]
    fn test_language_model_tool_use_round_trip_without_signature() {
        use serde_json::json;

        let original = LanguageModelToolUse {
            id: LanguageModelToolUseId::from("no_sig_id"),
            name: "no_sig_tool".into(),
            raw_input: json!({"arg": "value"}).to_string(),
            input: json!({"arg": "value"}),
            is_input_complete: true,
            thought_signature: None,
        };

        let serialized = serde_json::to_value(&original).unwrap();
        let deserialized: LanguageModelToolUse = serde_json::from_value(serialized).unwrap();

        assert_eq!(deserialized.id, original.id);
        assert_eq!(deserialized.name, original.name);
        assert_eq!(deserialized.thought_signature, None);
    }
}

crates/language_model_core/src/provider.rs 🔗

@@ -0,0 +1,21 @@
+use crate::{LanguageModelProviderId, LanguageModelProviderName};
+
// Built-in providers: each gets a stable machine-readable id and a
// human-readable display name. The ids are referenced in settings and
// serialized data, so they must not change.
pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId =
    LanguageModelProviderId::new("anthropic");
pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName =
    LanguageModelProviderName::new("Anthropic");

pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai");
pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName =
    LanguageModelProviderName::new("OpenAI");

pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google");
pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName =
    LanguageModelProviderName::new("Google AI");

pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai");
pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI");

pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev");
pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
    LanguageModelProviderName::new("Zed");

crates/language_model_core/src/request.rs 🔗

@@ -0,0 +1,463 @@
+use std::sync::Arc;
+
+use serde::{Deserialize, Serialize};
+
+use crate::role::Role;
+use crate::{LanguageModelToolUse, LanguageModelToolUseId, SharedString};
+
/// Dimensions of a `LanguageModelImage`
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ImageSize {
    /// Width in pixels.
    pub width: i32,
    /// Height in pixels.
    pub height: i32,
}

/// An image attached to a request or tool result.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct LanguageModelImage {
    /// A base64-encoded PNG image.
    pub source: SharedString,
    /// Pixel dimensions, when known; omitted from serialization when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub size: Option<ImageSize>,
}
+
+impl LanguageModelImage {
+    pub fn len(&self) -> usize {
+        self.source.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.source.is_empty()
+    }
+
+    pub fn empty() -> Self {
+        Self {
+            source: "".into(),
+            size: None,
+        }
+    }
+
+    /// Parse Self from a JSON object with case-insensitive field names
+    pub fn from_json(obj: &serde_json::Map<String, serde_json::Value>) -> Option<Self> {
+        let mut source = None;
+        let mut size_obj = None;
+
+        for (k, v) in obj.iter() {
+            match k.to_lowercase().as_str() {
+                "source" => source = v.as_str(),
+                "size" => size_obj = v.as_object(),
+                _ => {}
+            }
+        }
+
+        let source = source?;
+        let size_obj = size_obj?;
+
+        let mut width = None;
+        let mut height = None;
+
+        for (k, v) in size_obj.iter() {
+            match k.to_lowercase().as_str() {
+                "width" => width = v.as_i64().map(|w| w as i32),
+                "height" => height = v.as_i64().map(|h| h as i32),
+                _ => {}
+            }
+        }
+
+        Some(Self {
+            size: Some(ImageSize {
+                width: width?,
+                height: height?,
+            }),
+            source: SharedString::from(source.to_string()),
+        })
+    }
+
+    pub fn estimate_tokens(&self) -> usize {
+        let Some(size) = self.size.as_ref() else {
+            return 0;
+        };
+        let width = size.width.unsigned_abs() as usize;
+        let height = size.height.unsigned_abs() as usize;
+
+        // From: https://docs.anthropic.com/en/docs/build-with-claude/vision#calculate-image-costs
+        (width * height) / 750
+    }
+
+    pub fn to_base64_url(&self) -> String {
+        format!("data:image/png;base64,{}", self.source)
+    }
+}
+
+impl std::fmt::Debug for LanguageModelImage {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("LanguageModelImage")
+            .field("source", &format!("<{} bytes>", self.source.len()))
+            .field("size", &self.size)
+            .finish()
+    }
+}
+
/// The outcome of a tool call, sent back to the model.
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub struct LanguageModelToolResult {
    /// Id of the `LanguageModelToolUse` this result answers.
    pub tool_use_id: LanguageModelToolUseId,
    /// Name of the tool that produced this result.
    pub tool_name: Arc<str>,
    /// True when the tool failed and `content` describes the error.
    pub is_error: bool,
    /// The tool output formatted for presenting to the model
    pub content: LanguageModelToolResultContent,
    /// The raw tool output, if available, often for debugging or extra state for replay
    pub output: Option<serde_json::Value>,
}

/// Payload of a tool result: plain text or a single image.
///
/// Serializes via the derived (externally tagged) representation;
/// deserialization is intentionally lenient — see the manual `Deserialize`
/// impl in this file.
#[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)]
pub enum LanguageModelToolResultContent {
    Text(Arc<str>),
    Image(LanguageModelImage),
}
+
// Lenient deserialization: accepts, in order, a plain JSON string; a
// `{"type": "text", "text": ...}` object (keys and tag case-insensitive); a
// single-key `{"text": ...}` wrapper; a single-key `{"image": {...}}`
// wrapper; or a bare image object. The fallback order matters — text shapes
// are tried before image shapes.
impl<'de> Deserialize<'de> for LanguageModelToolResultContent {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de::Error;

        let value = serde_json::Value::deserialize(deserializer)?;

        // 1. Try as plain string
        if let Ok(text) = serde_json::from_value::<String>(value.clone()) {
            return Ok(Self::Text(Arc::from(text)));
        }

        // 2. Try as object
        if let Some(obj) = value.as_object() {
            // Case-insensitive field lookup (linear scan over the map's keys).
            fn get_field<'a>(
                obj: &'a serde_json::Map<String, serde_json::Value>,
                field: &str,
            ) -> Option<&'a serde_json::Value> {
                obj.iter()
                    .find(|(k, _)| k.to_lowercase() == field.to_lowercase())
                    .map(|(_, v)| v)
            }

            // Accept wrapped text format: { "type": "text", "text": "..." }
            if let (Some(type_value), Some(text_value)) =
                (get_field(obj, "type"), get_field(obj, "text"))
                && let Some(type_str) = type_value.as_str()
                && type_str.to_lowercase() == "text"
                && let Some(text) = text_value.as_str()
            {
                return Ok(Self::Text(Arc::from(text)));
            }

            // Check for wrapped Text variant: { "text": "..." }
            // (only when "text" is the sole key, to avoid matching richer objects)
            if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "text")
                && obj.len() == 1
            {
                if let Some(text) = value.as_str() {
                    return Ok(Self::Text(Arc::from(text)));
                }
            }

            // Check for wrapped Image variant: { "image": { "source": "...", "size": ... } }
            if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "image")
                && obj.len() == 1
            {
                if let Some(image_obj) = value.as_object()
                    && let Some(image) = LanguageModelImage::from_json(image_obj)
                {
                    return Ok(Self::Image(image));
                }
            }

            // Try as direct Image
            if let Some(image) = LanguageModelImage::from_json(obj) {
                return Ok(Self::Image(image));
            }
        }

        Err(D::Error::custom(format!(
            "data did not match any variant of LanguageModelToolResultContent. Expected either a string, \
             an object with 'type': 'text', a wrapped variant like {{\"Text\": \"...\"}}, or an image object. Got: {}",
            serde_json::to_string_pretty(&value).unwrap_or_else(|_| value.to_string())
        )))
    }
}
+
+impl LanguageModelToolResultContent {
+    pub fn to_str(&self) -> Option<&str> {
+        match self {
+            Self::Text(text) => Some(text),
+            Self::Image(_) => None,
+        }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        match self {
+            Self::Text(text) => text.chars().all(|c| c.is_whitespace()),
+            Self::Image(_) => false,
+        }
+    }
+}
+
impl From<&str> for LanguageModelToolResultContent {
    fn from(value: &str) -> Self {
        Self::Text(Arc::from(value))
    }
}

impl From<String> for LanguageModelToolResultContent {
    fn from(value: String) -> Self {
        Self::Text(Arc::from(value))
    }
}

impl From<LanguageModelImage> for LanguageModelToolResultContent {
    fn from(image: LanguageModelImage) -> Self {
        Self::Image(image)
    }
}

/// One piece of content within a request message.
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub enum MessageContent {
    Text(String),
    /// Model "thinking" output; `signature` may need to be replayed to the
    /// provider alongside the text.
    Thinking {
        text: String,
        signature: Option<String>,
    },
    /// Opaque redacted-thinking payload.
    RedactedThinking(String),
    Image(LanguageModelImage),
    ToolUse(LanguageModelToolUse),
    ToolResult(LanguageModelToolResult),
}
+
+impl MessageContent {
+    pub fn to_str(&self) -> Option<&str> {
+        match self {
+            MessageContent::Text(text) => Some(text.as_str()),
+            MessageContent::Thinking { text, .. } => Some(text.as_str()),
+            MessageContent::RedactedThinking(_) => None,
+            MessageContent::ToolResult(tool_result) => tool_result.content.to_str(),
+            MessageContent::ToolUse(_) | MessageContent::Image(_) => None,
+        }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        match self {
+            MessageContent::Text(text) => text.chars().all(|c| c.is_whitespace()),
+            MessageContent::Thinking { text, .. } => text.chars().all(|c| c.is_whitespace()),
+            MessageContent::ToolResult(tool_result) => tool_result.content.is_empty(),
+            MessageContent::RedactedThinking(_)
+            | MessageContent::ToolUse(_)
+            | MessageContent::Image(_) => false,
+        }
+    }
+}
+
impl From<String> for MessageContent {
    fn from(value: String) -> Self {
        MessageContent::Text(value)
    }
}

impl From<&str> for MessageContent {
    fn from(value: &str) -> Self {
        MessageContent::Text(value.to_string())
    }
}

/// One message in a `LanguageModelRequest` conversation.
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Hash)]
pub struct LanguageModelRequestMessage {
    pub role: Role,
    pub content: Vec<MessageContent>,
    // NOTE(review): presumably marks this message as a prompt-cache
    // breakpoint for providers that support caching — confirm with callers.
    pub cache: bool,
    /// Provider-specific reasoning metadata carried alongside the message,
    /// preserved when present.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_details: Option<serde_json::Value>,
}
+
+impl LanguageModelRequestMessage {
+    pub fn string_contents(&self) -> String {
+        let mut buffer = String::new();
+        for string in self.content.iter().filter_map(|content| content.to_str()) {
+            buffer.push_str(string);
+        }
+        buffer
+    }
+
+    pub fn contents_empty(&self) -> bool {
+        self.content.iter().all(|content| content.is_empty())
+    }
+}
+
/// A tool definition advertised to the model in a request.
#[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelRequestTool {
    pub name: String,
    pub description: String,
    /// JSON Schema describing the tool's input.
    pub input_schema: serde_json::Value,
    /// Whether partial tool input should be streamed while it is generated.
    pub use_input_streaming: bool,
}

/// How the model may choose tools for a turn.
#[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
pub enum LanguageModelToolChoice {
    /// The model decides whether to call a tool.
    Auto,
    /// The model must call some tool.
    Any,
    /// The model must not call tools.
    None,
}

/// The purpose for which a completion is being requested.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum CompletionIntent {
    UserPrompt,
    Subagent,
    ToolResults,
    ThreadSummarization,
    ThreadContextSummarization,
    CreateFile,
    EditFile,
    InlineAssist,
    TerminalInlineAssist,
    GenerateGitCommitMessage,
}

/// A complete request to a language model: conversation history, available
/// tools, and sampling parameters.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelRequest {
    pub thread_id: Option<String>,
    pub prompt_id: Option<String>,
    pub intent: Option<CompletionIntent>,
    pub messages: Vec<LanguageModelRequestMessage>,
    pub tools: Vec<LanguageModelRequestTool>,
    pub tool_choice: Option<LanguageModelToolChoice>,
    /// Stop sequences that end generation when produced.
    pub stop: Vec<String>,
    pub temperature: Option<f32>,
    /// Whether extended "thinking" output is permitted for this request.
    pub thinking_allowed: bool,
    /// Provider-specific thinking-effort hint, when set.
    pub thinking_effort: Option<String>,
    pub speed: Option<Speed>,
}

/// Requested completion speed tier.
#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum Speed {
    #[default]
    Standard,
    Fast,
}
+
+impl Speed {
+    pub fn toggle(self) -> Self {
+        match self {
+            Speed::Standard => Speed::Fast,
+            Speed::Fast => Speed::Standard,
+        }
+    }
+}
+
/// A message as it appears in a model's response payload; both fields are
/// optional because providers may omit either.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
    pub role: Option<Role>,
    pub content: Option<String>,
}
+
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises every input shape accepted by the lenient
    // `LanguageModelToolResultContent` deserializer.
    #[test]
    fn test_language_model_tool_result_content_deserialization() {
        // Test plain string
        let json = serde_json::json!("hello world");
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        assert_eq!(
            content,
            LanguageModelToolResultContent::Text(Arc::from("hello world"))
        );

        // Test wrapped text format: { "type": "text", "text": "..." }
        let json = serde_json::json!({"type": "text", "text": "hello"});
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        assert_eq!(
            content,
            LanguageModelToolResultContent::Text(Arc::from("hello"))
        );

        // Test single-field text object: { "text": "..." }
        let json = serde_json::json!({"text": "hello"});
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        assert_eq!(
            content,
            LanguageModelToolResultContent::Text(Arc::from("hello"))
        );

        // Test case-insensitive type field
        let json = serde_json::json!({"Type": "Text", "Text": "hello"});
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        assert_eq!(
            content,
            LanguageModelToolResultContent::Text(Arc::from("hello"))
        );

        // Test image object
        let json = serde_json::json!({
            "source": "base64encodedimagedata",
            "size": {"width": 100, "height": 200}
        });
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        match content {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "base64encodedimagedata");
                let size = image.size.expect("size");
                assert_eq!(size.width, 100);
                assert_eq!(size.height, 200);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test wrapped image: { "image": { "source": "...", "size": ... } }
        let json = serde_json::json!({
            "image": {
                "source": "wrappedimagedata",
                "size": {"width": 50, "height": 75}
            }
        });
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        match content {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "wrappedimagedata");
                let size = image.size.expect("size");
                assert_eq!(size.width, 50);
                assert_eq!(size.height, 75);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test case insensitive
        let json = serde_json::json!({
            "Source": "caseinsensitive",
            "Size": {"Width": 30, "Height": 40}
        });
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        match content {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "caseinsensitive");
                let size = image.size.expect("size");
                assert_eq!(size.width, 30);
                assert_eq!(size.height, 40);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test direct image object
        // NOTE(review): duplicates the "Test image object" case above (only
        // the literal values differ); consider removing one.
        let json = serde_json::json!({
            "source": "directimage",
            "size": {"width": 200, "height": 300}
        });
        let content: LanguageModelToolResultContent = serde_json::from_value(json).unwrap();
        match content {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "directimage");
                let size = image.size.expect("size");
                assert_eq!(size.width, 200);
                assert_eq!(size.height, 300);
            }
            _ => panic!("Expected Image variant"),
        }
    }
}

crates/language_model/src/tool_schema.rs → crates/language_model_core/src/tool_schema.rs 🔗

@@ -77,8 +77,6 @@ pub fn adapt_schema_to_format(
 }
 
 fn preprocess_json_schema(json: &mut Value) -> Result<()> {
-    // `additionalProperties` defaults to `false` unless explicitly specified.
-    // This prevents models from hallucinating tool parameters.
     if let Value::Object(obj) = json
         && matches!(obj.get("type"), Some(Value::String(s)) if s == "object")
     {
@@ -86,7 +84,6 @@ fn preprocess_json_schema(json: &mut Value) -> Result<()> {
             obj.insert("additionalProperties".to_string(), Value::Bool(false));
         }
 
-        // OpenAI API requires non-missing `properties`
         if !obj.contains_key("properties") {
             obj.insert("properties".to_string(), Value::Object(Default::default()));
         }
@@ -94,7 +91,6 @@ fn preprocess_json_schema(json: &mut Value) -> Result<()> {
     Ok(())
 }
 
-/// Tries to adapt the json schema so that it is compatible with https://ai.google.dev/api/caching#Schema
 fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
     if let Value::Object(obj) = json {
         const UNSUPPORTED_KEYS: [&str; 4] = ["if", "then", "else", "$ref"];
@@ -108,9 +104,7 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
 
         const KEYS_TO_REMOVE: [(&str, fn(&Value) -> bool); 6] = [
             ("format", |value| value.is_string()),
-            // Gemini doesn't support `additionalProperties` in any form (boolean or schema object)
             ("additionalProperties", |_| true),
-            // Gemini doesn't support `propertyNames`
             ("propertyNames", |_| true),
             ("exclusiveMinimum", |value| value.is_number()),
             ("exclusiveMaximum", |value| value.is_number()),
@@ -124,7 +118,6 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
             }
         }
 
-        // If a type is not specified for an input parameter, add a default type
         if matches!(obj.get("description"), Some(Value::String(_)))
             && !obj.contains_key("type")
             && !(obj.contains_key("anyOf")
@@ -134,7 +127,6 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
             obj.insert("type".to_string(), Value::String("string".to_string()));
         }
 
-        // Handle oneOf -> anyOf conversion
         if let Some(subschemas) = obj.get_mut("oneOf")
             && subschemas.is_array()
         {
@@ -143,7 +135,6 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
             obj.insert("anyOf".to_string(), subschemas_clone);
         }
 
-        // Recursively process all nested objects and arrays
         for (_, value) in obj.iter_mut() {
             if let Value::Object(_) | Value::Array(_) = value {
                 adapt_to_json_schema_subset(value)?;
@@ -178,7 +169,6 @@ mod tests {
             })
         );
 
-        // Ensure that we do not add a type if it is an object
         let mut json = json!({
             "description": {
                 "value": "abc",
@@ -221,7 +211,6 @@ mod tests {
             })
         );
 
-        // Ensure that we do not remove keys that are actually supported (e.g. "format" can just be used as another property)
         let mut json = json!({
             "description": "A test field",
             "type": "integer",
@@ -239,7 +228,6 @@ mod tests {
             })
         );
 
-        // additionalProperties as an object schema is also unsupported by Gemini
         let mut json = json!({
             "type": "object",
             "properties": {

crates/language_models/src/provider/util.rs → crates/language_model_core/src/util.rs 🔗

@@ -38,13 +38,22 @@ fn strip_trailing_incomplete_escape(json: &str) -> &str {
     }
 }
 
+/// Parses a "prompt is too long: N tokens ..." message and extracts the token count.
+pub fn parse_prompt_too_long(message: &str) -> Option<u64> {
+    message
+        .strip_prefix("prompt is too long: ")?
+        .split_once(" tokens")?
+        .0
+        .parse()
+        .ok()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
 
     #[test]
     fn test_fix_streamed_json_strips_incomplete_escape() {
-        // Trailing `\` inside a string — incomplete escape sequence
         let fixed = fix_streamed_json(r#"{"text": "hello\"#);
         let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
         assert_eq!(parsed["text"], "hello");
@@ -52,7 +61,6 @@ mod tests {
 
     #[test]
     fn test_fix_streamed_json_preserves_complete_escape() {
-        // `\\` is a complete escape (literal backslash)
         let fixed = fix_streamed_json(r#"{"text": "hello\\"#);
         let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
         assert_eq!(parsed["text"], "hello\\");
@@ -60,7 +68,6 @@ mod tests {
 
     #[test]
     fn test_fix_streamed_json_strips_escape_after_complete_escape() {
-        // `\\\` = complete `\\` (literal backslash) + incomplete `\`
         let fixed = fix_streamed_json(r#"{"text": "hello\\\"#);
         let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
         assert_eq!(parsed["text"], "hello\\");
@@ -75,12 +82,10 @@ mod tests {
 
     #[test]
     fn test_fix_streamed_json_newline_escape_boundary() {
-        // Simulates a stream boundary landing between `\` and `n`
         let fixed = fix_streamed_json(r#"{"text": "line1\"#);
         let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
         assert_eq!(parsed["text"], "line1");
 
-        // Next chunk completes the escape
         let fixed = fix_streamed_json(r#"{"text": "line1\nline2"#);
         let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json");
         assert_eq!(parsed["text"], "line1\nline2");
@@ -88,8 +93,6 @@ mod tests {
 
     #[test]
     fn test_fix_streamed_json_incremental_delta_correctness() {
-        // This is the actual scenario that causes the bug:
-        // chunk 1 ends mid-escape, chunk 2 completes it.
         let chunk1 = r#"{"replacement_text": "fn foo() {\"#;
         let fixed1 = fix_streamed_json(chunk1);
         let parsed1: serde_json::Value = serde_json::from_str(&fixed1).expect("valid json");
@@ -102,7 +105,6 @@ mod tests {
         let text2 = parsed2["replacement_text"].as_str().expect("string");
         assert_eq!(text2, "fn foo() {\n    return bar;\n}");
 
-        // The delta should be the newline + rest, with no spurious backslash
         let delta = &text2[text1.len()..];
         assert_eq!(delta, "\n    return bar;\n}");
     }

crates/language_models/Cargo.toml 🔗

@@ -21,8 +21,8 @@ aws_http_client.workspace = true
 base64.workspace = true
 bedrock = { workspace = true, features = ["schemars"] }
 client.workspace = true
+cloud_api_client.workspace = true
 cloud_api_types.workspace = true
-cloud_llm_client.workspace = true
 collections.workspace = true
 component.workspace = true
 convert_case.workspace = true
@@ -41,6 +41,7 @@ gpui_tokio.workspace = true
 http_client.workspace = true
 language.workspace = true
 language_model.workspace = true
+language_models_cloud.workspace = true
 lmstudio = { workspace = true, features = ["schemars"] }
 log.workspace = true
 menu.workspace = true
@@ -49,16 +50,13 @@ ollama = { workspace = true, features = ["schemars"] }
 open_ai = { workspace = true, features = ["schemars"] }
 opencode = { workspace = true, features = ["schemars"] }
 open_router = { workspace = true, features = ["schemars"] }
-partial-json-fixer.workspace = true
 release_channel.workspace = true
 schemars.workspace = true
-semver.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 settings.workspace = true
 smol.workspace = true
 strum.workspace = true
-thiserror.workspace = true
 tiktoken-rs.workspace = true
 tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
 ui.workspace = true
@@ -70,4 +68,3 @@ x_ai = { workspace = true, features = ["schemars"] }
 [dev-dependencies]
 language_model = { workspace = true, features = ["test-support"] }
 pretty_assertions.workspace = true
-

crates/language_models/src/provider.rs 🔗

@@ -11,7 +11,7 @@ pub mod open_ai;
 pub mod open_ai_compatible;
 pub mod open_router;
 pub mod opencode;
-mod util;
+
 pub mod vercel;
 pub mod vercel_ai_gateway;
 pub mod x_ai;

crates/language_models/src/provider/anthropic.rs 🔗

@@ -1,13 +1,10 @@
 pub mod telemetry;
 
-use anthropic::{
-    ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, CountTokensRequest, Event,
-    ResponseContent, ToolResultContent, ToolResultPart, Usage,
-};
+use anthropic::{ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode};
 use anyhow::Result;
-use collections::{BTreeMap, HashMap};
+use collections::BTreeMap;
 use credentials_provider::CredentialsProvider;
-use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream};
+use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
 use gpui::{AnyView, App, AsyncApp, Context, Entity, Task};
 use http_client::HttpClient;
 use language_model::{
@@ -16,20 +13,19 @@ use language_model::{
     LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent,
     LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
     LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
-    LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
-    RateLimiter, Role, StopReason, env_var,
+    LanguageModelToolChoice, RateLimiter, env_var,
 };
 use settings::{Settings, SettingsStore};
-use std::pin::Pin;
-use std::str::FromStr;
 use std::sync::{Arc, LazyLock};
 use strum::IntoEnumIterator;
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
-
+pub use anthropic::completion::{
+    AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic,
+    into_anthropic_count_tokens_request,
+};
 pub use settings::AnthropicAvailableModel as AvailableModel;
 
 const PROVIDER_ID: LanguageModelProviderId = ANTHROPIC_PROVIDER_ID;
@@ -249,228 +245,6 @@ pub struct AnthropicModel {
     request_limiter: RateLimiter,
 }
 
-fn to_anthropic_content(content: MessageContent) -> Option<anthropic::RequestContent> {
-    match content {
-        MessageContent::Text(text) => {
-            let text = if text.chars().last().is_some_and(|c| c.is_whitespace()) {
-                text.trim_end().to_string()
-            } else {
-                text
-            };
-            if !text.is_empty() {
-                Some(anthropic::RequestContent::Text {
-                    text,
-                    cache_control: None,
-                })
-            } else {
-                None
-            }
-        }
-        MessageContent::Thinking {
-            text: thinking,
-            signature,
-        } => {
-            if let Some(signature) = signature
-                && !thinking.is_empty()
-            {
-                Some(anthropic::RequestContent::Thinking {
-                    thinking,
-                    signature,
-                    cache_control: None,
-                })
-            } else {
-                None
-            }
-        }
-        MessageContent::RedactedThinking(data) => {
-            if !data.is_empty() {
-                Some(anthropic::RequestContent::RedactedThinking { data })
-            } else {
-                None
-            }
-        }
-        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
-            source: anthropic::ImageSource {
-                source_type: "base64".to_string(),
-                media_type: "image/png".to_string(),
-                data: image.source.to_string(),
-            },
-            cache_control: None,
-        }),
-        MessageContent::ToolUse(tool_use) => Some(anthropic::RequestContent::ToolUse {
-            id: tool_use.id.to_string(),
-            name: tool_use.name.to_string(),
-            input: tool_use.input,
-            cache_control: None,
-        }),
-        MessageContent::ToolResult(tool_result) => Some(anthropic::RequestContent::ToolResult {
-            tool_use_id: tool_result.tool_use_id.to_string(),
-            is_error: tool_result.is_error,
-            content: match tool_result.content {
-                LanguageModelToolResultContent::Text(text) => {
-                    ToolResultContent::Plain(text.to_string())
-                }
-                LanguageModelToolResultContent::Image(image) => {
-                    ToolResultContent::Multipart(vec![ToolResultPart::Image {
-                        source: anthropic::ImageSource {
-                            source_type: "base64".to_string(),
-                            media_type: "image/png".to_string(),
-                            data: image.source.to_string(),
-                        },
-                    }])
-                }
-            },
-            cache_control: None,
-        }),
-    }
-}
-
-/// Convert a LanguageModelRequest to an Anthropic CountTokensRequest.
-pub fn into_anthropic_count_tokens_request(
-    request: LanguageModelRequest,
-    model: String,
-    mode: AnthropicModelMode,
-) -> CountTokensRequest {
-    let mut new_messages: Vec<anthropic::Message> = Vec::new();
-    let mut system_message = String::new();
-
-    for message in request.messages {
-        if message.contents_empty() {
-            continue;
-        }
-
-        match message.role {
-            Role::User | Role::Assistant => {
-                let anthropic_message_content: Vec<anthropic::RequestContent> = message
-                    .content
-                    .into_iter()
-                    .filter_map(to_anthropic_content)
-                    .collect();
-                let anthropic_role = match message.role {
-                    Role::User => anthropic::Role::User,
-                    Role::Assistant => anthropic::Role::Assistant,
-                    Role::System => unreachable!("System role should never occur here"),
-                };
-                if anthropic_message_content.is_empty() {
-                    continue;
-                }
-
-                if let Some(last_message) = new_messages.last_mut()
-                    && last_message.role == anthropic_role
-                {
-                    last_message.content.extend(anthropic_message_content);
-                    continue;
-                }
-
-                new_messages.push(anthropic::Message {
-                    role: anthropic_role,
-                    content: anthropic_message_content,
-                });
-            }
-            Role::System => {
-                if !system_message.is_empty() {
-                    system_message.push_str("\n\n");
-                }
-                system_message.push_str(&message.string_contents());
-            }
-        }
-    }
-
-    CountTokensRequest {
-        model,
-        messages: new_messages,
-        system: if system_message.is_empty() {
-            None
-        } else {
-            Some(anthropic::StringOrContents::String(system_message))
-        },
-        thinking: if request.thinking_allowed {
-            match mode {
-                AnthropicModelMode::Thinking { budget_tokens } => {
-                    Some(anthropic::Thinking::Enabled { budget_tokens })
-                }
-                AnthropicModelMode::AdaptiveThinking => Some(anthropic::Thinking::Adaptive),
-                AnthropicModelMode::Default => None,
-            }
-        } else {
-            None
-        },
-        tools: request
-            .tools
-            .into_iter()
-            .map(|tool| anthropic::Tool {
-                name: tool.name,
-                description: tool.description,
-                input_schema: tool.input_schema,
-                eager_input_streaming: tool.use_input_streaming,
-            })
-            .collect(),
-        tool_choice: request.tool_choice.map(|choice| match choice {
-            LanguageModelToolChoice::Auto => anthropic::ToolChoice::Auto,
-            LanguageModelToolChoice::Any => anthropic::ToolChoice::Any,
-            LanguageModelToolChoice::None => anthropic::ToolChoice::None,
-        }),
-    }
-}
-
-/// Estimate tokens using tiktoken. Used as a fallback when the API is unavailable,
-/// or by providers (like Zed Cloud) that don't have direct Anthropic API access.
-pub fn count_anthropic_tokens_with_tiktoken(request: LanguageModelRequest) -> Result<u64> {
-    let messages = request.messages;
-    let mut tokens_from_images = 0;
-    let mut string_messages = Vec::with_capacity(messages.len());
-
-    for message in messages {
-        let mut string_contents = String::new();
-
-        for content in message.content {
-            match content {
-                MessageContent::Text(text) => {
-                    string_contents.push_str(&text);
-                }
-                MessageContent::Thinking { .. } => {
-                    // Thinking blocks are not included in the input token count.
-                }
-                MessageContent::RedactedThinking(_) => {
-                    // Thinking blocks are not included in the input token count.
-                }
-                MessageContent::Image(image) => {
-                    tokens_from_images += image.estimate_tokens();
-                }
-                MessageContent::ToolUse(_tool_use) => {
-                    // TODO: Estimate token usage from tool uses.
-                }
-                MessageContent::ToolResult(tool_result) => match &tool_result.content {
-                    LanguageModelToolResultContent::Text(text) => {
-                        string_contents.push_str(text);
-                    }
-                    LanguageModelToolResultContent::Image(image) => {
-                        tokens_from_images += image.estimate_tokens();
-                    }
-                },
-            }
-        }
-
-        if !string_contents.is_empty() {
-            string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
-                role: match message.role {
-                    Role::User => "user".into(),
-                    Role::Assistant => "assistant".into(),
-                    Role::System => "system".into(),
-                },
-                content: Some(string_contents),
-                name: None,
-                function_call: None,
-            });
-        }
-    }
-
-    // Tiktoken doesn't yet support these models, so we manually use the
-    // same tokenizer as GPT-4.
-    tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
-        .map(|tokens| (tokens + tokens_from_images) as u64)
-}
-
 impl AnthropicModel {
     fn stream_completion(
         &self,
@@ -617,10 +391,13 @@ impl LanguageModel for AnthropicModel {
             )
         });
 
+        let background = cx.background_executor().clone();
         async move {
             // If no API key, fall back to tiktoken estimation
             let Some(api_key) = api_key else {
-                return count_anthropic_tokens_with_tiktoken(request);
+                return background
+                    .spawn(async move { count_anthropic_tokens_with_tiktoken(request) })
+                    .await;
             };
 
             let count_request =
@@ -634,7 +411,9 @@ impl LanguageModel for AnthropicModel {
                     log::error!(
                         "Anthropic count_tokens API failed, falling back to tiktoken: {err:?}"
                     );
-                    count_anthropic_tokens_with_tiktoken(request)
+                    background
+                        .spawn(async move { count_anthropic_tokens_with_tiktoken(request) })
+                        .await
                 }
             }
         }
@@ -678,345 +457,6 @@ impl LanguageModel for AnthropicModel {
     }
 }
 
-pub fn into_anthropic(
-    request: LanguageModelRequest,
-    model: String,
-    default_temperature: f32,
-    max_output_tokens: u64,
-    mode: AnthropicModelMode,
-) -> anthropic::Request {
-    let mut new_messages: Vec<anthropic::Message> = Vec::new();
-    let mut system_message = String::new();
-
-    for message in request.messages {
-        if message.contents_empty() {
-            continue;
-        }
-
-        match message.role {
-            Role::User | Role::Assistant => {
-                let mut anthropic_message_content: Vec<anthropic::RequestContent> = message
-                    .content
-                    .into_iter()
-                    .filter_map(to_anthropic_content)
-                    .collect();
-                let anthropic_role = match message.role {
-                    Role::User => anthropic::Role::User,
-                    Role::Assistant => anthropic::Role::Assistant,
-                    Role::System => unreachable!("System role should never occur here"),
-                };
-                if anthropic_message_content.is_empty() {
-                    continue;
-                }
-
-                if let Some(last_message) = new_messages.last_mut()
-                    && last_message.role == anthropic_role
-                {
-                    last_message.content.extend(anthropic_message_content);
-                    continue;
-                }
-
-                // Mark the last segment of the message as cached
-                if message.cache {
-                    let cache_control_value = Some(anthropic::CacheControl {
-                        cache_type: anthropic::CacheControlType::Ephemeral,
-                    });
-                    for message_content in anthropic_message_content.iter_mut().rev() {
-                        match message_content {
-                            anthropic::RequestContent::RedactedThinking { .. } => {
-                                // Caching is not possible, fallback to next message
-                            }
-                            anthropic::RequestContent::Text { cache_control, .. }
-                            | anthropic::RequestContent::Thinking { cache_control, .. }
-                            | anthropic::RequestContent::Image { cache_control, .. }
-                            | anthropic::RequestContent::ToolUse { cache_control, .. }
-                            | anthropic::RequestContent::ToolResult { cache_control, .. } => {
-                                *cache_control = cache_control_value;
-                                break;
-                            }
-                        }
-                    }
-                }
-
-                new_messages.push(anthropic::Message {
-                    role: anthropic_role,
-                    content: anthropic_message_content,
-                });
-            }
-            Role::System => {
-                if !system_message.is_empty() {
-                    system_message.push_str("\n\n");
-                }
-                system_message.push_str(&message.string_contents());
-            }
-        }
-    }
-
-    anthropic::Request {
-        model,
-        messages: new_messages,
-        max_tokens: max_output_tokens,
-        system: if system_message.is_empty() {
-            None
-        } else {
-            Some(anthropic::StringOrContents::String(system_message))
-        },
-        thinking: if request.thinking_allowed {
-            match mode {
-                AnthropicModelMode::Thinking { budget_tokens } => {
-                    Some(anthropic::Thinking::Enabled { budget_tokens })
-                }
-                AnthropicModelMode::AdaptiveThinking => Some(anthropic::Thinking::Adaptive),
-                AnthropicModelMode::Default => None,
-            }
-        } else {
-            None
-        },
-        tools: request
-            .tools
-            .into_iter()
-            .map(|tool| anthropic::Tool {
-                name: tool.name,
-                description: tool.description,
-                input_schema: tool.input_schema,
-                eager_input_streaming: tool.use_input_streaming,
-            })
-            .collect(),
-        tool_choice: request.tool_choice.map(|choice| match choice {
-            LanguageModelToolChoice::Auto => anthropic::ToolChoice::Auto,
-            LanguageModelToolChoice::Any => anthropic::ToolChoice::Any,
-            LanguageModelToolChoice::None => anthropic::ToolChoice::None,
-        }),
-        metadata: None,
-        output_config: if request.thinking_allowed
-            && matches!(mode, AnthropicModelMode::AdaptiveThinking)
-        {
-            request.thinking_effort.as_deref().and_then(|effort| {
-                let effort = match effort {
-                    "low" => Some(anthropic::Effort::Low),
-                    "medium" => Some(anthropic::Effort::Medium),
-                    "high" => Some(anthropic::Effort::High),
-                    "max" => Some(anthropic::Effort::Max),
-                    _ => None,
-                };
-                effort.map(|effort| anthropic::OutputConfig {
-                    effort: Some(effort),
-                })
-            })
-        } else {
-            None
-        },
-        stop_sequences: Vec::new(),
-        speed: request.speed.map(From::from),
-        temperature: request.temperature.or(Some(default_temperature)),
-        top_k: None,
-        top_p: None,
-    }
-}
-
-pub struct AnthropicEventMapper {
-    tool_uses_by_index: HashMap<usize, RawToolUse>,
-    usage: Usage,
-    stop_reason: StopReason,
-}
-
-impl AnthropicEventMapper {
-    pub fn new() -> Self {
-        Self {
-            tool_uses_by_index: HashMap::default(),
-            usage: Usage::default(),
-            stop_reason: StopReason::EndTurn,
-        }
-    }
-
-    pub fn map_stream(
-        mut self,
-        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
-    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
-    {
-        events.flat_map(move |event| {
-            futures::stream::iter(match event {
-                Ok(event) => self.map_event(event),
-                Err(error) => vec![Err(error.into())],
-            })
-        })
-    }
-
-    pub fn map_event(
-        &mut self,
-        event: Event,
-    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-        match event {
-            Event::ContentBlockStart {
-                index,
-                content_block,
-            } => match content_block {
-                ResponseContent::Text { text } => {
-                    vec![Ok(LanguageModelCompletionEvent::Text(text))]
-                }
-                ResponseContent::Thinking { thinking } => {
-                    vec![Ok(LanguageModelCompletionEvent::Thinking {
-                        text: thinking,
-                        signature: None,
-                    })]
-                }
-                ResponseContent::RedactedThinking { data } => {
-                    vec![Ok(LanguageModelCompletionEvent::RedactedThinking { data })]
-                }
-                ResponseContent::ToolUse { id, name, .. } => {
-                    self.tool_uses_by_index.insert(
-                        index,
-                        RawToolUse {
-                            id,
-                            name,
-                            input_json: String::new(),
-                        },
-                    );
-                    Vec::new()
-                }
-            },
-            Event::ContentBlockDelta { index, delta } => match delta {
-                ContentDelta::TextDelta { text } => {
-                    vec![Ok(LanguageModelCompletionEvent::Text(text))]
-                }
-                ContentDelta::ThinkingDelta { thinking } => {
-                    vec![Ok(LanguageModelCompletionEvent::Thinking {
-                        text: thinking,
-                        signature: None,
-                    })]
-                }
-                ContentDelta::SignatureDelta { signature } => {
-                    vec![Ok(LanguageModelCompletionEvent::Thinking {
-                        text: "".to_string(),
-                        signature: Some(signature),
-                    })]
-                }
-                ContentDelta::InputJsonDelta { partial_json } => {
-                    if let Some(tool_use) = self.tool_uses_by_index.get_mut(&index) {
-                        tool_use.input_json.push_str(&partial_json);
-
-                        // Try to convert invalid (incomplete) JSON into
-                        // valid JSON that serde can accept, e.g. by closing
-                        // unclosed delimiters. This way, we can update the
-                        // UI with whatever has been streamed back so far.
-                        if let Ok(input) =
-                            serde_json::Value::from_str(&fix_streamed_json(&tool_use.input_json))
-                        {
-                            return vec![Ok(LanguageModelCompletionEvent::ToolUse(
-                                LanguageModelToolUse {
-                                    id: tool_use.id.clone().into(),
-                                    name: tool_use.name.clone().into(),
-                                    is_input_complete: false,
-                                    raw_input: tool_use.input_json.clone(),
-                                    input,
-                                    thought_signature: None,
-                                },
-                            ))];
-                        }
-                    }
-                    vec![]
-                }
-            },
-            Event::ContentBlockStop { index } => {
-                if let Some(tool_use) = self.tool_uses_by_index.remove(&index) {
-                    let input_json = tool_use.input_json.trim();
-                    let event_result = match parse_tool_arguments(input_json) {
-                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
-                            LanguageModelToolUse {
-                                id: tool_use.id.into(),
-                                name: tool_use.name.into(),
-                                is_input_complete: true,
-                                input,
-                                raw_input: tool_use.input_json.clone(),
-                                thought_signature: None,
-                            },
-                        )),
-                        Err(json_parse_err) => {
-                            Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
-                                id: tool_use.id.into(),
-                                tool_name: tool_use.name.into(),
-                                raw_input: input_json.into(),
-                                json_parse_error: json_parse_err.to_string(),
-                            })
-                        }
-                    };
-
-                    vec![event_result]
-                } else {
-                    Vec::new()
-                }
-            }
-            Event::MessageStart { message } => {
-                update_usage(&mut self.usage, &message.usage);
-                vec![
-                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
-                        &self.usage,
-                    ))),
-                    Ok(LanguageModelCompletionEvent::StartMessage {
-                        message_id: message.id,
-                    }),
-                ]
-            }
-            Event::MessageDelta { delta, usage } => {
-                update_usage(&mut self.usage, &usage);
-                if let Some(stop_reason) = delta.stop_reason.as_deref() {
-                    self.stop_reason = match stop_reason {
-                        "end_turn" => StopReason::EndTurn,
-                        "max_tokens" => StopReason::MaxTokens,
-                        "tool_use" => StopReason::ToolUse,
-                        "refusal" => StopReason::Refusal,
-                        _ => {
-                            log::error!("Unexpected anthropic stop_reason: {stop_reason}");
-                            StopReason::EndTurn
-                        }
-                    };
-                }
-                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
-                    convert_usage(&self.usage),
-                ))]
-            }
-            Event::MessageStop => {
-                vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))]
-            }
-            Event::Error { error } => {
-                vec![Err(error.into())]
-            }
-            _ => Vec::new(),
-        }
-    }
-}
-
-struct RawToolUse {
-    id: String,
-    name: String,
-    input_json: String,
-}
-
-/// Updates usage data by preferring counts from `new`.
-fn update_usage(usage: &mut Usage, new: &Usage) {
-    if let Some(input_tokens) = new.input_tokens {
-        usage.input_tokens = Some(input_tokens);
-    }
-    if let Some(output_tokens) = new.output_tokens {
-        usage.output_tokens = Some(output_tokens);
-    }
-    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
-        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
-    }
-    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
-        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
-    }
-}
-
-fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
-    language_model::TokenUsage {
-        input_tokens: usage.input_tokens.unwrap_or(0),
-        output_tokens: usage.output_tokens.unwrap_or(0),
-        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
-        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
-    }
-}
-
 struct ConfigurationView {
     api_key_editor: Entity<InputField>,
     state: Entity<State>,
@@ -1157,192 +597,3 @@ impl Render for ConfigurationView {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use anthropic::AnthropicModelMode;
-    use language_model::{LanguageModelRequestMessage, MessageContent};
-
-    #[test]
-    fn test_cache_control_only_on_last_segment() {
-        let request = LanguageModelRequest {
-            messages: vec![LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec![
-                    MessageContent::Text("Some prompt".to_string()),
-                    MessageContent::Image(language_model::LanguageModelImage::empty()),
-                    MessageContent::Image(language_model::LanguageModelImage::empty()),
-                    MessageContent::Image(language_model::LanguageModelImage::empty()),
-                    MessageContent::Image(language_model::LanguageModelImage::empty()),
-                ],
-                cache: true,
-                reasoning_details: None,
-            }],
-            thread_id: None,
-            prompt_id: None,
-            intent: None,
-            stop: vec![],
-            temperature: None,
-            tools: vec![],
-            tool_choice: None,
-            thinking_allowed: true,
-            thinking_effort: None,
-            speed: None,
-        };
-
-        let anthropic_request = into_anthropic(
-            request,
-            "claude-3-5-sonnet".to_string(),
-            0.7,
-            4096,
-            AnthropicModelMode::Default,
-        );
-
-        assert_eq!(anthropic_request.messages.len(), 1);
-
-        let message = &anthropic_request.messages[0];
-        assert_eq!(message.content.len(), 5);
-
-        assert!(matches!(
-            message.content[0],
-            anthropic::RequestContent::Text {
-                cache_control: None,
-                ..
-            }
-        ));
-        for i in 1..3 {
-            assert!(matches!(
-                message.content[i],
-                anthropic::RequestContent::Image {
-                    cache_control: None,
-                    ..
-                }
-            ));
-        }
-
-        assert!(matches!(
-            message.content[4],
-            anthropic::RequestContent::Image {
-                cache_control: Some(anthropic::CacheControl {
-                    cache_type: anthropic::CacheControlType::Ephemeral,
-                }),
-                ..
-            }
-        ));
-    }
-
-    fn request_with_assistant_content(
-        assistant_content: Vec<MessageContent>,
-    ) -> anthropic::Request {
-        let mut request = LanguageModelRequest {
-            messages: vec![LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec![MessageContent::Text("Hello".to_string())],
-                cache: false,
-                reasoning_details: None,
-            }],
-            thinking_effort: None,
-            thread_id: None,
-            prompt_id: None,
-            intent: None,
-            stop: vec![],
-            temperature: None,
-            tools: vec![],
-            tool_choice: None,
-            thinking_allowed: true,
-            speed: None,
-        };
-        request.messages.push(LanguageModelRequestMessage {
-            role: Role::Assistant,
-            content: assistant_content,
-            cache: false,
-            reasoning_details: None,
-        });
-        into_anthropic(
-            request,
-            "claude-sonnet-4-5".to_string(),
-            1.0,
-            16000,
-            AnthropicModelMode::Thinking {
-                budget_tokens: Some(10000),
-            },
-        )
-    }
-
-    #[test]
-    fn test_unsigned_thinking_blocks_stripped() {
-        let result = request_with_assistant_content(vec![
-            MessageContent::Thinking {
-                text: "Cancelled mid-think, no signature".to_string(),
-                signature: None,
-            },
-            MessageContent::Text("Some response text".to_string()),
-        ]);
-
-        let assistant_message = result
-            .messages
-            .iter()
-            .find(|m| m.role == anthropic::Role::Assistant)
-            .expect("assistant message should still exist");
-
-        assert_eq!(
-            assistant_message.content.len(),
-            1,
-            "Only the text content should remain; unsigned thinking block should be stripped"
-        );
-        assert!(matches!(
-            &assistant_message.content[0],
-            anthropic::RequestContent::Text { text, .. } if text == "Some response text"
-        ));
-    }
-
-    #[test]
-    fn test_signed_thinking_blocks_preserved() {
-        let result = request_with_assistant_content(vec![
-            MessageContent::Thinking {
-                text: "Completed thinking".to_string(),
-                signature: Some("valid-signature".to_string()),
-            },
-            MessageContent::Text("Response".to_string()),
-        ]);
-
-        let assistant_message = result
-            .messages
-            .iter()
-            .find(|m| m.role == anthropic::Role::Assistant)
-            .expect("assistant message should exist");
-
-        assert_eq!(
-            assistant_message.content.len(),
-            2,
-            "Both the signed thinking block and text should be preserved"
-        );
-        assert!(matches!(
-            &assistant_message.content[0],
-            anthropic::RequestContent::Thinking { thinking, signature, .. }
-                if thinking == "Completed thinking" && signature == "valid-signature"
-        ));
-    }
-
-    #[test]
-    fn test_only_unsigned_thinking_block_omits_entire_message() {
-        let result = request_with_assistant_content(vec![MessageContent::Thinking {
-            text: "Cancelled before any text or signature".to_string(),
-            signature: None,
-        }]);
-
-        let assistant_messages: Vec<_> = result
-            .messages
-            .iter()
-            .filter(|m| m.role == anthropic::Role::Assistant)
-            .collect();
-
-        assert_eq!(
-            assistant_messages.len(),
-            0,
-            "An assistant message whose only content was an unsigned thinking block \
-             should be omitted entirely"
-        );
-    }
-}

crates/language_models/src/provider/bedrock.rs 🔗

@@ -48,7 +48,7 @@ use ui_input::InputField;
 use util::ResultExt;
 
 use crate::AllLanguageModelSettings;
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
+use language_model::util::{fix_streamed_json, parse_tool_arguments};
 
 actions!(bedrock, [Tab, TabPrev]);
 

crates/language_models/src/provider/cloud.rs 🔗

@@ -1,107 +1,93 @@
 use ai_onboarding::YoungAccountBanner;
-use anthropic::AnthropicModelMode;
-use anyhow::{Context as _, Result, anyhow};
-use client::{
-    Client, NeedsLlmTokenRefresh, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls,
-};
-use cloud_api_types::{OrganizationId, Plan};
-use cloud_llm_client::{
-    CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME,
-    CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus,
-    CountTokensBody, CountTokensResponse, ListModelsResponse,
-    SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, ZED_VERSION_HEADER_NAME,
-};
-use futures::{
-    AsyncBufReadExt, FutureExt, Stream, StreamExt,
-    future::BoxFuture,
-    stream::{self, BoxStream},
-};
-use google_ai::GoogleModelMode;
-use gpui::{AnyElement, AnyView, App, AsyncApp, Context, Entity, Subscription, Task};
-use http_client::http::{HeaderMap, HeaderValue};
-use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response, StatusCode};
+use anyhow::Result;
+use client::{Client, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls};
+use cloud_api_client::LlmApiToken;
+use cloud_api_types::OrganizationId;
+use cloud_api_types::Plan;
+use futures::StreamExt;
+use futures::future::BoxFuture;
+use gpui::AsyncApp;
+use gpui::{AnyElement, AnyView, App, Context, Entity, Subscription, Task};
 use language_model::{
-    ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, AuthenticateError, GOOGLE_PROVIDER_ID,
-    GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration,
-    LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel,
-    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
-    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
-    LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, OPEN_AI_PROVIDER_ID,
-    OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME,
-    ZED_CLOUD_PROVIDER_ID, ZED_CLOUD_PROVIDER_NAME,
+    AuthenticateError, IconOrSvg, LanguageModel, LanguageModelProvider, LanguageModelProviderId,
+    LanguageModelProviderName, LanguageModelProviderState, ZED_CLOUD_PROVIDER_ID,
+    ZED_CLOUD_PROVIDER_NAME,
 };
+use language_models_cloud::{CloudLlmTokenProvider, CloudModelProvider};
 use release_channel::AppVersion;
-use schemars::JsonSchema;
-use semver::Version;
-use serde::{Deserialize, Serialize, de::DeserializeOwned};
+
 use settings::SettingsStore;
 pub use settings::ZedDotDevAvailableModel as AvailableModel;
 pub use settings::ZedDotDevAvailableProvider as AvailableProvider;
-use smol::io::{AsyncReadExt, BufReader};
-use std::collections::VecDeque;
-use std::pin::Pin;
-use std::str::FromStr;
 use std::sync::Arc;
-use std::task::Poll;
-use std::time::Duration;
-use thiserror::Error;
 use ui::{TintColor, prelude::*};
 
-use crate::provider::anthropic::{
-    AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic,
-};
-use crate::provider::google::{GoogleEventMapper, into_google};
-use crate::provider::open_ai::{
-    OpenAiEventMapper, OpenAiResponseEventMapper, count_open_ai_tokens, into_open_ai,
-    into_open_ai_response,
-};
-use crate::provider::x_ai::count_xai_tokens;
-
 const PROVIDER_ID: LanguageModelProviderId = ZED_CLOUD_PROVIDER_ID;
 const PROVIDER_NAME: LanguageModelProviderName = ZED_CLOUD_PROVIDER_NAME;
 
-#[derive(Default, Clone, Debug, PartialEq)]
-pub struct ZedDotDevSettings {
-    pub available_models: Vec<AvailableModel>,
-}
-#[derive(Default, Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
-#[serde(tag = "type", rename_all = "lowercase")]
-pub enum ModelMode {
-    #[default]
-    Default,
-    Thinking {
-        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
-        budget_tokens: Option<u32>,
-    },
+struct ClientTokenProvider {
+    client: Arc<Client>,
+    llm_api_token: LlmApiToken,
+    user_store: Entity<UserStore>,
 }
 
-impl From<ModelMode> for AnthropicModelMode {
-    fn from(value: ModelMode) -> Self {
-        match value {
-            ModelMode::Default => AnthropicModelMode::Default,
-            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
-        }
+impl CloudLlmTokenProvider for ClientTokenProvider {
+    type AuthContext = Option<OrganizationId>;
+
+    fn auth_context(&self, cx: &AsyncApp) -> Self::AuthContext {
+        self.user_store.read_with(cx, |user_store, _| {
+            user_store
+                .current_organization()
+                .map(|organization| organization.id.clone())
+        })
     }
+
+    fn acquire_token(
+        &self,
+        organization_id: Self::AuthContext,
+    ) -> BoxFuture<'static, Result<String>> {
+        let client = self.client.clone();
+        let llm_api_token = self.llm_api_token.clone();
+        Box::pin(async move {
+            client
+                .acquire_llm_token(&llm_api_token, organization_id)
+                .await
+        })
+    }
+
+    fn refresh_token(
+        &self,
+        organization_id: Self::AuthContext,
+    ) -> BoxFuture<'static, Result<String>> {
+        let client = self.client.clone();
+        let llm_api_token = self.llm_api_token.clone();
+        Box::pin(async move {
+            client
+                .refresh_llm_token(&llm_api_token, organization_id)
+                .await
+        })
+    }
+}
+
+#[derive(Default, Clone, Debug, PartialEq)]
+pub struct ZedDotDevSettings {
+    pub available_models: Vec<AvailableModel>,
 }
 
 pub struct CloudLanguageModelProvider {
-    client: Arc<Client>,
     state: Entity<State>,
     _maintain_client_status: Task<()>,
 }
 
 pub struct State {
     client: Arc<Client>,
-    llm_api_token: LlmApiToken,
     user_store: Entity<UserStore>,
     status: client::Status,
-    models: Vec<Arc<cloud_llm_client::LanguageModel>>,
-    default_model: Option<Arc<cloud_llm_client::LanguageModel>>,
-    default_fast_model: Option<Arc<cloud_llm_client::LanguageModel>>,
-    recommended_models: Vec<Arc<cloud_llm_client::LanguageModel>>,
+    provider: Entity<CloudModelProvider<ClientTokenProvider>>,
     _user_store_subscription: Subscription,
     _settings_subscription: Subscription,
     _llm_token_subscription: Subscription,
+    _provider_subscription: Subscription,
 }
 
 impl State {
@@ -112,16 +98,26 @@ impl State {
         cx: &mut Context<Self>,
     ) -> Self {
         let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);
-        let llm_api_token = global_llm_token(cx);
+        let token_provider = Arc::new(ClientTokenProvider {
+            client: client.clone(),
+            llm_api_token: global_llm_token(cx),
+            user_store: user_store.clone(),
+        });
+
+        let provider = cx.new(|cx| {
+            CloudModelProvider::new(
+                token_provider.clone(),
+                client.http_client(),
+                Some(AppVersion::global(cx)),
+            )
+        });
+
         Self {
             client: client.clone(),
-            llm_api_token,
             user_store: user_store.clone(),
             status,
-            models: Vec::new(),
-            default_model: None,
-            default_fast_model: None,
-            recommended_models: Vec::new(),
+            _provider_subscription: cx.observe(&provider, |_, _, cx| cx.notify()),
+            provider,
             _user_store_subscription: cx.subscribe(
                 &user_store,
                 move |this, _user_store, event, cx| match event {
@@ -131,19 +127,7 @@ impl State {
                             return;
                         }
 
-                        let client = this.client.clone();
-                        let llm_api_token = this.llm_api_token.clone();
-                        let organization_id = this
-                            .user_store
-                            .read(cx)
-                            .current_organization()
-                            .map(|organization| organization.id.clone());
-                        cx.spawn(async move |this, cx| {
-                            let response =
-                                Self::fetch_models(client, llm_api_token, organization_id).await?;
-                            this.update(cx, |this, cx| this.update_models(response, cx))
-                        })
-                        .detach_and_log_err(cx);
+                        this.refresh_models(cx);
                     }
                     _ => {}
                 },
@@ -154,21 +138,7 @@ impl State {
             _llm_token_subscription: cx.subscribe(
                 &refresh_llm_token_listener,
                 move |this, _listener, _event, cx| {
-                    let client = this.client.clone();
-                    let llm_api_token = this.llm_api_token.clone();
-                    let organization_id = this
-                        .user_store
-                        .read(cx)
-                        .current_organization()
-                        .map(|organization| organization.id.clone());
-                    cx.spawn(async move |this, cx| {
-                        let response =
-                            Self::fetch_models(client, llm_api_token, organization_id).await?;
-                        this.update(cx, |this, cx| {
-                            this.update_models(response, cx);
-                        })
-                    })
-                    .detach_and_log_err(cx);
+                    this.refresh_models(cx);
                 },
             ),
         }
@@ -186,74 +156,10 @@ impl State {
         })
     }
 
-    fn update_models(&mut self, response: ListModelsResponse, cx: &mut Context<Self>) {
-        let mut models = Vec::new();
-
-        for model in response.models {
-            models.push(Arc::new(model.clone()));
-        }
-
-        self.default_model = models
-            .iter()
-            .find(|model| {
-                response
-                    .default_model
-                    .as_ref()
-                    .is_some_and(|default_model_id| &model.id == default_model_id)
-            })
-            .cloned();
-        self.default_fast_model = models
-            .iter()
-            .find(|model| {
-                response
-                    .default_fast_model
-                    .as_ref()
-                    .is_some_and(|default_fast_model_id| &model.id == default_fast_model_id)
-            })
-            .cloned();
-        self.recommended_models = response
-            .recommended_models
-            .iter()
-            .filter_map(|id| models.iter().find(|model| &model.id == id))
-            .cloned()
-            .collect();
-        self.models = models;
-        cx.notify();
-    }
-
-    async fn fetch_models(
-        client: Arc<Client>,
-        llm_api_token: LlmApiToken,
-        organization_id: Option<OrganizationId>,
-    ) -> Result<ListModelsResponse> {
-        let http_client = &client.http_client();
-        let token = client
-            .acquire_llm_token(&llm_api_token, organization_id)
-            .await?;
-
-        let request = http_client::Request::builder()
-            .method(Method::GET)
-            .header(CLIENT_SUPPORTS_X_AI_HEADER_NAME, "true")
-            .uri(http_client.build_zed_llm_url("/models", &[])?.as_ref())
-            .header("Authorization", format!("Bearer {token}"))
-            .body(AsyncBody::empty())?;
-        let mut response = http_client
-            .send(request)
-            .await
-            .context("failed to send list models request")?;
-
-        if response.status().is_success() {
-            let mut body = String::new();
-            response.body_mut().read_to_string(&mut body).await?;
-            Ok(serde_json::from_str(&body)?)
-        } else {
-            let mut body = String::new();
-            response.body_mut().read_to_string(&mut body).await?;
-            anyhow::bail!(
-                "error listing models.\nStatus: {:?}\nBody: {body}",
-                response.status(),
-            );
-        }
+    fn refresh_models(&mut self, cx: &mut Context<Self>) {
+        self.provider.update(cx, |provider, cx| {
+            provider.refresh_models(cx).detach_and_log_err(cx);
+        });
     }
 }
 
@@ -281,27 +187,10 @@ impl CloudLanguageModelProvider {
         });
 
         Self {
-            client,
             state,
             _maintain_client_status: maintain_client_status,
         }
     }
-
-    fn create_language_model(
-        &self,
-        model: Arc<cloud_llm_client::LanguageModel>,
-        llm_api_token: LlmApiToken,
-        user_store: Entity<UserStore>,
-    ) -> Arc<dyn LanguageModel> {
-        Arc::new(CloudLanguageModel {
-            id: LanguageModelId(SharedString::from(model.id.0.clone())),
-            model,
-            llm_api_token,
-            user_store,
-            client: self.client.clone(),
-            request_limiter: RateLimiter::new(4),
-        })
-    }
 }
 
 impl LanguageModelProviderState for CloudLanguageModelProvider {
@@ -327,45 +216,35 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
 
     fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
         let state = self.state.read(cx);
-        let default_model = state.default_model.clone()?;
-        let llm_api_token = state.llm_api_token.clone();
-        let user_store = state.user_store.clone();
-        Some(self.create_language_model(default_model, llm_api_token, user_store))
+        let provider = state.provider.read(cx);
+        let model = provider.default_model()?;
+        Some(provider.create_model(model))
     }
 
     fn default_fast_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
         let state = self.state.read(cx);
-        let default_fast_model = state.default_fast_model.clone()?;
-        let llm_api_token = state.llm_api_token.clone();
-        let user_store = state.user_store.clone();
-        Some(self.create_language_model(default_fast_model, llm_api_token, user_store))
+        let provider = state.provider.read(cx);
+        let model = provider.default_fast_model()?;
+        Some(provider.create_model(model))
     }
 
     fn recommended_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
         let state = self.state.read(cx);
-        let llm_api_token = state.llm_api_token.clone();
-        let user_store = state.user_store.clone();
-        state
-            .recommended_models
+        let provider = state.provider.read(cx);
+        provider
+            .recommended_models()
             .iter()
-            .cloned()
-            .map(|model| {
-                self.create_language_model(model, llm_api_token.clone(), user_store.clone())
-            })
+            .map(|model| provider.create_model(model))
             .collect()
     }
 
     fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
         let state = self.state.read(cx);
-        let llm_api_token = state.llm_api_token.clone();
-        let user_store = state.user_store.clone();
-        state
-            .models
+        let provider = state.provider.read(cx);
+        provider
+            .models()
             .iter()
-            .cloned()
-            .map(|model| {
-                self.create_language_model(model, llm_api_token.clone(), user_store.clone())
-            })
+            .map(|model| provider.create_model(model))
             .collect()
     }
 
@@ -393,700 +272,6 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
     }
 }
 
-pub struct CloudLanguageModel {
-    id: LanguageModelId,
-    model: Arc<cloud_llm_client::LanguageModel>,
-    llm_api_token: LlmApiToken,
-    user_store: Entity<UserStore>,
-    client: Arc<Client>,
-    request_limiter: RateLimiter,
-}
-
-struct PerformLlmCompletionResponse {
-    response: Response<AsyncBody>,
-    includes_status_messages: bool,
-}
-
-impl CloudLanguageModel {
-    async fn perform_llm_completion(
-        client: Arc<Client>,
-        llm_api_token: LlmApiToken,
-        organization_id: Option<OrganizationId>,
-        app_version: Option<Version>,
-        body: CompletionBody,
-    ) -> Result<PerformLlmCompletionResponse> {
-        let http_client = &client.http_client();
-
-        let mut token = client
-            .acquire_llm_token(&llm_api_token, organization_id.clone())
-            .await?;
-        let mut refreshed_token = false;
-
-        loop {
-            let request = http_client::Request::builder()
-                .method(Method::POST)
-                .uri(http_client.build_zed_llm_url("/completions", &[])?.as_ref())
-                .when_some(app_version.as_ref(), |builder, app_version| {
-                    builder.header(ZED_VERSION_HEADER_NAME, app_version.to_string())
-                })
-                .header("Content-Type", "application/json")
-                .header("Authorization", format!("Bearer {token}"))
-                .header(CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, "true")
-                .header(CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, "true")
-                .body(serde_json::to_string(&body)?.into())?;
-
-            let mut response = http_client.send(request).await?;
-            let status = response.status();
-            if status.is_success() {
-                let includes_status_messages = response
-                    .headers()
-                    .get(SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME)
-                    .is_some();
-
-                return Ok(PerformLlmCompletionResponse {
-                    response,
-                    includes_status_messages,
-                });
-            }
-
-            if !refreshed_token && response.needs_llm_token_refresh() {
-                token = client
-                    .refresh_llm_token(&llm_api_token, organization_id.clone())
-                    .await?;
-                refreshed_token = true;
-                continue;
-            }
-
-            if status == StatusCode::PAYMENT_REQUIRED {
-                return Err(anyhow!(PaymentRequiredError));
-            }
-
-            let mut body = String::new();
-            let headers = response.headers().clone();
-            response.body_mut().read_to_string(&mut body).await?;
-            return Err(anyhow!(ApiError {
-                status,
-                body,
-                headers
-            }));
-        }
-    }
-}
-
-#[derive(Debug, Error)]
-#[error("cloud language model request failed with status {status}: {body}")]
-struct ApiError {
-    status: StatusCode,
-    body: String,
-    headers: HeaderMap<HeaderValue>,
-}
-
-/// Represents error responses from Zed's cloud API.
-///
-/// Example JSON for an upstream HTTP error:
-/// ```json
-/// {
-///   "code": "upstream_http_error",
-///   "message": "Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers, reset reason: connection timeout",
-///   "upstream_status": 503
-/// }
-/// ```
-#[derive(Debug, serde::Deserialize)]
-struct CloudApiError {
-    code: String,
-    message: String,
-    #[serde(default)]
-    #[serde(deserialize_with = "deserialize_optional_status_code")]
-    upstream_status: Option<StatusCode>,
-    #[serde(default)]
-    retry_after: Option<f64>,
-}
-
-fn deserialize_optional_status_code<'de, D>(deserializer: D) -> Result<Option<StatusCode>, D::Error>
-where
-    D: serde::Deserializer<'de>,
-{
-    let opt: Option<u16> = Option::deserialize(deserializer)?;
-    Ok(opt.and_then(|code| StatusCode::from_u16(code).ok()))
-}
-
-impl From<ApiError> for LanguageModelCompletionError {
-    fn from(error: ApiError) -> Self {
-        if let Ok(cloud_error) = serde_json::from_str::<CloudApiError>(&error.body) {
-            if cloud_error.code.starts_with("upstream_http_") {
-                let status = if let Some(status) = cloud_error.upstream_status {
-                    status
-                } else if cloud_error.code.ends_with("_error") {
-                    error.status
-                } else {
-                    // If there's a status code in the code string (e.g. "upstream_http_429")
-                    // then use that; otherwise, see if the JSON contains a status code.
-                    cloud_error
-                        .code
-                        .strip_prefix("upstream_http_")
-                        .and_then(|code_str| code_str.parse::<u16>().ok())
-                        .and_then(|code| StatusCode::from_u16(code).ok())
-                        .unwrap_or(error.status)
-                };
-
-                return LanguageModelCompletionError::UpstreamProviderError {
-                    message: cloud_error.message,
-                    status,
-                    retry_after: cloud_error.retry_after.map(Duration::from_secs_f64),
-                };
-            }
-
-            return LanguageModelCompletionError::from_http_status(
-                PROVIDER_NAME,
-                error.status,
-                cloud_error.message,
-                None,
-            );
-        }
-
-        let retry_after = None;
-        LanguageModelCompletionError::from_http_status(
-            PROVIDER_NAME,
-            error.status,
-            error.body,
-            retry_after,
-        )
-    }
-}
-
-impl LanguageModel for CloudLanguageModel {
-    fn id(&self) -> LanguageModelId {
-        self.id.clone()
-    }
-
-    fn name(&self) -> LanguageModelName {
-        LanguageModelName::from(self.model.display_name.clone())
-    }
-
-    fn provider_id(&self) -> LanguageModelProviderId {
-        PROVIDER_ID
-    }
-
-    fn provider_name(&self) -> LanguageModelProviderName {
-        PROVIDER_NAME
-    }
-
-    fn upstream_provider_id(&self) -> LanguageModelProviderId {
-        use cloud_llm_client::LanguageModelProvider::*;
-        match self.model.provider {
-            Anthropic => ANTHROPIC_PROVIDER_ID,
-            OpenAi => OPEN_AI_PROVIDER_ID,
-            Google => GOOGLE_PROVIDER_ID,
-            XAi => X_AI_PROVIDER_ID,
-        }
-    }
-
-    fn upstream_provider_name(&self) -> LanguageModelProviderName {
-        use cloud_llm_client::LanguageModelProvider::*;
-        match self.model.provider {
-            Anthropic => ANTHROPIC_PROVIDER_NAME,
-            OpenAi => OPEN_AI_PROVIDER_NAME,
-            Google => GOOGLE_PROVIDER_NAME,
-            XAi => X_AI_PROVIDER_NAME,
-        }
-    }
-
-    fn is_latest(&self) -> bool {
-        self.model.is_latest
-    }
-
-    fn supports_tools(&self) -> bool {
-        self.model.supports_tools
-    }
-
-    fn supports_images(&self) -> bool {
-        self.model.supports_images
-    }
-
-    fn supports_thinking(&self) -> bool {
-        self.model.supports_thinking
-    }
-
-    fn supports_fast_mode(&self) -> bool {
-        self.model.supports_fast_mode
-    }
-
-    fn supported_effort_levels(&self) -> Vec<LanguageModelEffortLevel> {
-        self.model
-            .supported_effort_levels
-            .iter()
-            .map(|effort_level| LanguageModelEffortLevel {
-                name: effort_level.name.clone().into(),
-                value: effort_level.value.clone().into(),
-                is_default: effort_level.is_default.unwrap_or(false),
-            })
-            .collect()
-    }
-
-    fn supports_streaming_tools(&self) -> bool {
-        self.model.supports_streaming_tools
-    }
-
-    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
-        match choice {
-            LanguageModelToolChoice::Auto
-            | LanguageModelToolChoice::Any
-            | LanguageModelToolChoice::None => true,
-        }
-    }
-
-    fn supports_split_token_display(&self) -> bool {
-        use cloud_llm_client::LanguageModelProvider::*;
-        matches!(self.model.provider, OpenAi | XAi)
-    }
-
-    fn telemetry_id(&self) -> String {
-        format!("zed.dev/{}", self.model.id)
-    }
-
-    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
-        match self.model.provider {
-            cloud_llm_client::LanguageModelProvider::Anthropic
-            | cloud_llm_client::LanguageModelProvider::OpenAi => {
-                LanguageModelToolSchemaFormat::JsonSchema
-            }
-            cloud_llm_client::LanguageModelProvider::Google
-            | cloud_llm_client::LanguageModelProvider::XAi => {
-                LanguageModelToolSchemaFormat::JsonSchemaSubset
-            }
-        }
-    }
-
-    fn max_token_count(&self) -> u64 {
-        self.model.max_token_count as u64
-    }
-
-    fn max_output_tokens(&self) -> Option<u64> {
-        Some(self.model.max_output_tokens as u64)
-    }
-
-    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
-        match &self.model.provider {
-            cloud_llm_client::LanguageModelProvider::Anthropic => {
-                Some(LanguageModelCacheConfiguration {
-                    min_total_token: 2_048,
-                    should_speculate: true,
-                    max_cache_anchors: 4,
-                })
-            }
-            cloud_llm_client::LanguageModelProvider::OpenAi
-            | cloud_llm_client::LanguageModelProvider::XAi
-            | cloud_llm_client::LanguageModelProvider::Google => None,
-        }
-    }
-
-    fn count_tokens(
-        &self,
-        request: LanguageModelRequest,
-        cx: &App,
-    ) -> BoxFuture<'static, Result<u64>> {
-        match self.model.provider {
-            cloud_llm_client::LanguageModelProvider::Anthropic => cx
-                .background_spawn(async move { count_anthropic_tokens_with_tiktoken(request) })
-                .boxed(),
-            cloud_llm_client::LanguageModelProvider::OpenAi => {
-                let model = match open_ai::Model::from_id(&self.model.id.0) {
-                    Ok(model) => model,
-                    Err(err) => return async move { Err(anyhow!(err)) }.boxed(),
-                };
-                count_open_ai_tokens(request, model, cx)
-            }
-            cloud_llm_client::LanguageModelProvider::XAi => {
-                let model = match x_ai::Model::from_id(&self.model.id.0) {
-                    Ok(model) => model,
-                    Err(err) => return async move { Err(anyhow!(err)) }.boxed(),
-                };
-                count_xai_tokens(request, model, cx)
-            }
-            cloud_llm_client::LanguageModelProvider::Google => {
-                let client = self.client.clone();
-                let llm_api_token = self.llm_api_token.clone();
-                let organization_id = self
-                    .user_store
-                    .read(cx)
-                    .current_organization()
-                    .map(|organization| organization.id.clone());
-                let model_id = self.model.id.to_string();
-                let generate_content_request =
-                    into_google(request, model_id.clone(), GoogleModelMode::Default);
-                async move {
-                    let http_client = &client.http_client();
-                    let token = client
-                        .acquire_llm_token(&llm_api_token, organization_id)
-                        .await?;
-
-                    let request_body = CountTokensBody {
-                        provider: cloud_llm_client::LanguageModelProvider::Google,
-                        model: model_id,
-                        provider_request: serde_json::to_value(&google_ai::CountTokensRequest {
-                            generate_content_request,
-                        })?,
-                    };
-                    let request = http_client::Request::builder()
-                        .method(Method::POST)
-                        .uri(
-                            http_client
-                                .build_zed_llm_url("/count_tokens", &[])?
-                                .as_ref(),
-                        )
-                        .header("Content-Type", "application/json")
-                        .header("Authorization", format!("Bearer {token}"))
-                        .body(serde_json::to_string(&request_body)?.into())?;
-                    let mut response = http_client.send(request).await?;
-                    let status = response.status();
-                    let headers = response.headers().clone();
-                    let mut response_body = String::new();
-                    response
-                        .body_mut()
-                        .read_to_string(&mut response_body)
-                        .await?;
-
-                    if status.is_success() {
-                        let response_body: CountTokensResponse =
-                            serde_json::from_str(&response_body)?;
-
-                        Ok(response_body.tokens as u64)
-                    } else {
-                        Err(anyhow!(ApiError {
-                            status,
-                            body: response_body,
-                            headers
-                        }))
-                    }
-                }
-                .boxed()
-            }
-        }
-    }
-
-    fn stream_completion(
-        &self,
-        request: LanguageModelRequest,
-        cx: &AsyncApp,
-    ) -> BoxFuture<
-        'static,
-        Result<
-            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
-            LanguageModelCompletionError,
-        >,
-    > {
-        let thread_id = request.thread_id.clone();
-        let prompt_id = request.prompt_id.clone();
-        let app_version = Some(cx.update(|cx| AppVersion::global(cx)));
-        let user_store = self.user_store.clone();
-        let organization_id = cx.update(|cx| {
-            user_store
-                .read(cx)
-                .current_organization()
-                .map(|organization| organization.id.clone())
-        });
-        let thinking_allowed = request.thinking_allowed;
-        let enable_thinking = thinking_allowed && self.model.supports_thinking;
-        let provider_name = provider_name(&self.model.provider);
-        match self.model.provider {
-            cloud_llm_client::LanguageModelProvider::Anthropic => {
-                let effort = request
-                    .thinking_effort
-                    .as_ref()
-                    .and_then(|effort| anthropic::Effort::from_str(effort).ok());
-
-                let mut request = into_anthropic(
-                    request,
-                    self.model.id.to_string(),
-                    1.0,
-                    self.model.max_output_tokens as u64,
-                    if enable_thinking {
-                        AnthropicModelMode::Thinking {
-                            budget_tokens: Some(4_096),
-                        }
-                    } else {
-                        AnthropicModelMode::Default
-                    },
-                );
-
-                if enable_thinking && effort.is_some() {
-                    request.thinking = Some(anthropic::Thinking::Adaptive);
-                    request.output_config = Some(anthropic::OutputConfig { effort });
-                }
-
-                let client = self.client.clone();
-                let llm_api_token = self.llm_api_token.clone();
-                let organization_id = organization_id.clone();
-                let future = self.request_limiter.stream(async move {
-                    let PerformLlmCompletionResponse {
-                        response,
-                        includes_status_messages,
-                    } = Self::perform_llm_completion(
-                        client.clone(),
-                        llm_api_token,
-                        organization_id,
-                        app_version,
-                        CompletionBody {
-                            thread_id,
-                            prompt_id,
-                            provider: cloud_llm_client::LanguageModelProvider::Anthropic,
-                            model: request.model.clone(),
-                            provider_request: serde_json::to_value(&request)
-                                .map_err(|e| anyhow!(e))?,
-                        },
-                    )
-                    .await
-                    .map_err(|err| match err.downcast::<ApiError>() {
-                        Ok(api_err) => anyhow!(LanguageModelCompletionError::from(api_err)),
-                        Err(err) => anyhow!(err),
-                    })?;
-
-                    let mut mapper = AnthropicEventMapper::new();
-                    Ok(map_cloud_completion_events(
-                        Box::pin(response_lines(response, includes_status_messages)),
-                        &provider_name,
-                        move |event| mapper.map_event(event),
-                    ))
-                });
-                async move { Ok(future.await?.boxed()) }.boxed()
-            }
-            cloud_llm_client::LanguageModelProvider::OpenAi => {
-                let client = self.client.clone();
-                let llm_api_token = self.llm_api_token.clone();
-                let organization_id = organization_id.clone();
-                let effort = request
-                    .thinking_effort
-                    .as_ref()
-                    .and_then(|effort| open_ai::ReasoningEffort::from_str(effort).ok());
-
-                let mut request = into_open_ai_response(
-                    request,
-                    &self.model.id.0,
-                    self.model.supports_parallel_tool_calls,
-                    true,
-                    None,
-                    None,
-                );
-
-                if enable_thinking && let Some(effort) = effort {
-                    request.reasoning = Some(open_ai::responses::ReasoningConfig {
-                        effort,
-                        summary: Some(open_ai::responses::ReasoningSummaryMode::Auto),
-                    });
-                }
-
-                let future = self.request_limiter.stream(async move {
-                    let PerformLlmCompletionResponse {
-                        response,
-                        includes_status_messages,
-                    } = Self::perform_llm_completion(
-                        client.clone(),
-                        llm_api_token,
-                        organization_id,
-                        app_version,
-                        CompletionBody {
-                            thread_id,
-                            prompt_id,
-                            provider: cloud_llm_client::LanguageModelProvider::OpenAi,
-                            model: request.model.clone(),
-                            provider_request: serde_json::to_value(&request)
-                                .map_err(|e| anyhow!(e))?,
-                        },
-                    )
-                    .await?;
-
-                    let mut mapper = OpenAiResponseEventMapper::new();
-                    Ok(map_cloud_completion_events(
-                        Box::pin(response_lines(response, includes_status_messages)),
-                        &provider_name,
-                        move |event| mapper.map_event(event),
-                    ))
-                });
-                async move { Ok(future.await?.boxed()) }.boxed()
-            }
-            cloud_llm_client::LanguageModelProvider::XAi => {
-                let client = self.client.clone();
-                let request = into_open_ai(
-                    request,
-                    &self.model.id.0,
-                    self.model.supports_parallel_tool_calls,
-                    false,
-                    None,
-                    None,
-                );
-                let llm_api_token = self.llm_api_token.clone();
-                let organization_id = organization_id.clone();
-                let future = self.request_limiter.stream(async move {
-                    let PerformLlmCompletionResponse {
-                        response,
-                        includes_status_messages,
-                    } = Self::perform_llm_completion(
-                        client.clone(),
-                        llm_api_token,
-                        organization_id,
-                        app_version,
-                        CompletionBody {
-                            thread_id,
-                            prompt_id,
-                            provider: cloud_llm_client::LanguageModelProvider::XAi,
-                            model: request.model.clone(),
-                            provider_request: serde_json::to_value(&request)
-                                .map_err(|e| anyhow!(e))?,
-                        },
-                    )
-                    .await?;
-
-                    let mut mapper = OpenAiEventMapper::new();
-                    Ok(map_cloud_completion_events(
-                        Box::pin(response_lines(response, includes_status_messages)),
-                        &provider_name,
-                        move |event| mapper.map_event(event),
-                    ))
-                });
-                async move { Ok(future.await?.boxed()) }.boxed()
-            }
-            cloud_llm_client::LanguageModelProvider::Google => {
-                let client = self.client.clone();
-                let request =
-                    into_google(request, self.model.id.to_string(), GoogleModelMode::Default);
-                let llm_api_token = self.llm_api_token.clone();
-                let future = self.request_limiter.stream(async move {
-                    let PerformLlmCompletionResponse {
-                        response,
-                        includes_status_messages,
-                    } = Self::perform_llm_completion(
-                        client.clone(),
-                        llm_api_token,
-                        organization_id,
-                        app_version,
-                        CompletionBody {
-                            thread_id,
-                            prompt_id,
-                            provider: cloud_llm_client::LanguageModelProvider::Google,
-                            model: request.model.model_id.clone(),
-                            provider_request: serde_json::to_value(&request)
-                                .map_err(|e| anyhow!(e))?,
-                        },
-                    )
-                    .await?;
-
-                    let mut mapper = GoogleEventMapper::new();
-                    Ok(map_cloud_completion_events(
-                        Box::pin(response_lines(response, includes_status_messages)),
-                        &provider_name,
-                        move |event| mapper.map_event(event),
-                    ))
-                });
-                async move { Ok(future.await?.boxed()) }.boxed()
-            }
-        }
-    }
-}
-
-fn map_cloud_completion_events<T, F>(
-    stream: Pin<Box<dyn Stream<Item = Result<CompletionEvent<T>>> + Send>>,
-    provider: &LanguageModelProviderName,
-    mut map_callback: F,
-) -> BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
-where
-    T: DeserializeOwned + 'static,
-    F: FnMut(T) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
-        + Send
-        + 'static,
-{
-    let provider = provider.clone();
-    let mut stream = stream.fuse();
-
-    let mut saw_stream_ended = false;
-
-    let mut done = false;
-    let mut pending = VecDeque::new();
-
-    stream::poll_fn(move |cx| {
-        loop {
-            if let Some(item) = pending.pop_front() {
-                return Poll::Ready(Some(item));
-            }
-
-            if done {
-                return Poll::Ready(None);
-            }
-
-            match stream.poll_next_unpin(cx) {
-                Poll::Ready(Some(event)) => {
-                    let items = match event {
-                        Err(error) => {
-                            vec![Err(LanguageModelCompletionError::from(error))]
-                        }
-                        Ok(CompletionEvent::Status(CompletionRequestStatus::StreamEnded)) => {
-                            saw_stream_ended = true;
-                            vec![]
-                        }
-                        Ok(CompletionEvent::Status(status)) => {
-                            LanguageModelCompletionEvent::from_completion_request_status(
-                                status,
-                                provider.clone(),
-                            )
-                            .transpose()
-                            .map(|event| vec![event])
-                            .unwrap_or_default()
-                        }
-                        Ok(CompletionEvent::Event(event)) => map_callback(event),
-                    };
-                    pending.extend(items);
-                }
-                Poll::Ready(None) => {
-                    done = true;
-
-                    if !saw_stream_ended {
-                        return Poll::Ready(Some(Err(
-                            LanguageModelCompletionError::StreamEndedUnexpectedly {
-                                provider: provider.clone(),
-                            },
-                        )));
-                    }
-                }
-                Poll::Pending => return Poll::Pending,
-            }
-        }
-    })
-    .boxed()
-}
-
-fn provider_name(provider: &cloud_llm_client::LanguageModelProvider) -> LanguageModelProviderName {
-    match provider {
-        cloud_llm_client::LanguageModelProvider::Anthropic => ANTHROPIC_PROVIDER_NAME,
-        cloud_llm_client::LanguageModelProvider::OpenAi => OPEN_AI_PROVIDER_NAME,
-        cloud_llm_client::LanguageModelProvider::Google => GOOGLE_PROVIDER_NAME,
-        cloud_llm_client::LanguageModelProvider::XAi => X_AI_PROVIDER_NAME,
-    }
-}
-
-fn response_lines<T: DeserializeOwned>(
-    response: Response<AsyncBody>,
-    includes_status_messages: bool,
-) -> impl Stream<Item = Result<CompletionEvent<T>>> {
-    futures::stream::try_unfold(
-        (String::new(), BufReader::new(response.into_body())),
-        move |(mut line, mut body)| async move {
-            match body.read_line(&mut line).await {
-                Ok(0) => Ok(None),
-                Ok(_) => {
-                    let event = if includes_status_messages {
-                        serde_json::from_str::<CompletionEvent<T>>(&line)?
-                    } else {
-                        CompletionEvent::Event(serde_json::from_str::<T>(&line)?)
-                    };
-
-                    line.clear();
-                    Ok(Some((event, (line, body))))
-                }
-                Err(e) => Err(e.into()),
-            }
-        },
-    )
-}
-
 #[derive(IntoElement, RegisterComponent)]
 struct ZedAiConfiguration {
     is_connected: bool,

crates/language_models/src/provider/copilot_chat.rs 🔗

@@ -32,7 +32,7 @@ use ui::prelude::*;
 use util::debug_panic;
 
 use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic};
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
+use language_model::util::{fix_streamed_json, parse_tool_arguments};
 
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
 const PROVIDER_NAME: LanguageModelProviderName =
@@ -268,15 +268,15 @@ impl LanguageModel for CopilotChatLanguageModel {
         levels
             .iter()
             .map(|level| {
-                let name: SharedString = match level.as_str() {
+                let name = match level.as_str() {
                     "low" => "Low".into(),
                     "medium" => "Medium".into(),
                     "high" => "High".into(),
-                    _ => SharedString::from(level.clone()),
+                    _ => language_model::SharedString::from(level.clone()),
                 };
                 LanguageModelEffortLevel {
                     name,
-                    value: SharedString::from(level.clone()),
+                    value: language_model::SharedString::from(level.clone()),
                     is_default: level == "high",
                 }
             })

crates/language_models/src/provider/deepseek.rs 🔗

@@ -23,7 +23,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
+use language_model::util::{fix_streamed_json, parse_tool_arguments};
 
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek");

crates/language_models/src/provider/google.rs 🔗

@@ -1,32 +1,25 @@
 use anyhow::{Context as _, Result};
 use collections::BTreeMap;
 use credentials_provider::CredentialsProvider;
-use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
-use google_ai::{
-    FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction,
-    ThinkingConfig, UsageMetadata,
-};
+use futures::{FutureExt, StreamExt, future::BoxFuture};
+pub use google_ai::completion::{GoogleEventMapper, count_google_tokens, into_google};
+use google_ai::{GenerateContentResponse, GoogleModelMode};
 use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
 use http_client::HttpClient;
 use language_model::{
     AuthenticateError, ConfigurationViewTargetAgent, EnvVar, LanguageModelCompletionError,
     LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolSchemaFormat,
-    LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason,
 };
 use language_model::{
     GOOGLE_PROVIDER_ID, GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelId,
     LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
-    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
+    LanguageModelProviderState, LanguageModelRequest, RateLimiter,
 };
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 pub use settings::GoogleAvailableModel as AvailableModel;
 use settings::{Settings, SettingsStore};
-use std::pin::Pin;
-use std::sync::{
-    Arc, LazyLock,
-    atomic::{self, AtomicU64},
-};
+use std::sync::{Arc, LazyLock};
 use strum::IntoEnumIterator;
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
@@ -394,369 +387,6 @@ impl LanguageModel for GoogleLanguageModel {
     }
 }
 
-pub fn into_google(
-    mut request: LanguageModelRequest,
-    model_id: String,
-    mode: GoogleModelMode,
-) -> google_ai::GenerateContentRequest {
-    fn map_content(content: Vec<MessageContent>) -> Vec<Part> {
-        content
-            .into_iter()
-            .flat_map(|content| match content {
-                language_model::MessageContent::Text(text) => {
-                    if !text.is_empty() {
-                        vec![Part::TextPart(google_ai::TextPart { text })]
-                    } else {
-                        vec![]
-                    }
-                }
-                language_model::MessageContent::Thinking {
-                    text: _,
-                    signature: Some(signature),
-                } => {
-                    if !signature.is_empty() {
-                        vec![Part::ThoughtPart(google_ai::ThoughtPart {
-                            thought: true,
-                            thought_signature: signature,
-                        })]
-                    } else {
-                        vec![]
-                    }
-                }
-                language_model::MessageContent::Thinking { .. } => {
-                    vec![]
-                }
-                language_model::MessageContent::RedactedThinking(_) => vec![],
-                language_model::MessageContent::Image(image) => {
-                    vec![Part::InlineDataPart(google_ai::InlineDataPart {
-                        inline_data: google_ai::GenerativeContentBlob {
-                            mime_type: "image/png".to_string(),
-                            data: image.source.to_string(),
-                        },
-                    })]
-                }
-                language_model::MessageContent::ToolUse(tool_use) => {
-                    // Normalize empty string signatures to None
-                    let thought_signature = tool_use.thought_signature.filter(|s| !s.is_empty());
-
-                    vec![Part::FunctionCallPart(google_ai::FunctionCallPart {
-                        function_call: google_ai::FunctionCall {
-                            name: tool_use.name.to_string(),
-                            args: tool_use.input,
-                        },
-                        thought_signature,
-                    })]
-                }
-                language_model::MessageContent::ToolResult(tool_result) => {
-                    match tool_result.content {
-                        language_model::LanguageModelToolResultContent::Text(text) => {
-                            vec![Part::FunctionResponsePart(
-                                google_ai::FunctionResponsePart {
-                                    function_response: google_ai::FunctionResponse {
-                                        name: tool_result.tool_name.to_string(),
-                                        // The API expects a valid JSON object
-                                        response: serde_json::json!({
-                                            "output": text
-                                        }),
-                                    },
-                                },
-                            )]
-                        }
-                        language_model::LanguageModelToolResultContent::Image(image) => {
-                            vec![
-                                Part::FunctionResponsePart(google_ai::FunctionResponsePart {
-                                    function_response: google_ai::FunctionResponse {
-                                        name: tool_result.tool_name.to_string(),
-                                        // The API expects a valid JSON object
-                                        response: serde_json::json!({
-                                            "output": "Tool responded with an image"
-                                        }),
-                                    },
-                                }),
-                                Part::InlineDataPart(google_ai::InlineDataPart {
-                                    inline_data: google_ai::GenerativeContentBlob {
-                                        mime_type: "image/png".to_string(),
-                                        data: image.source.to_string(),
-                                    },
-                                }),
-                            ]
-                        }
-                    }
-                }
-            })
-            .collect()
-    }
-
-    let system_instructions = if request
-        .messages
-        .first()
-        .is_some_and(|msg| matches!(msg.role, Role::System))
-    {
-        let message = request.messages.remove(0);
-        Some(SystemInstruction {
-            parts: map_content(message.content),
-        })
-    } else {
-        None
-    };
-
-    google_ai::GenerateContentRequest {
-        model: google_ai::ModelName { model_id },
-        system_instruction: system_instructions,
-        contents: request
-            .messages
-            .into_iter()
-            .filter_map(|message| {
-                let parts = map_content(message.content);
-                if parts.is_empty() {
-                    None
-                } else {
-                    Some(google_ai::Content {
-                        parts,
-                        role: match message.role {
-                            Role::User => google_ai::Role::User,
-                            Role::Assistant => google_ai::Role::Model,
-                            Role::System => google_ai::Role::User, // Google AI doesn't have a system role
-                        },
-                    })
-                }
-            })
-            .collect(),
-        generation_config: Some(google_ai::GenerationConfig {
-            candidate_count: Some(1),
-            stop_sequences: Some(request.stop),
-            max_output_tokens: None,
-            temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
-            thinking_config: match (request.thinking_allowed, mode) {
-                (true, GoogleModelMode::Thinking { budget_tokens }) => {
-                    budget_tokens.map(|thinking_budget| ThinkingConfig { thinking_budget })
-                }
-                _ => None,
-            },
-            top_p: None,
-            top_k: None,
-        }),
-        safety_settings: None,
-        tools: (!request.tools.is_empty()).then(|| {
-            vec![google_ai::Tool {
-                function_declarations: request
-                    .tools
-                    .into_iter()
-                    .map(|tool| FunctionDeclaration {
-                        name: tool.name,
-                        description: tool.description,
-                        parameters: tool.input_schema,
-                    })
-                    .collect(),
-            }]
-        }),
-        tool_config: request.tool_choice.map(|choice| google_ai::ToolConfig {
-            function_calling_config: google_ai::FunctionCallingConfig {
-                mode: match choice {
-                    LanguageModelToolChoice::Auto => google_ai::FunctionCallingMode::Auto,
-                    LanguageModelToolChoice::Any => google_ai::FunctionCallingMode::Any,
-                    LanguageModelToolChoice::None => google_ai::FunctionCallingMode::None,
-                },
-                allowed_function_names: None,
-            },
-        }),
-    }
-}
-
-pub struct GoogleEventMapper {
-    usage: UsageMetadata,
-    stop_reason: StopReason,
-}
-
-impl GoogleEventMapper {
-    pub fn new() -> Self {
-        Self {
-            usage: UsageMetadata::default(),
-            stop_reason: StopReason::EndTurn,
-        }
-    }
-
-    pub fn map_stream(
-        mut self,
-        events: Pin<Box<dyn Send + Stream<Item = Result<GenerateContentResponse>>>>,
-    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
-    {
-        events
-            .map(Some)
-            .chain(futures::stream::once(async { None }))
-            .flat_map(move |event| {
-                futures::stream::iter(match event {
-                    Some(Ok(event)) => self.map_event(event),
-                    Some(Err(error)) => {
-                        vec![Err(LanguageModelCompletionError::from(error))]
-                    }
-                    None => vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))],
-                })
-            })
-    }
-
-    pub fn map_event(
-        &mut self,
-        event: GenerateContentResponse,
-    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-        static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
-
-        let mut events: Vec<_> = Vec::new();
-        let mut wants_to_use_tool = false;
-        if let Some(usage_metadata) = event.usage_metadata {
-            update_usage(&mut self.usage, &usage_metadata);
-            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
-                convert_usage(&self.usage),
-            )))
-        }
-
-        if let Some(prompt_feedback) = event.prompt_feedback
-            && let Some(block_reason) = prompt_feedback.block_reason.as_deref()
-        {
-            self.stop_reason = match block_reason {
-                "SAFETY" | "OTHER" | "BLOCKLIST" | "PROHIBITED_CONTENT" | "IMAGE_SAFETY" => {
-                    StopReason::Refusal
-                }
-                _ => {
-                    log::error!("Unexpected Google block_reason: {block_reason}");
-                    StopReason::Refusal
-                }
-            };
-            events.push(Ok(LanguageModelCompletionEvent::Stop(self.stop_reason)));
-
-            return events;
-        }
-
-        if let Some(candidates) = event.candidates {
-            for candidate in candidates {
-                if let Some(finish_reason) = candidate.finish_reason.as_deref() {
-                    self.stop_reason = match finish_reason {
-                        "STOP" => StopReason::EndTurn,
-                        "MAX_TOKENS" => StopReason::MaxTokens,
-                        _ => {
-                            log::error!("Unexpected google finish_reason: {finish_reason}");
-                            StopReason::EndTurn
-                        }
-                    };
-                }
-                candidate
-                    .content
-                    .parts
-                    .into_iter()
-                    .for_each(|part| match part {
-                        Part::TextPart(text_part) => {
-                            events.push(Ok(LanguageModelCompletionEvent::Text(text_part.text)))
-                        }
-                        Part::InlineDataPart(_) => {}
-                        Part::FunctionCallPart(function_call_part) => {
-                            wants_to_use_tool = true;
-                            let name: Arc<str> = function_call_part.function_call.name.into();
-                            let next_tool_id =
-                                TOOL_CALL_COUNTER.fetch_add(1, atomic::Ordering::SeqCst);
-                            let id: LanguageModelToolUseId =
-                                format!("{}-{}", name, next_tool_id).into();
-
-                            // Normalize empty string signatures to None
-                            let thought_signature = function_call_part
-                                .thought_signature
-                                .filter(|s| !s.is_empty());
-
-                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
-                                LanguageModelToolUse {
-                                    id,
-                                    name,
-                                    is_input_complete: true,
-                                    raw_input: function_call_part.function_call.args.to_string(),
-                                    input: function_call_part.function_call.args,
-                                    thought_signature,
-                                },
-                            )));
-                        }
-                        Part::FunctionResponsePart(_) => {}
-                        Part::ThoughtPart(part) => {
-                            events.push(Ok(LanguageModelCompletionEvent::Thinking {
-                                text: "(Encrypted thought)".to_string(), // TODO: Can we populate this from thought summaries?
-                                signature: Some(part.thought_signature),
-                            }));
-                        }
-                    });
-            }
-        }
-
-        // Even when Gemini wants to use a Tool, the API
-        // responds with `finish_reason: STOP`
-        if wants_to_use_tool {
-            self.stop_reason = StopReason::ToolUse;
-            events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
-        }
-        events
-    }
-}
-
-pub fn count_google_tokens(
-    request: LanguageModelRequest,
-    cx: &App,
-) -> BoxFuture<'static, Result<u64>> {
-    // We couldn't use the GoogleLanguageModelProvider to count tokens because the github copilot doesn't have the access to google_ai directly.
-    // So we have to use tokenizer from tiktoken_rs to count tokens.
-    cx.background_spawn(async move {
-        let messages = request
-            .messages
-            .into_iter()
-            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
-                role: match message.role {
-                    Role::User => "user".into(),
-                    Role::Assistant => "assistant".into(),
-                    Role::System => "system".into(),
-                },
-                content: Some(message.string_contents()),
-                name: None,
-                function_call: None,
-            })
-            .collect::<Vec<_>>();
-
-        // Tiktoken doesn't yet support these models, so we manually use the
-        // same tokenizer as GPT-4.
-        tiktoken_rs::num_tokens_from_messages("gpt-4", &messages).map(|tokens| tokens as u64)
-    })
-    .boxed()
-}
-
-fn update_usage(usage: &mut UsageMetadata, new: &UsageMetadata) {
-    if let Some(prompt_token_count) = new.prompt_token_count {
-        usage.prompt_token_count = Some(prompt_token_count);
-    }
-    if let Some(cached_content_token_count) = new.cached_content_token_count {
-        usage.cached_content_token_count = Some(cached_content_token_count);
-    }
-    if let Some(candidates_token_count) = new.candidates_token_count {
-        usage.candidates_token_count = Some(candidates_token_count);
-    }
-    if let Some(tool_use_prompt_token_count) = new.tool_use_prompt_token_count {
-        usage.tool_use_prompt_token_count = Some(tool_use_prompt_token_count);
-    }
-    if let Some(thoughts_token_count) = new.thoughts_token_count {
-        usage.thoughts_token_count = Some(thoughts_token_count);
-    }
-    if let Some(total_token_count) = new.total_token_count {
-        usage.total_token_count = Some(total_token_count);
-    }
-}
-
-fn convert_usage(usage: &UsageMetadata) -> language_model::TokenUsage {
-    let prompt_tokens = usage.prompt_token_count.unwrap_or(0);
-    let cached_tokens = usage.cached_content_token_count.unwrap_or(0);
-    let input_tokens = prompt_tokens - cached_tokens;
-    let output_tokens = usage.candidates_token_count.unwrap_or(0);
-
-    language_model::TokenUsage {
-        input_tokens,
-        output_tokens,
-        cache_read_input_tokens: cached_tokens,
-        cache_creation_input_tokens: 0,
-    }
-}
-
 struct ConfigurationView {
     api_key_editor: Entity<InputField>,
     state: Entity<State>,
@@ -895,428 +525,3 @@ impl Render for ConfigurationView {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use google_ai::{
-        Content, FunctionCall, FunctionCallPart, GenerateContentCandidate, GenerateContentResponse,
-        Part, Role as GoogleRole, TextPart,
-    };
-    use language_model::{LanguageModelToolUseId, MessageContent, Role};
-    use serde_json::json;
-
-    #[test]
-    fn test_function_call_with_signature_creates_tool_use_with_signature() {
-        let mut mapper = GoogleEventMapper::new();
-
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
-                        function_call: FunctionCall {
-                            name: "test_function".to_string(),
-                            args: json!({"arg": "value"}),
-                        },
-                        thought_signature: Some("test_signature_123".to_string()),
-                    })],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        assert_eq!(events.len(), 2); // ToolUse event + Stop event
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
-            assert_eq!(tool_use.name.as_ref(), "test_function");
-            assert_eq!(
-                tool_use.thought_signature.as_deref(),
-                Some("test_signature_123")
-            );
-        } else {
-            panic!("Expected ToolUse event");
-        }
-    }
-
-    #[test]
-    fn test_function_call_without_signature_has_none() {
-        let mut mapper = GoogleEventMapper::new();
-
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
-                        function_call: FunctionCall {
-                            name: "test_function".to_string(),
-                            args: json!({"arg": "value"}),
-                        },
-                        thought_signature: None,
-                    })],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
-            assert_eq!(tool_use.thought_signature, None);
-        } else {
-            panic!("Expected ToolUse event");
-        }
-    }
-
-    #[test]
-    fn test_empty_string_signature_normalized_to_none() {
-        let mut mapper = GoogleEventMapper::new();
-
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
-                        function_call: FunctionCall {
-                            name: "test_function".to_string(),
-                            args: json!({"arg": "value"}),
-                        },
-                        thought_signature: Some("".to_string()),
-                    })],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
-            assert_eq!(tool_use.thought_signature, None);
-        } else {
-            panic!("Expected ToolUse event");
-        }
-    }
-
-    #[test]
-    fn test_parallel_function_calls_preserve_signatures() {
-        let mut mapper = GoogleEventMapper::new();
-
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![
-                        Part::FunctionCallPart(FunctionCallPart {
-                            function_call: FunctionCall {
-                                name: "function_1".to_string(),
-                                args: json!({"arg": "value1"}),
-                            },
-                            thought_signature: Some("signature_1".to_string()),
-                        }),
-                        Part::FunctionCallPart(FunctionCallPart {
-                            function_call: FunctionCall {
-                                name: "function_2".to_string(),
-                                args: json!({"arg": "value2"}),
-                            },
-                            thought_signature: None,
-                        }),
-                    ],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        assert_eq!(events.len(), 3); // 2 ToolUse events + Stop event
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
-            assert_eq!(tool_use.name.as_ref(), "function_1");
-            assert_eq!(tool_use.thought_signature.as_deref(), Some("signature_1"));
-        } else {
-            panic!("Expected ToolUse event for function_1");
-        }
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[1] {
-            assert_eq!(tool_use.name.as_ref(), "function_2");
-            assert_eq!(tool_use.thought_signature, None);
-        } else {
-            panic!("Expected ToolUse event for function_2");
-        }
-    }
-
-    #[test]
-    fn test_tool_use_with_signature_converts_to_function_call_part() {
-        let tool_use = language_model::LanguageModelToolUse {
-            id: LanguageModelToolUseId::from("test_id"),
-            name: "test_function".into(),
-            raw_input: json!({"arg": "value"}).to_string(),
-            input: json!({"arg": "value"}),
-            is_input_complete: true,
-            thought_signature: Some("test_signature_456".to_string()),
-        };
-
-        let request = super::into_google(
-            LanguageModelRequest {
-                messages: vec![language_model::LanguageModelRequestMessage {
-                    role: Role::Assistant,
-                    content: vec![MessageContent::ToolUse(tool_use)],
-                    cache: false,
-                    reasoning_details: None,
-                }],
-                ..Default::default()
-            },
-            "gemini-2.5-flash".to_string(),
-            GoogleModelMode::Default,
-        );
-
-        assert_eq!(request.contents[0].parts.len(), 1);
-        if let Part::FunctionCallPart(fc_part) = &request.contents[0].parts[0] {
-            assert_eq!(fc_part.function_call.name, "test_function");
-            assert_eq!(
-                fc_part.thought_signature.as_deref(),
-                Some("test_signature_456")
-            );
-        } else {
-            panic!("Expected FunctionCallPart");
-        }
-    }
-
-    #[test]
-    fn test_tool_use_without_signature_omits_field() {
-        let tool_use = language_model::LanguageModelToolUse {
-            id: LanguageModelToolUseId::from("test_id"),
-            name: "test_function".into(),
-            raw_input: json!({"arg": "value"}).to_string(),
-            input: json!({"arg": "value"}),
-            is_input_complete: true,
-            thought_signature: None,
-        };
-
-        let request = super::into_google(
-            LanguageModelRequest {
-                messages: vec![language_model::LanguageModelRequestMessage {
-                    role: Role::Assistant,
-                    content: vec![MessageContent::ToolUse(tool_use)],
-                    cache: false,
-                    reasoning_details: None,
-                }],
-                ..Default::default()
-            },
-            "gemini-2.5-flash".to_string(),
-            GoogleModelMode::Default,
-        );
-
-        assert_eq!(request.contents[0].parts.len(), 1);
-        if let Part::FunctionCallPart(fc_part) = &request.contents[0].parts[0] {
-            assert_eq!(fc_part.thought_signature, None);
-        } else {
-            panic!("Expected FunctionCallPart");
-        }
-    }
-
-    #[test]
-    fn test_empty_signature_in_tool_use_normalized_to_none() {
-        let tool_use = language_model::LanguageModelToolUse {
-            id: LanguageModelToolUseId::from("test_id"),
-            name: "test_function".into(),
-            raw_input: json!({"arg": "value"}).to_string(),
-            input: json!({"arg": "value"}),
-            is_input_complete: true,
-            thought_signature: Some("".to_string()),
-        };
-
-        let request = super::into_google(
-            LanguageModelRequest {
-                messages: vec![language_model::LanguageModelRequestMessage {
-                    role: Role::Assistant,
-                    content: vec![MessageContent::ToolUse(tool_use)],
-                    cache: false,
-                    reasoning_details: None,
-                }],
-                ..Default::default()
-            },
-            "gemini-2.5-flash".to_string(),
-            GoogleModelMode::Default,
-        );
-
-        if let Part::FunctionCallPart(fc_part) = &request.contents[0].parts[0] {
-            assert_eq!(fc_part.thought_signature, None);
-        } else {
-            panic!("Expected FunctionCallPart");
-        }
-    }
-
-    #[test]
-    fn test_round_trip_preserves_signature() {
-        let mut mapper = GoogleEventMapper::new();
-
-        // Simulate receiving a response from Google with a signature
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
-                        function_call: FunctionCall {
-                            name: "test_function".to_string(),
-                            args: json!({"arg": "value"}),
-                        },
-                        thought_signature: Some("round_trip_sig".to_string()),
-                    })],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        let tool_use = if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
-            tool_use.clone()
-        } else {
-            panic!("Expected ToolUse event");
-        };
-
-        // Convert back to Google format
-        let request = super::into_google(
-            LanguageModelRequest {
-                messages: vec![language_model::LanguageModelRequestMessage {
-                    role: Role::Assistant,
-                    content: vec![MessageContent::ToolUse(tool_use)],
-                    cache: false,
-                    reasoning_details: None,
-                }],
-                ..Default::default()
-            },
-            "gemini-2.5-flash".to_string(),
-            GoogleModelMode::Default,
-        );
-
-        // Verify signature is preserved
-        if let Part::FunctionCallPart(fc_part) = &request.contents[0].parts[0] {
-            assert_eq!(fc_part.thought_signature.as_deref(), Some("round_trip_sig"));
-        } else {
-            panic!("Expected FunctionCallPart");
-        }
-    }
-
-    #[test]
-    fn test_mixed_text_and_function_call_with_signature() {
-        let mut mapper = GoogleEventMapper::new();
-
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![
-                        Part::TextPart(TextPart {
-                            text: "I'll help with that.".to_string(),
-                        }),
-                        Part::FunctionCallPart(FunctionCallPart {
-                            function_call: FunctionCall {
-                                name: "helper_function".to_string(),
-                                args: json!({"query": "help"}),
-                            },
-                            thought_signature: Some("mixed_sig".to_string()),
-                        }),
-                    ],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        assert_eq!(events.len(), 3); // Text event + ToolUse event + Stop event
-
-        if let Ok(LanguageModelCompletionEvent::Text(text)) = &events[0] {
-            assert_eq!(text, "I'll help with that.");
-        } else {
-            panic!("Expected Text event");
-        }
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[1] {
-            assert_eq!(tool_use.name.as_ref(), "helper_function");
-            assert_eq!(tool_use.thought_signature.as_deref(), Some("mixed_sig"));
-        } else {
-            panic!("Expected ToolUse event");
-        }
-    }
-
-    #[test]
-    fn test_special_characters_in_signature_preserved() {
-        let mut mapper = GoogleEventMapper::new();
-
-        let signature_with_special_chars = "sig<>\"'&%$#@!{}[]".to_string();
-
-        let response = GenerateContentResponse {
-            candidates: Some(vec![GenerateContentCandidate {
-                index: Some(0),
-                content: Content {
-                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
-                        function_call: FunctionCall {
-                            name: "test_function".to_string(),
-                            args: json!({"arg": "value"}),
-                        },
-                        thought_signature: Some(signature_with_special_chars.clone()),
-                    })],
-                    role: GoogleRole::Model,
-                },
-                finish_reason: None,
-                finish_message: None,
-                safety_ratings: None,
-                citation_metadata: None,
-            }]),
-            prompt_feedback: None,
-            usage_metadata: None,
-        };
-
-        let events = mapper.map_event(response);
-
-        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
-            assert_eq!(
-                tool_use.thought_signature.as_deref(),
-                Some(signature_with_special_chars.as_str())
-            );
-        } else {
-            panic!("Expected ToolUse event");
-        }
-    }
-}

crates/language_models/src/provider/lmstudio.rs 🔗

@@ -28,7 +28,7 @@ use ui::{
 use ui_input::InputField;
 
 use crate::AllLanguageModelSettings;
-use crate::provider::util::parse_tool_arguments;
+use language_model::util::parse_tool_arguments;
 
 const LMSTUDIO_DOWNLOAD_URL: &str = "https://lmstudio.ai/download";
 const LMSTUDIO_CATALOG_URL: &str = "https://lmstudio.ai/models";

crates/language_models/src/provider/mistral.rs 🔗

@@ -23,7 +23,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
+use language_model::util::{fix_streamed_json, parse_tool_arguments};
 
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("mistral");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Mistral");

crates/language_models/src/provider/open_ai.rs 🔗

@@ -1,41 +1,33 @@
-use anyhow::{Result, anyhow};
-use collections::{BTreeMap, HashMap};
+use anyhow::Result;
+use collections::BTreeMap;
 use credentials_provider::CredentialsProvider;
-use futures::Stream;
 use futures::{FutureExt, StreamExt, future::BoxFuture};
 use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
 use http_client::HttpClient;
 use language_model::{
     ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
-    LanguageModelCompletionEvent, LanguageModelId, LanguageModelImage, LanguageModelName,
-    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
-    LanguageModelProviderState, LanguageModelRequest, LanguageModelRequestMessage,
-    LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse,
-    LanguageModelToolUseId, MessageContent, OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME,
-    RateLimiter, Role, StopReason, TokenUsage, env_var,
+    LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
+    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
+    LanguageModelRequest, LanguageModelToolChoice, OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME,
+    RateLimiter, env_var,
 };
 use menu;
-use open_ai::responses::{
-    ResponseFunctionCallItem, ResponseFunctionCallOutputContent, ResponseFunctionCallOutputItem,
-    ResponseInputContent, ResponseInputItem, ResponseMessageItem,
-};
 use open_ai::{
-    ImageUrl, Model, OPEN_AI_API_URL, ReasoningEffort, ResponseStreamEvent,
-    responses::{
-        Request as ResponseRequest, ResponseOutputItem, ResponseSummary as ResponsesSummary,
-        ResponseUsage as ResponsesUsage, StreamEvent as ResponsesStreamEvent, stream_response,
-    },
+    OPEN_AI_API_URL, ResponseStreamEvent,
+    responses::{Request as ResponseRequest, StreamEvent as ResponsesStreamEvent, stream_response},
     stream_completion,
 };
 use settings::{OpenAiAvailableModel as AvailableModel, Settings, SettingsStore};
-use std::pin::Pin;
 use std::sync::{Arc, LazyLock};
 use strum::IntoEnumIterator;
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
+pub use open_ai::completion::{
+    OpenAiEventMapper, OpenAiResponseEventMapper, collect_tiktoken_messages, count_open_ai_tokens,
+    into_open_ai, into_open_ai_response,
+};
 
 const PROVIDER_ID: LanguageModelProviderId = OPEN_AI_PROVIDER_ID;
 const PROVIDER_NAME: LanguageModelProviderName = OPEN_AI_PROVIDER_NAME;
@@ -189,7 +181,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider {
                     max_tokens: model.max_tokens,
                     max_output_tokens: model.max_output_tokens,
                     max_completion_tokens: model.max_completion_tokens,
-                    reasoning_effort: model.reasoning_effort.clone(),
+                    reasoning_effort: model.reasoning_effort,
                     supports_chat_completions: model.capabilities.chat_completions,
                 },
             );
@@ -382,7 +374,9 @@ impl LanguageModel for OpenAiLanguageModel {
         request: LanguageModelRequest,
         cx: &App,
     ) -> BoxFuture<'static, Result<u64>> {
-        count_open_ai_tokens(request, self.model.clone(), cx)
+        let model = self.model.clone();
+        cx.background_spawn(async move { count_open_ai_tokens(request, model) })
+            .boxed()
     }
 
     fn stream_completion(
@@ -433,853 +427,6 @@ impl LanguageModel for OpenAiLanguageModel {
     }
 }
 
-pub fn into_open_ai(
-    request: LanguageModelRequest,
-    model_id: &str,
-    supports_parallel_tool_calls: bool,
-    supports_prompt_cache_key: bool,
-    max_output_tokens: Option<u64>,
-    reasoning_effort: Option<ReasoningEffort>,
-) -> open_ai::Request {
-    let stream = !model_id.starts_with("o1-");
-
-    let mut messages = Vec::new();
-    for message in request.messages {
-        for content in message.content {
-            match content {
-                MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
-                    let should_add = if message.role == Role::User {
-                        // Including whitespace-only user messages can cause error with OpenAI compatible APIs
-                        // See https://github.com/zed-industries/zed/issues/40097
-                        !text.trim().is_empty()
-                    } else {
-                        !text.is_empty()
-                    };
-                    if should_add {
-                        add_message_content_part(
-                            open_ai::MessagePart::Text { text },
-                            message.role,
-                            &mut messages,
-                        );
-                    }
-                }
-                MessageContent::RedactedThinking(_) => {}
-                MessageContent::Image(image) => {
-                    add_message_content_part(
-                        open_ai::MessagePart::Image {
-                            image_url: ImageUrl {
-                                url: image.to_base64_url(),
-                                detail: None,
-                            },
-                        },
-                        message.role,
-                        &mut messages,
-                    );
-                }
-                MessageContent::ToolUse(tool_use) => {
-                    let tool_call = open_ai::ToolCall {
-                        id: tool_use.id.to_string(),
-                        content: open_ai::ToolCallContent::Function {
-                            function: open_ai::FunctionContent {
-                                name: tool_use.name.to_string(),
-                                arguments: serde_json::to_string(&tool_use.input)
-                                    .unwrap_or_default(),
-                            },
-                        },
-                    };
-
-                    if let Some(open_ai::RequestMessage::Assistant { tool_calls, .. }) =
-                        messages.last_mut()
-                    {
-                        tool_calls.push(tool_call);
-                    } else {
-                        messages.push(open_ai::RequestMessage::Assistant {
-                            content: None,
-                            tool_calls: vec![tool_call],
-                        });
-                    }
-                }
-                MessageContent::ToolResult(tool_result) => {
-                    let content = match &tool_result.content {
-                        LanguageModelToolResultContent::Text(text) => {
-                            vec![open_ai::MessagePart::Text {
-                                text: text.to_string(),
-                            }]
-                        }
-                        LanguageModelToolResultContent::Image(image) => {
-                            vec![open_ai::MessagePart::Image {
-                                image_url: ImageUrl {
-                                    url: image.to_base64_url(),
-                                    detail: None,
-                                },
-                            }]
-                        }
-                    };
-
-                    messages.push(open_ai::RequestMessage::Tool {
-                        content: content.into(),
-                        tool_call_id: tool_result.tool_use_id.to_string(),
-                    });
-                }
-            }
-        }
-    }
-
-    open_ai::Request {
-        model: model_id.into(),
-        messages,
-        stream,
-        stream_options: if stream {
-            Some(open_ai::StreamOptions::default())
-        } else {
-            None
-        },
-        stop: request.stop,
-        temperature: request.temperature.or(Some(1.0)),
-        max_completion_tokens: max_output_tokens,
-        parallel_tool_calls: if supports_parallel_tool_calls && !request.tools.is_empty() {
-            Some(supports_parallel_tool_calls)
-        } else {
-            None
-        },
-        prompt_cache_key: if supports_prompt_cache_key {
-            request.thread_id
-        } else {
-            None
-        },
-        tools: request
-            .tools
-            .into_iter()
-            .map(|tool| open_ai::ToolDefinition::Function {
-                function: open_ai::FunctionDefinition {
-                    name: tool.name,
-                    description: Some(tool.description),
-                    parameters: Some(tool.input_schema),
-                },
-            })
-            .collect(),
-        tool_choice: request.tool_choice.map(|choice| match choice {
-            LanguageModelToolChoice::Auto => open_ai::ToolChoice::Auto,
-            LanguageModelToolChoice::Any => open_ai::ToolChoice::Required,
-            LanguageModelToolChoice::None => open_ai::ToolChoice::None,
-        }),
-        reasoning_effort,
-    }
-}
-
-pub fn into_open_ai_response(
-    request: LanguageModelRequest,
-    model_id: &str,
-    supports_parallel_tool_calls: bool,
-    supports_prompt_cache_key: bool,
-    max_output_tokens: Option<u64>,
-    reasoning_effort: Option<ReasoningEffort>,
-) -> ResponseRequest {
-    let stream = !model_id.starts_with("o1-");
-
-    let LanguageModelRequest {
-        thread_id,
-        prompt_id: _,
-        intent: _,
-        messages,
-        tools,
-        tool_choice,
-        stop: _,
-        temperature,
-        thinking_allowed: _,
-        thinking_effort: _,
-        speed: _,
-    } = request;
-
-    let mut input_items = Vec::new();
-    for (index, message) in messages.into_iter().enumerate() {
-        append_message_to_response_items(message, index, &mut input_items);
-    }
-
-    let tools: Vec<_> = tools
-        .into_iter()
-        .map(|tool| open_ai::responses::ToolDefinition::Function {
-            name: tool.name,
-            description: Some(tool.description),
-            parameters: Some(tool.input_schema),
-            strict: None,
-        })
-        .collect();
-
-    ResponseRequest {
-        model: model_id.into(),
-        input: input_items,
-        stream,
-        temperature,
-        top_p: None,
-        max_output_tokens,
-        parallel_tool_calls: if tools.is_empty() {
-            None
-        } else {
-            Some(supports_parallel_tool_calls)
-        },
-        tool_choice: tool_choice.map(|choice| match choice {
-            LanguageModelToolChoice::Auto => open_ai::ToolChoice::Auto,
-            LanguageModelToolChoice::Any => open_ai::ToolChoice::Required,
-            LanguageModelToolChoice::None => open_ai::ToolChoice::None,
-        }),
-        tools,
-        prompt_cache_key: if supports_prompt_cache_key {
-            thread_id
-        } else {
-            None
-        },
-        reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig {
-            effort,
-            summary: Some(open_ai::responses::ReasoningSummaryMode::Auto),
-        }),
-    }
-}
-
-fn append_message_to_response_items(
-    message: LanguageModelRequestMessage,
-    index: usize,
-    input_items: &mut Vec<ResponseInputItem>,
-) {
-    let mut content_parts: Vec<ResponseInputContent> = Vec::new();
-
-    for content in message.content {
-        match content {
-            MessageContent::Text(text) => {
-                push_response_text_part(&message.role, text, &mut content_parts);
-            }
-            MessageContent::Thinking { text, .. } => {
-                push_response_text_part(&message.role, text, &mut content_parts);
-            }
-            MessageContent::RedactedThinking(_) => {}
-            MessageContent::Image(image) => {
-                push_response_image_part(&message.role, image, &mut content_parts);
-            }
-            MessageContent::ToolUse(tool_use) => {
-                flush_response_parts(&message.role, index, &mut content_parts, input_items);
-                let call_id = tool_use.id.to_string();
-                input_items.push(ResponseInputItem::FunctionCall(ResponseFunctionCallItem {
-                    call_id,
-                    name: tool_use.name.to_string(),
-                    arguments: tool_use.raw_input,
-                }));
-            }
-            MessageContent::ToolResult(tool_result) => {
-                flush_response_parts(&message.role, index, &mut content_parts, input_items);
-                input_items.push(ResponseInputItem::FunctionCallOutput(
-                    ResponseFunctionCallOutputItem {
-                        call_id: tool_result.tool_use_id.to_string(),
-                        output: match tool_result.content {
-                            LanguageModelToolResultContent::Text(text) => {
-                                ResponseFunctionCallOutputContent::Text(text.to_string())
-                            }
-                            LanguageModelToolResultContent::Image(image) => {
-                                ResponseFunctionCallOutputContent::List(vec![
-                                    ResponseInputContent::Image {
-                                        image_url: image.to_base64_url(),
-                                    },
-                                ])
-                            }
-                        },
-                    },
-                ));
-            }
-        }
-    }
-
-    flush_response_parts(&message.role, index, &mut content_parts, input_items);
-}
-
-fn push_response_text_part(
-    role: &Role,
-    text: impl Into<String>,
-    parts: &mut Vec<ResponseInputContent>,
-) {
-    let text = text.into();
-    if text.trim().is_empty() {
-        return;
-    }
-
-    match role {
-        Role::Assistant => parts.push(ResponseInputContent::OutputText {
-            text,
-            annotations: Vec::new(),
-        }),
-        _ => parts.push(ResponseInputContent::Text { text }),
-    }
-}
-
-fn push_response_image_part(
-    role: &Role,
-    image: LanguageModelImage,
-    parts: &mut Vec<ResponseInputContent>,
-) {
-    match role {
-        Role::Assistant => parts.push(ResponseInputContent::OutputText {
-            text: "[image omitted]".to_string(),
-            annotations: Vec::new(),
-        }),
-        _ => parts.push(ResponseInputContent::Image {
-            image_url: image.to_base64_url(),
-        }),
-    }
-}
-
-fn flush_response_parts(
-    role: &Role,
-    _index: usize,
-    parts: &mut Vec<ResponseInputContent>,
-    input_items: &mut Vec<ResponseInputItem>,
-) {
-    if parts.is_empty() {
-        return;
-    }
-
-    let item = ResponseInputItem::Message(ResponseMessageItem {
-        role: match role {
-            Role::User => open_ai::Role::User,
-            Role::Assistant => open_ai::Role::Assistant,
-            Role::System => open_ai::Role::System,
-        },
-        content: parts.clone(),
-    });
-
-    input_items.push(item);
-    parts.clear();
-}
-
-fn add_message_content_part(
-    new_part: open_ai::MessagePart,
-    role: Role,
-    messages: &mut Vec<open_ai::RequestMessage>,
-) {
-    match (role, messages.last_mut()) {
-        (Role::User, Some(open_ai::RequestMessage::User { content }))
-        | (
-            Role::Assistant,
-            Some(open_ai::RequestMessage::Assistant {
-                content: Some(content),
-                ..
-            }),
-        )
-        | (Role::System, Some(open_ai::RequestMessage::System { content, .. })) => {
-            content.push_part(new_part);
-        }
-        _ => {
-            messages.push(match role {
-                Role::User => open_ai::RequestMessage::User {
-                    content: open_ai::MessageContent::from(vec![new_part]),
-                },
-                Role::Assistant => open_ai::RequestMessage::Assistant {
-                    content: Some(open_ai::MessageContent::from(vec![new_part])),
-                    tool_calls: Vec::new(),
-                },
-                Role::System => open_ai::RequestMessage::System {
-                    content: open_ai::MessageContent::from(vec![new_part]),
-                },
-            });
-        }
-    }
-}
-
-pub struct OpenAiEventMapper {
-    tool_calls_by_index: HashMap<usize, RawToolCall>,
-}
-
-impl OpenAiEventMapper {
-    pub fn new() -> Self {
-        Self {
-            tool_calls_by_index: HashMap::default(),
-        }
-    }
-
-    pub fn map_stream(
-        mut self,
-        events: Pin<Box<dyn Send + Stream<Item = Result<ResponseStreamEvent>>>>,
-    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
-    {
-        events.flat_map(move |event| {
-            futures::stream::iter(match event {
-                Ok(event) => self.map_event(event),
-                Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
-            })
-        })
-    }
-
-    pub fn map_event(
-        &mut self,
-        event: ResponseStreamEvent,
-    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-        let mut events = Vec::new();
-        if let Some(usage) = event.usage {
-            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
-                input_tokens: usage.prompt_tokens,
-                output_tokens: usage.completion_tokens,
-                cache_creation_input_tokens: 0,
-                cache_read_input_tokens: 0,
-            })));
-        }
-
-        let Some(choice) = event.choices.first() else {
-            return events;
-        };
-
-        if let Some(delta) = choice.delta.as_ref() {
-            if let Some(reasoning_content) = delta.reasoning_content.clone() {
-                if !reasoning_content.is_empty() {
-                    events.push(Ok(LanguageModelCompletionEvent::Thinking {
-                        text: reasoning_content,
-                        signature: None,
-                    }));
-                }
-            }
-            if let Some(content) = delta.content.clone() {
-                if !content.is_empty() {
-                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
-                }
-            }
-
-            if let Some(tool_calls) = delta.tool_calls.as_ref() {
-                for tool_call in tool_calls {
-                    let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
-
-                    if let Some(tool_id) = tool_call.id.clone() {
-                        entry.id = tool_id;
-                    }
-
-                    if let Some(function) = tool_call.function.as_ref() {
-                        if let Some(name) = function.name.clone() {
-                            entry.name = name;
-                        }
-
-                        if let Some(arguments) = function.arguments.clone() {
-                            entry.arguments.push_str(&arguments);
-                        }
-                    }
-
-                    if !entry.id.is_empty() && !entry.name.is_empty() {
-                        if let Ok(input) = serde_json::from_str::<serde_json::Value>(
-                            &fix_streamed_json(&entry.arguments),
-                        ) {
-                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
-                                LanguageModelToolUse {
-                                    id: entry.id.clone().into(),
-                                    name: entry.name.as_str().into(),
-                                    is_input_complete: false,
-                                    input,
-                                    raw_input: entry.arguments.clone(),
-                                    thought_signature: None,
-                                },
-                            )));
-                        }
-                    }
-                }
-            }
-        }
-
-        match choice.finish_reason.as_deref() {
-            Some("stop") => {
-                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
-            }
-            Some("tool_calls") => {
-                events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
-                    match parse_tool_arguments(&tool_call.arguments) {
-                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
-                            LanguageModelToolUse {
-                                id: tool_call.id.clone().into(),
-                                name: tool_call.name.as_str().into(),
-                                is_input_complete: true,
-                                input,
-                                raw_input: tool_call.arguments.clone(),
-                                thought_signature: None,
-                            },
-                        )),
-                        Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
-                            id: tool_call.id.into(),
-                            tool_name: tool_call.name.into(),
-                            raw_input: tool_call.arguments.clone().into(),
-                            json_parse_error: error.to_string(),
-                        }),
-                    }
-                }));
-
-                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
-            }
-            Some(stop_reason) => {
-                log::error!("Unexpected OpenAI stop_reason: {stop_reason:?}",);
-                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
-            }
-            None => {}
-        }
-
-        events
-    }
-}
-
-#[derive(Default)]
-struct RawToolCall {
-    id: String,
-    name: String,
-    arguments: String,
-}
-
-pub struct OpenAiResponseEventMapper {
-    function_calls_by_item: HashMap<String, PendingResponseFunctionCall>,
-    pending_stop_reason: Option<StopReason>,
-}
-
-#[derive(Default)]
-struct PendingResponseFunctionCall {
-    call_id: String,
-    name: Arc<str>,
-    arguments: String,
-}
-
-impl OpenAiResponseEventMapper {
-    pub fn new() -> Self {
-        Self {
-            function_calls_by_item: HashMap::default(),
-            pending_stop_reason: None,
-        }
-    }
-
-    pub fn map_stream(
-        mut self,
-        events: Pin<Box<dyn Send + Stream<Item = Result<ResponsesStreamEvent>>>>,
-    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
-    {
-        events.flat_map(move |event| {
-            futures::stream::iter(match event {
-                Ok(event) => self.map_event(event),
-                Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
-            })
-        })
-    }
-
-    pub fn map_event(
-        &mut self,
-        event: ResponsesStreamEvent,
-    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-        match event {
-            ResponsesStreamEvent::OutputItemAdded { item, .. } => {
-                let mut events = Vec::new();
-
-                match &item {
-                    ResponseOutputItem::Message(message) => {
-                        if let Some(id) = &message.id {
-                            events.push(Ok(LanguageModelCompletionEvent::StartMessage {
-                                message_id: id.clone(),
-                            }));
-                        }
-                    }
-                    ResponseOutputItem::FunctionCall(function_call) => {
-                        if let Some(item_id) = function_call.id.clone() {
-                            let call_id = function_call
-                                .call_id
-                                .clone()
-                                .or_else(|| function_call.id.clone())
-                                .unwrap_or_else(|| item_id.clone());
-                            let entry = PendingResponseFunctionCall {
-                                call_id,
-                                name: Arc::<str>::from(
-                                    function_call.name.clone().unwrap_or_default(),
-                                ),
-                                arguments: function_call.arguments.clone(),
-                            };
-                            self.function_calls_by_item.insert(item_id, entry);
-                        }
-                    }
-                    ResponseOutputItem::Reasoning(_) | ResponseOutputItem::Unknown => {}
-                }
-                events
-            }
-            ResponsesStreamEvent::ReasoningSummaryTextDelta { delta, .. } => {
-                if delta.is_empty() {
-                    Vec::new()
-                } else {
-                    vec![Ok(LanguageModelCompletionEvent::Thinking {
-                        text: delta,
-                        signature: None,
-                    })]
-                }
-            }
-            ResponsesStreamEvent::OutputTextDelta { delta, .. } => {
-                if delta.is_empty() {
-                    Vec::new()
-                } else {
-                    vec![Ok(LanguageModelCompletionEvent::Text(delta))]
-                }
-            }
-            ResponsesStreamEvent::FunctionCallArgumentsDelta { item_id, delta, .. } => {
-                if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) {
-                    entry.arguments.push_str(&delta);
-                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
-                        &fix_streamed_json(&entry.arguments),
-                    ) {
-                        return vec![Ok(LanguageModelCompletionEvent::ToolUse(
-                            LanguageModelToolUse {
-                                id: LanguageModelToolUseId::from(entry.call_id.clone()),
-                                name: entry.name.clone(),
-                                is_input_complete: false,
-                                input,
-                                raw_input: entry.arguments.clone(),
-                                thought_signature: None,
-                            },
-                        ))];
-                    }
-                }
-                Vec::new()
-            }
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id, arguments, ..
-            } => {
-                if let Some(mut entry) = self.function_calls_by_item.remove(&item_id) {
-                    if !arguments.is_empty() {
-                        entry.arguments = arguments;
-                    }
-                    let raw_input = entry.arguments.clone();
-                    self.pending_stop_reason = Some(StopReason::ToolUse);
-                    match parse_tool_arguments(&entry.arguments) {
-                        Ok(input) => {
-                            vec![Ok(LanguageModelCompletionEvent::ToolUse(
-                                LanguageModelToolUse {
-                                    id: LanguageModelToolUseId::from(entry.call_id.clone()),
-                                    name: entry.name.clone(),
-                                    is_input_complete: true,
-                                    input,
-                                    raw_input,
-                                    thought_signature: None,
-                                },
-                            ))]
-                        }
-                        Err(error) => {
-                            vec![Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
-                                id: LanguageModelToolUseId::from(entry.call_id.clone()),
-                                tool_name: entry.name.clone(),
-                                raw_input: Arc::<str>::from(raw_input),
-                                json_parse_error: error.to_string(),
-                            })]
-                        }
-                    }
-                } else {
-                    Vec::new()
-                }
-            }
-            ResponsesStreamEvent::Completed { response } => {
-                self.handle_completion(response, StopReason::EndTurn)
-            }
-            ResponsesStreamEvent::Incomplete { response } => {
-                let reason = response
-                    .status_details
-                    .as_ref()
-                    .and_then(|details| details.reason.as_deref());
-                let stop_reason = match reason {
-                    Some("max_output_tokens") => StopReason::MaxTokens,
-                    Some("content_filter") => {
-                        self.pending_stop_reason = Some(StopReason::Refusal);
-                        StopReason::Refusal
-                    }
-                    _ => self
-                        .pending_stop_reason
-                        .take()
-                        .unwrap_or(StopReason::EndTurn),
-                };
-
-                let mut events = Vec::new();
-                if self.pending_stop_reason.is_none() {
-                    events.extend(self.emit_tool_calls_from_output(&response.output));
-                }
-                if let Some(usage) = response.usage.as_ref() {
-                    events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
-                        token_usage_from_response_usage(usage),
-                    )));
-                }
-                events.push(Ok(LanguageModelCompletionEvent::Stop(stop_reason)));
-                events
-            }
-            ResponsesStreamEvent::Failed { response } => {
-                let message = response
-                    .status_details
-                    .and_then(|details| details.error)
-                    .map(|error| error.to_string())
-                    .unwrap_or_else(|| "response failed".to_string());
-                vec![Err(LanguageModelCompletionError::Other(anyhow!(message)))]
-            }
-            ResponsesStreamEvent::Error { error }
-            | ResponsesStreamEvent::GenericError { error } => {
-                vec![Err(LanguageModelCompletionError::Other(anyhow!(
-                    error.message
-                )))]
-            }
-            ResponsesStreamEvent::ReasoningSummaryPartAdded { summary_index, .. } => {
-                if summary_index > 0 {
-                    vec![Ok(LanguageModelCompletionEvent::Thinking {
-                        text: "\n\n".to_string(),
-                        signature: None,
-                    })]
-                } else {
-                    Vec::new()
-                }
-            }
-            ResponsesStreamEvent::OutputTextDone { .. }
-            | ResponsesStreamEvent::OutputItemDone { .. }
-            | ResponsesStreamEvent::ContentPartAdded { .. }
-            | ResponsesStreamEvent::ContentPartDone { .. }
-            | ResponsesStreamEvent::ReasoningSummaryTextDone { .. }
-            | ResponsesStreamEvent::ReasoningSummaryPartDone { .. }
-            | ResponsesStreamEvent::Created { .. }
-            | ResponsesStreamEvent::InProgress { .. }
-            | ResponsesStreamEvent::Unknown => Vec::new(),
-        }
-    }
-
-    fn handle_completion(
-        &mut self,
-        response: ResponsesSummary,
-        default_reason: StopReason,
-    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-        let mut events = Vec::new();
-
-        if self.pending_stop_reason.is_none() {
-            events.extend(self.emit_tool_calls_from_output(&response.output));
-        }
-
-        if let Some(usage) = response.usage.as_ref() {
-            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
-                token_usage_from_response_usage(usage),
-            )));
-        }
-
-        let stop_reason = self.pending_stop_reason.take().unwrap_or(default_reason);
-        events.push(Ok(LanguageModelCompletionEvent::Stop(stop_reason)));
-        events
-    }
-
-    fn emit_tool_calls_from_output(
-        &mut self,
-        output: &[ResponseOutputItem],
-    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-        let mut events = Vec::new();
-        for item in output {
-            if let ResponseOutputItem::FunctionCall(function_call) = item {
-                let Some(call_id) = function_call
-                    .call_id
-                    .clone()
-                    .or_else(|| function_call.id.clone())
-                else {
-                    log::error!(
-                        "Function call item missing both call_id and id: {:?}",
-                        function_call
-                    );
-                    continue;
-                };
-                let name: Arc<str> = Arc::from(function_call.name.clone().unwrap_or_default());
-                let arguments = &function_call.arguments;
-                self.pending_stop_reason = Some(StopReason::ToolUse);
-                match parse_tool_arguments(arguments) {
-                    Ok(input) => {
-                        events.push(Ok(LanguageModelCompletionEvent::ToolUse(
-                            LanguageModelToolUse {
-                                id: LanguageModelToolUseId::from(call_id.clone()),
-                                name: name.clone(),
-                                is_input_complete: true,
-                                input,
-                                raw_input: arguments.clone(),
-                                thought_signature: None,
-                            },
-                        )));
-                    }
-                    Err(error) => {
-                        events.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
-                            id: LanguageModelToolUseId::from(call_id.clone()),
-                            tool_name: name.clone(),
-                            raw_input: Arc::<str>::from(arguments.clone()),
-                            json_parse_error: error.to_string(),
-                        }));
-                    }
-                }
-            }
-        }
-        events
-    }
-}
-
-fn token_usage_from_response_usage(usage: &ResponsesUsage) -> TokenUsage {
-    TokenUsage {
-        input_tokens: usage.input_tokens.unwrap_or_default(),
-        output_tokens: usage.output_tokens.unwrap_or_default(),
-        cache_creation_input_tokens: 0,
-        cache_read_input_tokens: 0,
-    }
-}
-
-pub(crate) fn collect_tiktoken_messages(
-    request: LanguageModelRequest,
-) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
-    request
-        .messages
-        .into_iter()
-        .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
-            role: match message.role {
-                Role::User => "user".into(),
-                Role::Assistant => "assistant".into(),
-                Role::System => "system".into(),
-            },
-            content: Some(message.string_contents()),
-            name: None,
-            function_call: None,
-        })
-        .collect::<Vec<_>>()
-}
-
-pub fn count_open_ai_tokens(
-    request: LanguageModelRequest,
-    model: Model,
-    cx: &App,
-) -> BoxFuture<'static, Result<u64>> {
-    cx.background_spawn(async move {
-        let messages = collect_tiktoken_messages(request);
-        match model {
-            Model::Custom { max_tokens, .. } => {
-                let model = if max_tokens >= 100_000 {
-                    // If the max tokens is 100k or more, it likely uses the o200k_base tokenizer
-                    "gpt-4o"
-                } else {
-                    // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are
-                    // supported with this tiktoken method
-                    "gpt-4"
-                };
-                tiktoken_rs::num_tokens_from_messages(model, &messages)
-            }
-            // Currently supported by tiktoken_rs
-            // Sometimes tiktoken-rs is behind on model support. If that is the case, make a new branch
-            // arm with an override. We enumerate all supported models here so that we can check if new
-            // models are supported yet or not.
-            Model::ThreePointFiveTurbo
-            | Model::Four
-            | Model::FourTurbo
-            | Model::FourOmniMini
-            | Model::FourPointOneNano
-            | Model::O1
-            | Model::O3
-            | Model::O3Mini
-            | Model::Five
-            | Model::FiveCodex
-            | Model::FiveMini
-            | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
-            // GPT-5.1, 5.2, 5.2-codex, 5.3-codex, 5.4, and 5.4-pro don't have dedicated tiktoken support; use gpt-5 tokenizer
-            Model::FivePointOne
-            | Model::FivePointTwo
-            | Model::FivePointTwoCodex
-            | Model::FivePointThreeCodex
-            | Model::FivePointFour
-            | Model::FivePointFourPro => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages),
-        }
-        .map(|tokens| tokens as u64)
-    })
-    .boxed()
-}
-
 struct ConfigurationView {
     api_key_editor: Entity<InputField>,
     state: Entity<State>,
@@ -1459,874 +606,3 @@ impl Render for ConfigurationView {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use futures::{StreamExt, executor::block_on};
-    use gpui::TestAppContext;
-    use language_model::{
-        LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
-    };
-    use open_ai::responses::{
-        ReasoningSummaryPart, ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage,
-        ResponseReasoningItem, ResponseStatusDetails, ResponseSummary, ResponseUsage,
-        StreamEvent as ResponsesStreamEvent,
-    };
-    use pretty_assertions::assert_eq;
-    use serde_json::json;
-
-    use super::*;
-
-    fn map_response_events(events: Vec<ResponsesStreamEvent>) -> Vec<LanguageModelCompletionEvent> {
-        block_on(async {
-            OpenAiResponseEventMapper::new()
-                .map_stream(Box::pin(futures::stream::iter(events.into_iter().map(Ok))))
-                .collect::<Vec<_>>()
-                .await
-                .into_iter()
-                .map(Result::unwrap)
-                .collect()
-        })
-    }
-
-    fn response_item_message(id: &str) -> ResponseOutputItem {
-        ResponseOutputItem::Message(ResponseOutputMessage {
-            id: Some(id.to_string()),
-            role: Some("assistant".to_string()),
-            status: Some("in_progress".to_string()),
-            content: vec![],
-        })
-    }
-
-    fn response_item_function_call(id: &str, args: Option<&str>) -> ResponseOutputItem {
-        ResponseOutputItem::FunctionCall(ResponseFunctionToolCall {
-            id: Some(id.to_string()),
-            status: Some("in_progress".to_string()),
-            name: Some("get_weather".to_string()),
-            call_id: Some("call_123".to_string()),
-            arguments: args.map(|s| s.to_string()).unwrap_or_default(),
-        })
-    }
-
-    #[gpui::test]
-    fn tiktoken_rs_support(cx: &TestAppContext) {
-        let request = LanguageModelRequest {
-            thread_id: None,
-            prompt_id: None,
-            intent: None,
-            messages: vec![LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec![MessageContent::Text("message".into())],
-                cache: false,
-                reasoning_details: None,
-            }],
-            tools: vec![],
-            tool_choice: None,
-            stop: vec![],
-            temperature: None,
-            thinking_allowed: true,
-            thinking_effort: None,
-            speed: None,
-        };
-
-        // Validate that all models are supported by tiktoken-rs
-        for model in Model::iter() {
-            let count = cx
-                .foreground_executor()
-                .block_on(count_open_ai_tokens(
-                    request.clone(),
-                    model,
-                    &cx.app.borrow(),
-                ))
-                .unwrap();
-            assert!(count > 0);
-        }
-    }
-
-    #[test]
-    fn responses_stream_maps_text_and_usage() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_message("msg_123"),
-            },
-            ResponsesStreamEvent::OutputTextDelta {
-                item_id: "msg_123".into(),
-                output_index: 0,
-                content_index: Some(0),
-                delta: "Hello".into(),
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary {
-                    usage: Some(ResponseUsage {
-                        input_tokens: Some(5),
-                        output_tokens: Some(3),
-                        total_tokens: Some(8),
-                    }),
-                    ..Default::default()
-                },
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::StartMessage { ref message_id } if message_id == "msg_123"
-        ));
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::Text(ref text) if text == "Hello"
-        ));
-        assert!(matches!(
-            mapped[2],
-            LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
-                input_tokens: 5,
-                output_tokens: 3,
-                ..
-            })
-        ));
-        assert!(matches!(
-            mapped[3],
-            LanguageModelCompletionEvent::Stop(StopReason::EndTurn)
-        ));
-    }
-
-    #[test]
-    fn into_open_ai_response_builds_complete_payload() {
-        let tool_call_id = LanguageModelToolUseId::from("call-42");
-        let tool_input = json!({ "city": "Boston" });
-        let tool_arguments = serde_json::to_string(&tool_input).unwrap();
-        let tool_use = LanguageModelToolUse {
-            id: tool_call_id.clone(),
-            name: Arc::from("get_weather"),
-            raw_input: tool_arguments.clone(),
-            input: tool_input,
-            is_input_complete: true,
-            thought_signature: None,
-        };
-        let tool_result = LanguageModelToolResult {
-            tool_use_id: tool_call_id,
-            tool_name: Arc::from("get_weather"),
-            is_error: false,
-            content: LanguageModelToolResultContent::Text(Arc::from("Sunny")),
-            output: Some(json!({ "forecast": "Sunny" })),
-        };
-        let user_image = LanguageModelImage {
-            source: SharedString::from("aGVsbG8="),
-            size: None,
-        };
-        let expected_image_url = user_image.to_base64_url();
-
-        let request = LanguageModelRequest {
-            thread_id: Some("thread-123".into()),
-            prompt_id: None,
-            intent: None,
-            messages: vec![
-                LanguageModelRequestMessage {
-                    role: Role::System,
-                    content: vec![MessageContent::Text("System context".into())],
-                    cache: false,
-                    reasoning_details: None,
-                },
-                LanguageModelRequestMessage {
-                    role: Role::User,
-                    content: vec![
-                        MessageContent::Text("Please check the weather.".into()),
-                        MessageContent::Image(user_image),
-                    ],
-                    cache: false,
-                    reasoning_details: None,
-                },
-                LanguageModelRequestMessage {
-                    role: Role::Assistant,
-                    content: vec![
-                        MessageContent::Text("Looking that up.".into()),
-                        MessageContent::ToolUse(tool_use),
-                    ],
-                    cache: false,
-                    reasoning_details: None,
-                },
-                LanguageModelRequestMessage {
-                    role: Role::Assistant,
-                    content: vec![MessageContent::ToolResult(tool_result)],
-                    cache: false,
-                    reasoning_details: None,
-                },
-            ],
-            tools: vec![LanguageModelRequestTool {
-                name: "get_weather".into(),
-                description: "Fetches the weather".into(),
-                input_schema: json!({ "type": "object" }),
-                use_input_streaming: false,
-            }],
-            tool_choice: Some(LanguageModelToolChoice::Any),
-            stop: vec!["<STOP>".into()],
-            temperature: None,
-            thinking_allowed: false,
-            thinking_effort: None,
-            speed: None,
-        };
-
-        let response = into_open_ai_response(
-            request,
-            "custom-model",
-            true,
-            true,
-            Some(2048),
-            Some(ReasoningEffort::Low),
-        );
-
-        let serialized = serde_json::to_value(&response).unwrap();
-        let expected = json!({
-            "model": "custom-model",
-            "input": [
-                {
-                    "type": "message",
-                    "role": "system",
-                    "content": [
-                        { "type": "input_text", "text": "System context" }
-                    ]
-                },
-                {
-                    "type": "message",
-                    "role": "user",
-                    "content": [
-                        { "type": "input_text", "text": "Please check the weather." },
-                        { "type": "input_image", "image_url": expected_image_url }
-                    ]
-                },
-                {
-                    "type": "message",
-                    "role": "assistant",
-                    "content": [
-                        { "type": "output_text", "text": "Looking that up.", "annotations": [] }
-                    ]
-                },
-                {
-                    "type": "function_call",
-                    "call_id": "call-42",
-                    "name": "get_weather",
-                    "arguments": tool_arguments
-                },
-                {
-                    "type": "function_call_output",
-                    "call_id": "call-42",
-                    "output": "Sunny"
-                }
-            ],
-            "stream": true,
-            "max_output_tokens": 2048,
-            "parallel_tool_calls": true,
-            "tool_choice": "required",
-            "tools": [
-                {
-                    "type": "function",
-                    "name": "get_weather",
-                    "description": "Fetches the weather",
-                    "parameters": { "type": "object" }
-                }
-            ],
-            "prompt_cache_key": "thread-123",
-            "reasoning": { "effort": "low", "summary": "auto" }
-        });
-
-        assert_eq!(serialized, expected);
-    }
-
-    #[test]
-    fn responses_stream_maps_tool_calls() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_function_call("item_fn", Some("{\"city\":\"Bos")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDelta {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                delta: "ton\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                arguments: "{\"city\":\"Boston\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert_eq!(mapped.len(), 3);
-        // First event is the partial tool use (from FunctionCallArgumentsDelta)
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                is_input_complete: false,
-                ..
-            })
-        ));
-        // Second event is the complete tool use (from FunctionCallArgumentsDone)
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                ref id,
-                ref name,
-                ref raw_input,
-                is_input_complete: true,
-                ..
-            }) if id.to_string() == "call_123"
-                && name.as_ref() == "get_weather"
-                && raw_input == "{\"city\":\"Boston\"}"
-        ));
-        assert!(matches!(
-            mapped[2],
-            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_uses_max_tokens_stop_reason() {
-        let events = vec![ResponsesStreamEvent::Incomplete {
-            response: ResponseSummary {
-                status_details: Some(ResponseStatusDetails {
-                    reason: Some("max_output_tokens".into()),
-                    r#type: Some("incomplete".into()),
-                    error: None,
-                }),
-                usage: Some(ResponseUsage {
-                    input_tokens: Some(10),
-                    output_tokens: Some(20),
-                    total_tokens: Some(30),
-                }),
-                ..Default::default()
-            },
-        }];
-
-        let mapped = map_response_events(events);
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
-                input_tokens: 10,
-                output_tokens: 20,
-                ..
-            })
-        ));
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_handles_multiple_tool_calls() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_function_call("item_fn1", Some("{\"city\":\"NYC\"}")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn1".into(),
-                output_index: 0,
-                arguments: "{\"city\":\"NYC\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 1,
-                sequence_number: None,
-                item: response_item_function_call("item_fn2", Some("{\"city\":\"LA\"}")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn2".into(),
-                output_index: 1,
-                arguments: "{\"city\":\"LA\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert_eq!(mapped.len(), 3);
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
-            if raw_input == "{\"city\":\"NYC\"}"
-        ));
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
-            if raw_input == "{\"city\":\"LA\"}"
-        ));
-        assert!(matches!(
-            mapped[2],
-            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_handles_mixed_text_and_tool_calls() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_message("msg_123"),
-            },
-            ResponsesStreamEvent::OutputTextDelta {
-                item_id: "msg_123".into(),
-                output_index: 0,
-                content_index: Some(0),
-                delta: "Let me check that".into(),
-            },
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 1,
-                sequence_number: None,
-                item: response_item_function_call("item_fn", Some("{\"query\":\"test\"}")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn".into(),
-                output_index: 1,
-                arguments: "{\"query\":\"test\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::StartMessage { .. }
-        ));
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::Text(ref text) if text == "Let me check that"
-        ));
-        assert!(matches!(
-            mapped[2],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
-            if raw_input == "{\"query\":\"test\"}"
-        ));
-        assert!(matches!(
-            mapped[3],
-            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_handles_json_parse_error() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_function_call("item_fn", Some("{invalid json")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                arguments: "{invalid json".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::ToolUseJsonParseError {
-                ref raw_input,
-                ..
-            } if raw_input.as_ref() == "{invalid json"
-        ));
-    }
-
-    #[test]
-    fn responses_stream_handles_incomplete_function_call() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_function_call("item_fn", Some("{\"city\":")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDelta {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                delta: "\"Boston\"".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Incomplete {
-                response: ResponseSummary {
-                    status_details: Some(ResponseStatusDetails {
-                        reason: Some("max_output_tokens".into()),
-                        r#type: Some("incomplete".into()),
-                        error: None,
-                    }),
-                    output: vec![response_item_function_call(
-                        "item_fn",
-                        Some("{\"city\":\"Boston\"}"),
-                    )],
-                    ..Default::default()
-                },
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert_eq!(mapped.len(), 3);
-        // First event is the partial tool use (from FunctionCallArgumentsDelta)
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                is_input_complete: false,
-                ..
-            })
-        ));
-        // Second event is the complete tool use (from the Incomplete response output)
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                ref raw_input,
-                is_input_complete: true,
-                ..
-            })
-            if raw_input == "{\"city\":\"Boston\"}"
-        ));
-        assert!(matches!(
-            mapped[2],
-            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_incomplete_does_not_duplicate_tool_calls() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_function_call("item_fn", Some("{\"city\":\"Boston\"}")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                arguments: "{\"city\":\"Boston\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Incomplete {
-                response: ResponseSummary {
-                    status_details: Some(ResponseStatusDetails {
-                        reason: Some("max_output_tokens".into()),
-                        r#type: Some("incomplete".into()),
-                        error: None,
-                    }),
-                    output: vec![response_item_function_call(
-                        "item_fn",
-                        Some("{\"city\":\"Boston\"}"),
-                    )],
-                    ..Default::default()
-                },
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert_eq!(mapped.len(), 2);
-        assert!(matches!(
-            mapped[0],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
-            if raw_input == "{\"city\":\"Boston\"}"
-        ));
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_handles_empty_tool_arguments() {
-        // Test that tools with no arguments (empty string) are handled correctly
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: response_item_function_call("item_fn", Some("")),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                arguments: "".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        assert_eq!(mapped.len(), 2);
-
-        // Should produce a ToolUse event with an empty object
-        assert!(matches!(
-            &mapped[0],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                id,
-                name,
-                raw_input,
-                input,
-                ..
-            }) if id.to_string() == "call_123"
-                && name.as_ref() == "get_weather"
-                && raw_input == ""
-                && input.is_object()
-                && input.as_object().unwrap().is_empty()
-        ));
-
-        assert!(matches!(
-            mapped[1],
-            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
-        ));
-    }
-
-    #[test]
-    fn responses_stream_emits_partial_tool_use_events() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: ResponseOutputItem::FunctionCall(ResponseFunctionToolCall {
-                    id: Some("item_fn".to_string()),
-                    status: Some("in_progress".to_string()),
-                    name: Some("get_weather".to_string()),
-                    call_id: Some("call_abc".to_string()),
-                    arguments: String::new(),
-                }),
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDelta {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                delta: "{\"city\":\"Bos".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDelta {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                delta: "ton\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::FunctionCallArgumentsDone {
-                item_id: "item_fn".into(),
-                output_index: 0,
-                arguments: "{\"city\":\"Boston\"}".into(),
-                sequence_number: None,
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-        // Two partial events + one complete event + Stop
-        assert!(mapped.len() >= 3);
-
-        // The last complete ToolUse event should have is_input_complete: true
-        let complete_tool_use = mapped.iter().find(|e| {
-            matches!(
-                e,
-                LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                    is_input_complete: true,
-                    ..
-                })
-            )
-        });
-        assert!(
-            complete_tool_use.is_some(),
-            "should have a complete tool use event"
-        );
-
-        // All ToolUse events before the final one should have is_input_complete: false
-        let tool_uses: Vec<_> = mapped
-            .iter()
-            .filter(|e| matches!(e, LanguageModelCompletionEvent::ToolUse(_)))
-            .collect();
-        assert!(
-            tool_uses.len() >= 2,
-            "should have at least one partial and one complete event"
-        );
-
-        let last = tool_uses.last().unwrap();
-        assert!(matches!(
-            last,
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
-                is_input_complete: true,
-                ..
-            })
-        ));
-    }
-
-    #[test]
-    fn responses_stream_maps_reasoning_summary_deltas() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
-                    id: Some("rs_123".into()),
-                    summary: vec![],
-                }),
-            },
-            ResponsesStreamEvent::ReasoningSummaryPartAdded {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                summary_index: 0,
-            },
-            ResponsesStreamEvent::ReasoningSummaryTextDelta {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                delta: "Thinking about".into(),
-            },
-            ResponsesStreamEvent::ReasoningSummaryTextDelta {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                delta: " the answer".into(),
-            },
-            ResponsesStreamEvent::ReasoningSummaryTextDone {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                text: "Thinking about the answer".into(),
-            },
-            ResponsesStreamEvent::ReasoningSummaryPartDone {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                summary_index: 0,
-            },
-            ResponsesStreamEvent::ReasoningSummaryPartAdded {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                summary_index: 1,
-            },
-            ResponsesStreamEvent::ReasoningSummaryTextDelta {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                delta: "Second part".into(),
-            },
-            ResponsesStreamEvent::ReasoningSummaryTextDone {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                text: "Second part".into(),
-            },
-            ResponsesStreamEvent::ReasoningSummaryPartDone {
-                item_id: "rs_123".into(),
-                output_index: 0,
-                summary_index: 1,
-            },
-            ResponsesStreamEvent::OutputItemDone {
-                output_index: 0,
-                sequence_number: None,
-                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
-                    id: Some("rs_123".into()),
-                    summary: vec![
-                        ReasoningSummaryPart::SummaryText {
-                            text: "Thinking about the answer".into(),
-                        },
-                        ReasoningSummaryPart::SummaryText {
-                            text: "Second part".into(),
-                        },
-                    ],
-                }),
-            },
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 1,
-                sequence_number: None,
-                item: response_item_message("msg_456"),
-            },
-            ResponsesStreamEvent::OutputTextDelta {
-                item_id: "msg_456".into(),
-                output_index: 1,
-                content_index: Some(0),
-                delta: "The answer is 42".into(),
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-
-        let thinking_events: Vec<_> = mapped
-            .iter()
-            .filter(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. }))
-            .collect();
-        assert_eq!(
-            thinking_events.len(),
-            4,
-            "expected 4 thinking events (2 deltas + separator + second delta), got {:?}",
-            thinking_events,
-        );
-
-        assert!(matches!(
-            &thinking_events[0],
-            LanguageModelCompletionEvent::Thinking { text, .. } if text == "Thinking about"
-        ));
-        assert!(matches!(
-            &thinking_events[1],
-            LanguageModelCompletionEvent::Thinking { text, .. } if text == " the answer"
-        ));
-        assert!(
-            matches!(
-                &thinking_events[2],
-                LanguageModelCompletionEvent::Thinking { text, .. } if text == "\n\n"
-            ),
-            "expected separator between summary parts"
-        );
-        assert!(matches!(
-            &thinking_events[3],
-            LanguageModelCompletionEvent::Thinking { text, .. } if text == "Second part"
-        ));
-
-        assert!(mapped.iter().any(|e| matches!(
-            e,
-            LanguageModelCompletionEvent::Text(t) if t == "The answer is 42"
-        )));
-    }
-
-    #[test]
-    fn responses_stream_maps_reasoning_from_done_only() {
-        let events = vec![
-            ResponsesStreamEvent::OutputItemAdded {
-                output_index: 0,
-                sequence_number: None,
-                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
-                    id: Some("rs_789".into()),
-                    summary: vec![],
-                }),
-            },
-            ResponsesStreamEvent::OutputItemDone {
-                output_index: 0,
-                sequence_number: None,
-                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
-                    id: Some("rs_789".into()),
-                    summary: vec![ReasoningSummaryPart::SummaryText {
-                        text: "Summary without deltas".into(),
-                    }],
-                }),
-            },
-            ResponsesStreamEvent::Completed {
-                response: ResponseSummary::default(),
-            },
-        ];
-
-        let mapped = map_response_events(events);
-
-        assert!(
-            !mapped
-                .iter()
-                .any(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. })),
-            "OutputItemDone reasoning should not produce Thinking events (no delta/done text events)"
-        );
-    }
-}

crates/language_models/src/provider/open_ai_compatible.rs 🔗

@@ -402,7 +402,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
                 self.model.capabilities.parallel_tool_calls,
                 self.model.capabilities.prompt_cache_key,
                 self.max_output_tokens(),
-                self.model.reasoning_effort.clone(),
+                self.model.reasoning_effort,
             );
             let completions = self.stream_completion(request, cx);
             async move {
@@ -417,7 +417,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
                 self.model.capabilities.parallel_tool_calls,
                 self.model.capabilities.prompt_cache_key,
                 self.max_output_tokens(),
-                self.model.reasoning_effort.clone(),
+                self.model.reasoning_effort,
             );
             let completions = self.stream_response(request, cx);
             async move {

crates/language_models/src/provider/open_router.rs 🔗

@@ -22,7 +22,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
-use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
+use language_model::util::{fix_streamed_json, parse_tool_arguments};
 
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openrouter");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenRouter");

crates/language_models/src/provider/x_ai.rs 🔗

@@ -9,7 +9,7 @@ use language_model::{
     LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
     LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
     LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter,
-    Role, env_var,
+    env_var,
 };
 use open_ai::ResponseStreamEvent;
 pub use settings::XaiAvailableModel as AvailableModel;
@@ -19,7 +19,8 @@ use strum::IntoEnumIterator;
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
-use x_ai::{Model, XAI_API_URL};
+use x_ai::XAI_API_URL;
+pub use x_ai::completion::count_xai_tokens;
 
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI");
@@ -320,7 +321,9 @@ impl LanguageModel for XAiLanguageModel {
         request: LanguageModelRequest,
         cx: &App,
     ) -> BoxFuture<'static, Result<u64>> {
-        count_xai_tokens(request, self.model.clone(), cx)
+        let model = self.model.clone();
+        cx.background_spawn(async move { count_xai_tokens(request, model) })
+            .boxed()
     }
 
     fn stream_completion(
@@ -354,37 +357,6 @@ impl LanguageModel for XAiLanguageModel {
     }
 }
 
-pub fn count_xai_tokens(
-    request: LanguageModelRequest,
-    model: Model,
-    cx: &App,
-) -> BoxFuture<'static, Result<u64>> {
-    cx.background_spawn(async move {
-        let messages = request
-            .messages
-            .into_iter()
-            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
-                role: match message.role {
-                    Role::User => "user".into(),
-                    Role::Assistant => "assistant".into(),
-                    Role::System => "system".into(),
-                },
-                content: Some(message.string_contents()),
-                name: None,
-                function_call: None,
-            })
-            .collect::<Vec<_>>();
-
-        let model_name = if model.max_token_count() >= 100_000 {
-            "gpt-4o"
-        } else {
-            "gpt-4"
-        };
-        tiktoken_rs::num_tokens_from_messages(model_name, &messages).map(|tokens| tokens as u64)
-    })
-    .boxed()
-}
-
 struct ConfigurationView {
     api_key_editor: Entity<InputField>,
     state: Entity<State>,

crates/language_models_cloud/Cargo.toml 🔗

@@ -0,0 +1,33 @@
+[package]
+name = "language_models_cloud"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/language_models_cloud.rs"
+
+[dependencies]
+anthropic = { workspace = true, features = ["schemars"] }
+anyhow.workspace = true
+cloud_llm_client.workspace = true
+futures.workspace = true
+google_ai = { workspace = true, features = ["schemars"] }
+gpui.workspace = true
+http_client.workspace = true
+language_model.workspace = true
+open_ai = { workspace = true, features = ["schemars"] }
+schemars.workspace = true
+semver.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+smol.workspace = true
+thiserror.workspace = true
+x_ai = { workspace = true, features = ["schemars"] }
+
+[dev-dependencies]
+language_model = { workspace = true, features = ["test-support"] }

crates/language_models_cloud/src/language_models_cloud.rs 🔗

@@ -0,0 +1,1059 @@
+use anthropic::AnthropicModelMode;
+use anyhow::{Context as _, Result, anyhow};
+use cloud_llm_client::{
+    CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME,
+    CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus,
+    CountTokensBody, CountTokensResponse, EXPIRED_LLM_TOKEN_HEADER_NAME, ListModelsResponse,
+    OUTDATED_LLM_TOKEN_HEADER_NAME, SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME,
+    ZED_VERSION_HEADER_NAME,
+};
+use futures::{
+    AsyncBufReadExt, FutureExt, Stream, StreamExt,
+    future::BoxFuture,
+    stream::{self, BoxStream},
+};
+use google_ai::GoogleModelMode;
+use gpui::{App, AppContext, AsyncApp, Context, Task};
+use http_client::http::{HeaderMap, HeaderValue};
+use http_client::{
+    AsyncBody, HttpClient, HttpClientWithUrl, HttpRequestExt, Method, Response, StatusCode,
+};
+use language_model::{
+    ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, GOOGLE_PROVIDER_ID, GOOGLE_PROVIDER_NAME,
+    LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionError,
+    LanguageModelCompletionEvent, LanguageModelEffortLevel, LanguageModelId, LanguageModelName,
+    LanguageModelProviderId, LanguageModelProviderName, LanguageModelRequest,
+    LanguageModelToolChoice, LanguageModelToolSchemaFormat, OPEN_AI_PROVIDER_ID,
+    OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME,
+    ZED_CLOUD_PROVIDER_ID, ZED_CLOUD_PROVIDER_NAME,
+};
+
+use schemars::JsonSchema;
+use semver::Version;
+use serde::{Deserialize, Serialize, de::DeserializeOwned};
+use smol::io::{AsyncReadExt, BufReader};
+use std::collections::VecDeque;
+use std::pin::Pin;
+use std::str::FromStr;
+use std::sync::Arc;
+use std::task::Poll;
+use std::time::Duration;
+use thiserror::Error;
+
+use anthropic::completion::{
+    AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic,
+};
+use google_ai::completion::{GoogleEventMapper, into_google};
+use open_ai::completion::{
+    OpenAiEventMapper, OpenAiResponseEventMapper, count_open_ai_tokens, into_open_ai,
+    into_open_ai_response,
+};
+use x_ai::completion::count_xai_tokens;
+
+const PROVIDER_ID: LanguageModelProviderId = ZED_CLOUD_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = ZED_CLOUD_PROVIDER_NAME;
+
+/// Trait for acquiring and refreshing LLM authentication tokens.
+pub trait CloudLlmTokenProvider: Send + Sync {
+    type AuthContext: Clone + Send + 'static;
+
+    fn auth_context(&self, cx: &AsyncApp) -> Self::AuthContext;
+    fn acquire_token(&self, auth_context: Self::AuthContext) -> BoxFuture<'static, Result<String>>;
+    fn refresh_token(&self, auth_context: Self::AuthContext) -> BoxFuture<'static, Result<String>>;
+}
+
+#[derive(Default, Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[serde(tag = "type", rename_all = "lowercase")]
+pub enum ModelMode {
+    #[default]
+    Default,
+    Thinking {
+        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
+        budget_tokens: Option<u32>,
+    },
+}
+
+impl From<ModelMode> for AnthropicModelMode {
+    fn from(value: ModelMode) -> Self {
+        match value {
+            ModelMode::Default => AnthropicModelMode::Default,
+            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
+        }
+    }
+}
+
+pub struct CloudLanguageModel<TP: CloudLlmTokenProvider> {
+    pub id: LanguageModelId,
+    pub model: Arc<cloud_llm_client::LanguageModel>,
+    pub token_provider: Arc<TP>,
+    pub http_client: Arc<HttpClientWithUrl>,
+    pub app_version: Option<Version>,
+    pub request_limiter: RateLimiter,
+}
+
+pub struct PerformLlmCompletionResponse {
+    pub response: Response<AsyncBody>,
+    pub includes_status_messages: bool,
+}
+
+impl<TP: CloudLlmTokenProvider> CloudLanguageModel<TP> {
+    pub async fn perform_llm_completion(
+        http_client: &HttpClientWithUrl,
+        token_provider: &TP,
+        auth_context: TP::AuthContext,
+        app_version: Option<Version>,
+        body: CompletionBody,
+    ) -> Result<PerformLlmCompletionResponse> {
+        let mut token = token_provider.acquire_token(auth_context.clone()).await?;
+        let mut refreshed_token = false;
+
+        loop {
+            let request = http_client::Request::builder()
+                .method(Method::POST)
+                .uri(http_client.build_zed_llm_url("/completions", &[])?.as_ref())
+                .when_some(app_version.as_ref(), |builder, app_version| {
+                    builder.header(ZED_VERSION_HEADER_NAME, app_version.to_string())
+                })
+                .header("Content-Type", "application/json")
+                .header("Authorization", format!("Bearer {token}"))
+                .header(CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, "true")
+                .header(CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, "true")
+                .body(serde_json::to_string(&body)?.into())?;
+
+            let mut response = http_client.send(request).await?;
+            let status = response.status();
+            if status.is_success() {
+                let includes_status_messages = response
+                    .headers()
+                    .get(SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME)
+                    .is_some();
+
+                return Ok(PerformLlmCompletionResponse {
+                    response,
+                    includes_status_messages,
+                });
+            }
+
+            if !refreshed_token && needs_llm_token_refresh(&response) {
+                token = token_provider.refresh_token(auth_context.clone()).await?;
+                refreshed_token = true;
+                continue;
+            }
+
+            if status == StatusCode::PAYMENT_REQUIRED {
+                return Err(anyhow!(PaymentRequiredError));
+            }
+
+            let mut body = String::new();
+            let headers = response.headers().clone();
+            response.body_mut().read_to_string(&mut body).await?;
+            return Err(anyhow!(ApiError {
+                status,
+                body,
+                headers
+            }));
+        }
+    }
+}
+
+fn needs_llm_token_refresh(response: &Response<AsyncBody>) -> bool {
+    response
+        .headers()
+        .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
+        .is_some()
+        || response
+            .headers()
+            .get(OUTDATED_LLM_TOKEN_HEADER_NAME)
+            .is_some()
+}
+
+#[derive(Debug, Error)]
+#[error("cloud language model request failed with status {status}: {body}")]
+struct ApiError {
+    status: StatusCode,
+    body: String,
+    headers: HeaderMap<HeaderValue>,
+}
+
+/// Represents error responses from Zed's cloud API.
+///
+/// Example JSON for an upstream HTTP error:
+/// ```json
+/// {
+///   "code": "upstream_http_error",
+///   "message": "Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers, reset reason: connection timeout",
+///   "upstream_status": 503
+/// }
+/// ```
+#[derive(Debug, serde::Deserialize)]
+struct CloudApiError {
+    code: String,
+    message: String,
+    #[serde(default)]
+    #[serde(deserialize_with = "deserialize_optional_status_code")]
+    upstream_status: Option<StatusCode>,
+    #[serde(default)]
+    retry_after: Option<f64>,
+}
+
+fn deserialize_optional_status_code<'de, D>(deserializer: D) -> Result<Option<StatusCode>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let opt: Option<u16> = Option::deserialize(deserializer)?;
+    Ok(opt.and_then(|code| StatusCode::from_u16(code).ok()))
+}
+
+impl From<ApiError> for LanguageModelCompletionError {
+    fn from(error: ApiError) -> Self {
+        if let Ok(cloud_error) = serde_json::from_str::<CloudApiError>(&error.body) {
+            if cloud_error.code.starts_with("upstream_http_") {
+                let status = if let Some(status) = cloud_error.upstream_status {
+                    status
+                } else if cloud_error.code.ends_with("_error") {
+                    error.status
+                } else {
+                    // If there's a status code in the code string (e.g. "upstream_http_429")
+                    // then use that; otherwise, see if the JSON contains a status code.
+                    cloud_error
+                        .code
+                        .strip_prefix("upstream_http_")
+                        .and_then(|code_str| code_str.parse::<u16>().ok())
+                        .and_then(|code| StatusCode::from_u16(code).ok())
+                        .unwrap_or(error.status)
+                };
+
+                return LanguageModelCompletionError::UpstreamProviderError {
+                    message: cloud_error.message,
+                    status,
+                    retry_after: cloud_error.retry_after.map(Duration::from_secs_f64),
+                };
+            }
+
+            return LanguageModelCompletionError::from_http_status(
+                PROVIDER_NAME,
+                error.status,
+                cloud_error.message,
+                None,
+            );
+        }
+
+        let retry_after = None;
+        LanguageModelCompletionError::from_http_status(
+            PROVIDER_NAME,
+            error.status,
+            error.body,
+            retry_after,
+        )
+    }
+}
+
+impl<TP: CloudLlmTokenProvider + 'static> LanguageModel for CloudLanguageModel<TP> {
+    fn id(&self) -> LanguageModelId {
+        self.id.clone()
+    }
+
+    fn name(&self) -> LanguageModelName {
+        LanguageModelName::from(self.model.display_name.clone())
+    }
+
+    fn provider_id(&self) -> LanguageModelProviderId {
+        PROVIDER_ID
+    }
+
+    fn provider_name(&self) -> LanguageModelProviderName {
+        PROVIDER_NAME
+    }
+
+    fn upstream_provider_id(&self) -> LanguageModelProviderId {
+        use cloud_llm_client::LanguageModelProvider::*;
+        match self.model.provider {
+            Anthropic => ANTHROPIC_PROVIDER_ID,
+            OpenAi => OPEN_AI_PROVIDER_ID,
+            Google => GOOGLE_PROVIDER_ID,
+            XAi => X_AI_PROVIDER_ID,
+        }
+    }
+
+    fn upstream_provider_name(&self) -> LanguageModelProviderName {
+        use cloud_llm_client::LanguageModelProvider::*;
+        match self.model.provider {
+            Anthropic => ANTHROPIC_PROVIDER_NAME,
+            OpenAi => OPEN_AI_PROVIDER_NAME,
+            Google => GOOGLE_PROVIDER_NAME,
+            XAi => X_AI_PROVIDER_NAME,
+        }
+    }
+
+    fn is_latest(&self) -> bool {
+        self.model.is_latest
+    }
+
+    fn supports_tools(&self) -> bool {
+        self.model.supports_tools
+    }
+
+    fn supports_images(&self) -> bool {
+        self.model.supports_images
+    }
+
+    fn supports_thinking(&self) -> bool {
+        self.model.supports_thinking
+    }
+
+    fn supports_fast_mode(&self) -> bool {
+        self.model.supports_fast_mode
+    }
+
+    fn supported_effort_levels(&self) -> Vec<LanguageModelEffortLevel> {
+        self.model
+            .supported_effort_levels
+            .iter()
+            .map(|effort_level| LanguageModelEffortLevel {
+                name: effort_level.name.clone().into(),
+                value: effort_level.value.clone().into(),
+                is_default: effort_level.is_default.unwrap_or(false),
+            })
+            .collect()
+    }
+
+    fn supports_streaming_tools(&self) -> bool {
+        self.model.supports_streaming_tools
+    }
+
+    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
+        match choice {
+            LanguageModelToolChoice::Auto
+            | LanguageModelToolChoice::Any
+            | LanguageModelToolChoice::None => true,
+        }
+    }
+
+    fn supports_split_token_display(&self) -> bool {
+        use cloud_llm_client::LanguageModelProvider::*;
+        matches!(self.model.provider, OpenAi | XAi)
+    }
+
+    fn telemetry_id(&self) -> String {
+        format!("zed.dev/{}", self.model.id)
+    }
+
+    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
+        match self.model.provider {
+            cloud_llm_client::LanguageModelProvider::Anthropic
+            | cloud_llm_client::LanguageModelProvider::OpenAi => {
+                LanguageModelToolSchemaFormat::JsonSchema
+            }
+            cloud_llm_client::LanguageModelProvider::Google
+            | cloud_llm_client::LanguageModelProvider::XAi => {
+                LanguageModelToolSchemaFormat::JsonSchemaSubset
+            }
+        }
+    }
+
+    fn max_token_count(&self) -> u64 {
+        self.model.max_token_count as u64
+    }
+
+    fn max_output_tokens(&self) -> Option<u64> {
+        Some(self.model.max_output_tokens as u64)
+    }
+
+    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
+        match &self.model.provider {
+            cloud_llm_client::LanguageModelProvider::Anthropic => {
+                Some(LanguageModelCacheConfiguration {
+                    min_total_token: 2_048,
+                    should_speculate: true,
+                    max_cache_anchors: 4,
+                })
+            }
+            cloud_llm_client::LanguageModelProvider::OpenAi
+            | cloud_llm_client::LanguageModelProvider::XAi
+            | cloud_llm_client::LanguageModelProvider::Google => None,
+        }
+    }
+
+    fn count_tokens(
+        &self,
+        request: LanguageModelRequest,
+        cx: &App,
+    ) -> BoxFuture<'static, Result<u64>> {
+        match self.model.provider {
+            cloud_llm_client::LanguageModelProvider::Anthropic => cx
+                .background_spawn(async move { count_anthropic_tokens_with_tiktoken(request) })
+                .boxed(),
+            cloud_llm_client::LanguageModelProvider::OpenAi => {
+                let model = match open_ai::Model::from_id(&self.model.id.0) {
+                    Ok(model) => model,
+                    Err(err) => return async move { Err(anyhow!(err)) }.boxed(),
+                };
+                cx.background_spawn(async move { count_open_ai_tokens(request, model) })
+                    .boxed()
+            }
+            cloud_llm_client::LanguageModelProvider::XAi => {
+                let model = match x_ai::Model::from_id(&self.model.id.0) {
+                    Ok(model) => model,
+                    Err(err) => return async move { Err(anyhow!(err)) }.boxed(),
+                };
+                cx.background_spawn(async move { count_xai_tokens(request, model) })
+                    .boxed()
+            }
+            cloud_llm_client::LanguageModelProvider::Google => {
+                let http_client = self.http_client.clone();
+                let token_provider = self.token_provider.clone();
+                let model_id = self.model.id.to_string();
+                let generate_content_request =
+                    into_google(request, model_id.clone(), GoogleModelMode::Default);
+                let auth_context = token_provider.auth_context(&cx.to_async());
+                async move {
+                    let token = token_provider.acquire_token(auth_context).await?;
+
+                    let request_body = CountTokensBody {
+                        provider: cloud_llm_client::LanguageModelProvider::Google,
+                        model: model_id,
+                        provider_request: serde_json::to_value(&google_ai::CountTokensRequest {
+                            generate_content_request,
+                        })?,
+                    };
+                    let request = http_client::Request::builder()
+                        .method(Method::POST)
+                        .uri(
+                            http_client
+                                .build_zed_llm_url("/count_tokens", &[])?
+                                .as_ref(),
+                        )
+                        .header("Content-Type", "application/json")
+                        .header("Authorization", format!("Bearer {token}"))
+                        .body(serde_json::to_string(&request_body)?.into())?;
+                    let mut response = http_client.send(request).await?;
+                    let status = response.status();
+                    let headers = response.headers().clone();
+                    let mut response_body = String::new();
+                    response
+                        .body_mut()
+                        .read_to_string(&mut response_body)
+                        .await?;
+
+                    if status.is_success() {
+                        let response_body: CountTokensResponse =
+                            serde_json::from_str(&response_body)?;
+
+                        Ok(response_body.tokens as u64)
+                    } else {
+                        Err(anyhow!(ApiError {
+                            status,
+                            body: response_body,
+                            headers
+                        }))
+                    }
+                }
+                .boxed()
+            }
+        }
+    }
+
+    fn stream_completion(
+        &self,
+        request: LanguageModelRequest,
+        cx: &AsyncApp,
+    ) -> BoxFuture<
+        'static,
+        Result<
+            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
+            LanguageModelCompletionError,
+        >,
+    > {
+        let thread_id = request.thread_id.clone();
+        let prompt_id = request.prompt_id.clone();
+        let app_version = self.app_version.clone();
+        let thinking_allowed = request.thinking_allowed;
+        let enable_thinking = thinking_allowed && self.model.supports_thinking;
+        let provider_name = provider_name(&self.model.provider);
+        match self.model.provider {
+            cloud_llm_client::LanguageModelProvider::Anthropic => {
+                let effort = request
+                    .thinking_effort
+                    .as_ref()
+                    .and_then(|effort| anthropic::Effort::from_str(effort).ok());
+
+                let mut request = into_anthropic(
+                    request,
+                    self.model.id.to_string(),
+                    1.0,
+                    self.model.max_output_tokens as u64,
+                    if enable_thinking {
+                        AnthropicModelMode::Thinking {
+                            budget_tokens: Some(4_096),
+                        }
+                    } else {
+                        AnthropicModelMode::Default
+                    },
+                );
+
+                if enable_thinking && effort.is_some() {
+                    request.thinking = Some(anthropic::Thinking::Adaptive);
+                    request.output_config = Some(anthropic::OutputConfig { effort });
+                }
+
+                let http_client = self.http_client.clone();
+                let token_provider = self.token_provider.clone();
+                let auth_context = token_provider.auth_context(cx);
+                let future = self.request_limiter.stream(async move {
+                    let PerformLlmCompletionResponse {
+                        response,
+                        includes_status_messages,
+                    } = Self::perform_llm_completion(
+                        &http_client,
+                        &*token_provider,
+                        auth_context,
+                        app_version,
+                        CompletionBody {
+                            thread_id,
+                            prompt_id,
+                            provider: cloud_llm_client::LanguageModelProvider::Anthropic,
+                            model: request.model.clone(),
+                            provider_request: serde_json::to_value(&request)
+                                .map_err(|e| anyhow!(e))?,
+                        },
+                    )
+                    .await
+                    .map_err(|err| match err.downcast::<ApiError>() {
+                        Ok(api_err) => anyhow!(LanguageModelCompletionError::from(api_err)),
+                        Err(err) => anyhow!(err),
+                    })?;
+
+                    let mut mapper = AnthropicEventMapper::new();
+                    Ok(map_cloud_completion_events(
+                        Box::pin(response_lines(response, includes_status_messages)),
+                        &provider_name,
+                        move |event| mapper.map_event(event),
+                    ))
+                });
+                async move { Ok(future.await?.boxed()) }.boxed()
+            }
+            cloud_llm_client::LanguageModelProvider::OpenAi => {
+                let http_client = self.http_client.clone();
+                let token_provider = self.token_provider.clone();
+                let effort = request
+                    .thinking_effort
+                    .as_ref()
+                    .and_then(|effort| open_ai::ReasoningEffort::from_str(effort).ok());
+
+                let mut request = into_open_ai_response(
+                    request,
+                    &self.model.id.0,
+                    self.model.supports_parallel_tool_calls,
+                    true,
+                    None,
+                    None,
+                );
+
+                if enable_thinking && let Some(effort) = effort {
+                    request.reasoning = Some(open_ai::responses::ReasoningConfig {
+                        effort,
+                        summary: Some(open_ai::responses::ReasoningSummaryMode::Auto),
+                    });
+                }
+
+                let auth_context = token_provider.auth_context(cx);
+                let future = self.request_limiter.stream(async move {
+                    let PerformLlmCompletionResponse {
+                        response,
+                        includes_status_messages,
+                    } = Self::perform_llm_completion(
+                        &http_client,
+                        &*token_provider,
+                        auth_context,
+                        app_version,
+                        CompletionBody {
+                            thread_id,
+                            prompt_id,
+                            provider: cloud_llm_client::LanguageModelProvider::OpenAi,
+                            model: request.model.clone(),
+                            provider_request: serde_json::to_value(&request)
+                                .map_err(|e| anyhow!(e))?,
+                        },
+                    )
+                    .await?;
+
+                    let mut mapper = OpenAiResponseEventMapper::new();
+                    Ok(map_cloud_completion_events(
+                        Box::pin(response_lines(response, includes_status_messages)),
+                        &provider_name,
+                        move |event| mapper.map_event(event),
+                    ))
+                });
+                async move { Ok(future.await?.boxed()) }.boxed()
+            }
+            cloud_llm_client::LanguageModelProvider::XAi => {
+                let http_client = self.http_client.clone();
+                let token_provider = self.token_provider.clone();
+                let request = into_open_ai(
+                    request,
+                    &self.model.id.0,
+                    self.model.supports_parallel_tool_calls,
+                    false,
+                    None,
+                    None,
+                );
+                let auth_context = token_provider.auth_context(cx);
+                let future = self.request_limiter.stream(async move {
+                    let PerformLlmCompletionResponse {
+                        response,
+                        includes_status_messages,
+                    } = Self::perform_llm_completion(
+                        &http_client,
+                        &*token_provider,
+                        auth_context,
+                        app_version,
+                        CompletionBody {
+                            thread_id,
+                            prompt_id,
+                            provider: cloud_llm_client::LanguageModelProvider::XAi,
+                            model: request.model.clone(),
+                            provider_request: serde_json::to_value(&request)
+                                .map_err(|e| anyhow!(e))?,
+                        },
+                    )
+                    .await?;
+
+                    let mut mapper = OpenAiEventMapper::new();
+                    Ok(map_cloud_completion_events(
+                        Box::pin(response_lines(response, includes_status_messages)),
+                        &provider_name,
+                        move |event| mapper.map_event(event),
+                    ))
+                });
+                async move { Ok(future.await?.boxed()) }.boxed()
+            }
+            cloud_llm_client::LanguageModelProvider::Google => {
+                let http_client = self.http_client.clone();
+                let token_provider = self.token_provider.clone();
+                let request =
+                    into_google(request, self.model.id.to_string(), GoogleModelMode::Default);
+                let auth_context = token_provider.auth_context(cx);
+                let future = self.request_limiter.stream(async move {
+                    let PerformLlmCompletionResponse {
+                        response,
+                        includes_status_messages,
+                    } = Self::perform_llm_completion(
+                        &http_client,
+                        &*token_provider,
+                        auth_context,
+                        app_version,
+                        CompletionBody {
+                            thread_id,
+                            prompt_id,
+                            provider: cloud_llm_client::LanguageModelProvider::Google,
+                            model: request.model.model_id.clone(),
+                            provider_request: serde_json::to_value(&request)
+                                .map_err(|e| anyhow!(e))?,
+                        },
+                    )
+                    .await?;
+
+                    let mut mapper = GoogleEventMapper::new();
+                    Ok(map_cloud_completion_events(
+                        Box::pin(response_lines(response, includes_status_messages)),
+                        &provider_name,
+                        move |event| mapper.map_event(event),
+                    ))
+                });
+                async move { Ok(future.await?.boxed()) }.boxed()
+            }
+        }
+    }
+}
+
+pub struct CloudModelProvider<TP: CloudLlmTokenProvider> {
+    token_provider: Arc<TP>,
+    http_client: Arc<HttpClientWithUrl>,
+    app_version: Option<Version>,
+    models: Vec<Arc<cloud_llm_client::LanguageModel>>,
+    default_model: Option<Arc<cloud_llm_client::LanguageModel>>,
+    default_fast_model: Option<Arc<cloud_llm_client::LanguageModel>>,
+    recommended_models: Vec<Arc<cloud_llm_client::LanguageModel>>,
+}
+
+impl<TP: CloudLlmTokenProvider + 'static> CloudModelProvider<TP> {
+    pub fn new(
+        token_provider: Arc<TP>,
+        http_client: Arc<HttpClientWithUrl>,
+        app_version: Option<Version>,
+    ) -> Self {
+        Self {
+            token_provider,
+            http_client,
+            app_version,
+            models: Vec::new(),
+            default_model: None,
+            default_fast_model: None,
+            recommended_models: Vec::new(),
+        }
+    }
+
+    pub fn refresh_models(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
+        let http_client = self.http_client.clone();
+        let token_provider = self.token_provider.clone();
+        cx.spawn(async move |this, cx| {
+            let auth_context = token_provider.auth_context(cx);
+            let response =
+                Self::fetch_models_request(&http_client, &*token_provider, auth_context).await?;
+            this.update(cx, |this, cx| {
+                this.update_models(response);
+                cx.notify();
+            })
+        })
+    }
+
+    async fn fetch_models_request(
+        http_client: &HttpClientWithUrl,
+        token_provider: &TP,
+        auth_context: TP::AuthContext,
+    ) -> Result<ListModelsResponse> {
+        let token = token_provider.acquire_token(auth_context).await?;
+
+        let request = http_client::Request::builder()
+            .method(Method::GET)
+            .header(CLIENT_SUPPORTS_X_AI_HEADER_NAME, "true")
+            .uri(http_client.build_zed_llm_url("/models", &[])?.as_ref())
+            .header("Authorization", format!("Bearer {token}"))
+            .body(AsyncBody::empty())?;
+        let mut response = http_client
+            .send(request)
+            .await
+            .context("failed to send list models request")?;
+
+        if response.status().is_success() {
+            let mut body = String::new();
+            response.body_mut().read_to_string(&mut body).await?;
+            Ok(serde_json::from_str(&body)?)
+        } else {
+            let mut body = String::new();
+            response.body_mut().read_to_string(&mut body).await?;
+            anyhow::bail!(
+                "error listing models.\nStatus: {:?}\nBody: {body}",
+                response.status(),
+            );
+        }
+    }
+
+    pub fn update_models(&mut self, response: ListModelsResponse) {
+        let models: Vec<_> = response.models.into_iter().map(Arc::new).collect();
+
+        self.default_model = models
+            .iter()
+            .find(|model| {
+                response
+                    .default_model
+                    .as_ref()
+                    .is_some_and(|default_model_id| &model.id == default_model_id)
+            })
+            .cloned();
+        self.default_fast_model = models
+            .iter()
+            .find(|model| {
+                response
+                    .default_fast_model
+                    .as_ref()
+                    .is_some_and(|default_fast_model_id| &model.id == default_fast_model_id)
+            })
+            .cloned();
+        self.recommended_models = response
+            .recommended_models
+            .iter()
+            .filter_map(|id| models.iter().find(|model| &model.id == id))
+            .cloned()
+            .collect();
+        self.models = models;
+    }
+
+    pub fn create_model(
+        &self,
+        model: &Arc<cloud_llm_client::LanguageModel>,
+    ) -> Arc<dyn LanguageModel> {
+        Arc::new(CloudLanguageModel::<TP> {
+            id: LanguageModelId::from(model.id.0.to_string()),
+            model: model.clone(),
+            token_provider: self.token_provider.clone(),
+            http_client: self.http_client.clone(),
+            app_version: self.app_version.clone(),
+            request_limiter: RateLimiter::new(4),
+        })
+    }
+
+    pub fn models(&self) -> &[Arc<cloud_llm_client::LanguageModel>] {
+        &self.models
+    }
+
+    pub fn default_model(&self) -> Option<&Arc<cloud_llm_client::LanguageModel>> {
+        self.default_model.as_ref()
+    }
+
+    pub fn default_fast_model(&self) -> Option<&Arc<cloud_llm_client::LanguageModel>> {
+        self.default_fast_model.as_ref()
+    }
+
+    pub fn recommended_models(&self) -> &[Arc<cloud_llm_client::LanguageModel>] {
+        &self.recommended_models
+    }
+}
+
+pub fn map_cloud_completion_events<T, F>(
+    stream: Pin<Box<dyn Stream<Item = Result<CompletionEvent<T>>> + Send>>,
+    provider: &LanguageModelProviderName,
+    mut map_callback: F,
+) -> BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+where
+    T: DeserializeOwned + 'static,
+    F: FnMut(T) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+        + Send
+        + 'static,
+{
+    let provider = provider.clone();
+    let mut stream = stream.fuse();
+
+    let mut saw_stream_ended = false;
+
+    let mut done = false;
+    let mut pending = VecDeque::new();
+
+    stream::poll_fn(move |cx| {
+        loop {
+            if let Some(item) = pending.pop_front() {
+                return Poll::Ready(Some(item));
+            }
+
+            if done {
+                return Poll::Ready(None);
+            }
+
+            match stream.poll_next_unpin(cx) {
+                Poll::Ready(Some(event)) => {
+                    let items = match event {
+                        Err(error) => {
+                            vec![Err(LanguageModelCompletionError::from(error))]
+                        }
+                        Ok(CompletionEvent::Status(CompletionRequestStatus::StreamEnded)) => {
+                            saw_stream_ended = true;
+                            vec![]
+                        }
+                        Ok(CompletionEvent::Status(status)) => {
+                            LanguageModelCompletionEvent::from_completion_request_status(
+                                status,
+                                provider.clone(),
+                            )
+                            .transpose()
+                            .map(|event| vec![event])
+                            .unwrap_or_default()
+                        }
+                        Ok(CompletionEvent::Event(event)) => map_callback(event),
+                    };
+                    pending.extend(items);
+                }
+                Poll::Ready(None) => {
+                    done = true;
+
+                    if !saw_stream_ended {
+                        return Poll::Ready(Some(Err(
+                            LanguageModelCompletionError::StreamEndedUnexpectedly {
+                                provider: provider.clone(),
+                            },
+                        )));
+                    }
+                }
+                Poll::Pending => return Poll::Pending,
+            }
+        }
+    })
+    .boxed()
+}
+
+pub fn provider_name(
+    provider: &cloud_llm_client::LanguageModelProvider,
+) -> LanguageModelProviderName {
+    match provider {
+        cloud_llm_client::LanguageModelProvider::Anthropic => ANTHROPIC_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::OpenAi => OPEN_AI_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::Google => GOOGLE_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::XAi => X_AI_PROVIDER_NAME,
+    }
+}
+
+pub fn response_lines<T: DeserializeOwned>(
+    response: Response<AsyncBody>,
+    includes_status_messages: bool,
+) -> impl Stream<Item = Result<CompletionEvent<T>>> {
+    futures::stream::try_unfold(
+        (String::new(), BufReader::new(response.into_body())),
+        move |(mut line, mut body)| async move {
+            match body.read_line(&mut line).await {
+                Ok(0) => Ok(None),
+                Ok(_) => {
+                    let event = if includes_status_messages {
+                        serde_json::from_str::<CompletionEvent<T>>(&line)?
+                    } else {
+                        CompletionEvent::Event(serde_json::from_str::<T>(&line)?)
+                    };
+
+                    line.clear();
+                    Ok(Some((event, (line, body))))
+                }
+                Err(e) => Err(e.into()),
+            }
+        },
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use http_client::http::{HeaderMap, StatusCode};
+    use language_model::LanguageModelCompletionError;
+
+    #[test]
+    fn test_api_error_conversion_with_upstream_http_error() {
+        // upstream_http_error with 503 status should become ServerOverloaded
+        let error_body = r#"{"code":"upstream_http_error","message":"Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers, reset reason: connection timeout","upstream_status":503}"#;
+
+        let api_error = ApiError {
+            status: StatusCode::INTERNAL_SERVER_ERROR,
+            body: error_body.to_string(),
+            headers: HeaderMap::new(),
+        };
+
+        let completion_error: LanguageModelCompletionError = api_error.into();
+
+        match completion_error {
+            LanguageModelCompletionError::UpstreamProviderError { message, .. } => {
+                assert_eq!(
+                    message,
+                    "Received an error from the Anthropic API: upstream connect error or disconnect/reset before headers, reset reason: connection timeout"
+                );
+            }
+            _ => panic!(
+                "Expected UpstreamProviderError for upstream 503, got: {:?}",
+                completion_error
+            ),
+        }
+
+        // upstream_http_error with 500 status should become ApiInternalServerError
+        let error_body = r#"{"code":"upstream_http_error","message":"Received an error from the OpenAI API: internal server error","upstream_status":500}"#;
+
+        let api_error = ApiError {
+            status: StatusCode::INTERNAL_SERVER_ERROR,
+            body: error_body.to_string(),
+            headers: HeaderMap::new(),
+        };
+
+        let completion_error: LanguageModelCompletionError = api_error.into();
+
+        match completion_error {
+            LanguageModelCompletionError::UpstreamProviderError { message, .. } => {
+                assert_eq!(
+                    message,
+                    "Received an error from the OpenAI API: internal server error"
+                );
+            }
+            _ => panic!(
+                "Expected UpstreamProviderError for upstream 500, got: {:?}",
+                completion_error
+            ),
+        }
+
+        // upstream_http_error with 429 status should become RateLimitExceeded
+        let error_body = r#"{"code":"upstream_http_error","message":"Received an error from the Google API: rate limit exceeded","upstream_status":429}"#;
+
+        let api_error = ApiError {
+            status: StatusCode::INTERNAL_SERVER_ERROR,
+            body: error_body.to_string(),
+            headers: HeaderMap::new(),
+        };
+
+        let completion_error: LanguageModelCompletionError = api_error.into();
+
+        match completion_error {
+            LanguageModelCompletionError::UpstreamProviderError { message, .. } => {
+                assert_eq!(
+                    message,
+                    "Received an error from the Google API: rate limit exceeded"
+                );
+            }
+            _ => panic!(
+                "Expected UpstreamProviderError for upstream 429, got: {:?}",
+                completion_error
+            ),
+        }
+
+        // Regular 500 error without upstream_http_error should remain ApiInternalServerError for Zed
+        let error_body = "Regular internal server error";
+
+        let api_error = ApiError {
+            status: StatusCode::INTERNAL_SERVER_ERROR,
+            body: error_body.to_string(),
+            headers: HeaderMap::new(),
+        };
+
+        let completion_error: LanguageModelCompletionError = api_error.into();
+
+        match completion_error {
+            LanguageModelCompletionError::ApiInternalServerError { provider, message } => {
+                assert_eq!(provider, PROVIDER_NAME);
+                assert_eq!(message, "Regular internal server error");
+            }
+            _ => panic!(
+                "Expected ApiInternalServerError for regular 500, got: {:?}",
+                completion_error
+            ),
+        }
+
+        // upstream_http_429 format should be converted to UpstreamProviderError
+        let error_body = r#"{"code":"upstream_http_429","message":"Upstream Anthropic rate limit exceeded.","retry_after":30.5}"#;
+
+        let api_error = ApiError {
+            status: StatusCode::INTERNAL_SERVER_ERROR,
+            body: error_body.to_string(),
+            headers: HeaderMap::new(),
+        };
+
+        let completion_error: LanguageModelCompletionError = api_error.into();
+
+        match completion_error {
+            LanguageModelCompletionError::UpstreamProviderError {
+                message,
+                status,
+                retry_after,
+            } => {
+                assert_eq!(message, "Upstream Anthropic rate limit exceeded.");
+                assert_eq!(status, StatusCode::TOO_MANY_REQUESTS);
+                assert_eq!(retry_after, Some(Duration::from_secs_f64(30.5)));
+            }
+            _ => panic!(
+                "Expected UpstreamProviderError for upstream_http_429, got: {:?}",
+                completion_error
+            ),
+        }
+
+        // Invalid JSON in error body should fall back to regular error handling
+        let error_body = "Not JSON at all";
+
+        let api_error = ApiError {
+            status: StatusCode::INTERNAL_SERVER_ERROR,
+            body: error_body.to_string(),
+            headers: HeaderMap::new(),
+        };
+
+        let completion_error: LanguageModelCompletionError = api_error.into();
+
+        match completion_error {
+            LanguageModelCompletionError::ApiInternalServerError { provider, .. } => {
+                assert_eq!(provider, PROVIDER_NAME);
+            }
+            _ => panic!(
+                "Expected ApiInternalServerError for invalid JSON, got: {:?}",
+                completion_error
+            ),
+        }
+    }
+}

crates/markdown/src/html/html_parser.rs 🔗

@@ -1,6 +1,6 @@
 use std::{cell::RefCell, collections::HashMap, mem, ops::Range};
 
-use gpui::{DefiniteLength, FontWeight, SharedString, px, relative};
+use gpui::{DefiniteLength, FontWeight, SharedString, TextAlign, px, relative};
 use html5ever::{
     Attribute, LocalName, ParseOpts, local_name, parse_document, tendril::TendrilSink,
 };
@@ -24,10 +24,17 @@ pub(crate) enum ParsedHtmlElement {
     List(ParsedHtmlList),
     Table(ParsedHtmlTable),
     BlockQuote(ParsedHtmlBlockQuote),
-    Paragraph(HtmlParagraph),
+    Paragraph(ParsedHtmlParagraph),
     Image(HtmlImage),
 }
 
+#[derive(Debug, Clone)]
+#[cfg_attr(test, derive(PartialEq))]
+pub(crate) struct ParsedHtmlParagraph {
+    pub text_align: Option<TextAlign>,
+    pub contents: HtmlParagraph,
+}
+
 impl ParsedHtmlElement {
     pub fn source_range(&self) -> Option<Range<usize>> {
         Some(match self {
@@ -35,7 +42,7 @@ impl ParsedHtmlElement {
             Self::List(list) => list.source_range.clone(),
             Self::Table(table) => table.source_range.clone(),
             Self::BlockQuote(block_quote) => block_quote.source_range.clone(),
-            Self::Paragraph(text) => match text.first()? {
+            Self::Paragraph(paragraph) => match paragraph.contents.first()? {
                 HtmlParagraphChunk::Text(text) => text.source_range.clone(),
                 HtmlParagraphChunk::Image(image) => image.source_range.clone(),
             },
@@ -83,6 +90,7 @@ pub(crate) struct ParsedHtmlHeading {
     pub source_range: Range<usize>,
     pub level: HeadingLevel,
     pub contents: HtmlParagraph,
+    pub text_align: Option<TextAlign>,
 }
 
 #[derive(Debug, Clone)]
@@ -236,20 +244,21 @@ fn parse_html_node(
             consume_children(source_range, node, elements, context);
         }
         NodeData::Text { contents } => {
-            elements.push(ParsedHtmlElement::Paragraph(vec![
-                HtmlParagraphChunk::Text(ParsedHtmlText {
+            elements.push(ParsedHtmlElement::Paragraph(ParsedHtmlParagraph {
+                text_align: None,
+                contents: vec![HtmlParagraphChunk::Text(ParsedHtmlText {
                     source_range,
                     highlights: Vec::default(),
                     links: Vec::default(),
                     contents: contents.borrow().to_string().into(),
-                }),
-            ]));
+                })],
+            }));
         }
         NodeData::Comment { .. } => {}
         NodeData::Element { name, attrs, .. } => {
-            let mut styles = if let Some(styles) =
-                html_style_from_html_styles(extract_styles_from_attributes(attrs))
-            {
+            let styles_map = extract_styles_from_attributes(attrs);
+            let text_align = text_align_from_attributes(attrs, &styles_map);
+            let mut styles = if let Some(styles) = html_style_from_html_styles(styles_map) {
                 vec![styles]
             } else {
                 Vec::default()
@@ -270,7 +279,10 @@ fn parse_html_node(
                 );
 
                 if !paragraph.is_empty() {
-                    elements.push(ParsedHtmlElement::Paragraph(paragraph));
+                    elements.push(ParsedHtmlElement::Paragraph(ParsedHtmlParagraph {
+                        text_align,
+                        contents: paragraph,
+                    }));
                 }
             } else if matches!(
                 name.local,
@@ -303,6 +315,7 @@ fn parse_html_node(
                             _ => unreachable!(),
                         },
                         contents: paragraph,
+                        text_align,
                     }));
                 }
             } else if name.local == local_name!("ul") || name.local == local_name!("ol") {
@@ -589,6 +602,30 @@ fn html_style_from_html_styles(styles: HashMap<String, String>) -> Option<HtmlHi
     }
 }
 
+fn parse_text_align(value: &str) -> Option<TextAlign> {
+    match value.trim().to_ascii_lowercase().as_str() {
+        "left" => Some(TextAlign::Left),
+        "center" => Some(TextAlign::Center),
+        "right" => Some(TextAlign::Right),
+        _ => None,
+    }
+}
+
+fn text_align_from_styles(styles: &HashMap<String, String>) -> Option<TextAlign> {
+    styles
+        .get("text-align")
+        .and_then(|value| parse_text_align(value))
+}
+
+fn text_align_from_attributes(
+    attrs: &RefCell<Vec<Attribute>>,
+    styles: &HashMap<String, String>,
+) -> Option<TextAlign> {
+    text_align_from_styles(styles).or_else(|| {
+        attr_value(attrs, local_name!("align")).and_then(|value| parse_text_align(&value))
+    })
+}
+
 fn extract_styles_from_attributes(attrs: &RefCell<Vec<Attribute>>) -> HashMap<String, String> {
     let mut styles = HashMap::new();
 
@@ -770,6 +807,7 @@ fn extract_html_table(node: &Node, source_range: Range<usize>) -> Option<ParsedH
 #[cfg(test)]
 mod tests {
     use super::*;
+    use gpui::TextAlign;
 
     #[test]
     fn parses_html_styled_text() {
@@ -783,7 +821,7 @@ mod tests {
         let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
             panic!("expected paragraph");
         };
-        let HtmlParagraphChunk::Text(text) = &paragraph[0] else {
+        let HtmlParagraphChunk::Text(text) = &paragraph.contents[0] else {
             panic!("expected text chunk");
         };
 
@@ -851,7 +889,7 @@ mod tests {
         let ParsedHtmlElement::Paragraph(paragraph) = &first_item.content[0] else {
             panic!("expected first item paragraph");
         };
-        let HtmlParagraphChunk::Text(text) = &paragraph[0] else {
+        let HtmlParagraphChunk::Text(text) = &paragraph.contents[0] else {
             panic!("expected first item text");
         };
         assert_eq!(text.contents.as_ref(), "parent");
@@ -866,7 +904,7 @@ mod tests {
         else {
             panic!("expected nested item paragraph");
         };
-        let HtmlParagraphChunk::Text(nested_text) = &nested_paragraph[0] else {
+        let HtmlParagraphChunk::Text(nested_text) = &nested_paragraph.contents[0] else {
             panic!("expected nested item text");
         };
         assert_eq!(nested_text.contents.as_ref(), "child");
@@ -875,9 +913,58 @@ mod tests {
         let ParsedHtmlElement::Paragraph(second_paragraph) = &second_item.content[0] else {
             panic!("expected second item paragraph");
         };
-        let HtmlParagraphChunk::Text(second_text) = &second_paragraph[0] else {
+        let HtmlParagraphChunk::Text(second_text) = &second_paragraph.contents[0] else {
             panic!("expected second item text");
         };
         assert_eq!(second_text.contents.as_ref(), "sibling");
     }
+
+    #[test]
+    fn parses_paragraph_text_align_from_style() {
+        let parsed = parse_html_block("<p style=\"text-align: center\">x</p>", 0..40).unwrap();
+        let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
+            panic!("expected paragraph");
+        };
+        assert_eq!(paragraph.text_align, Some(TextAlign::Center));
+    }
+
+    #[test]
+    fn parses_heading_text_align_from_style() {
+        let parsed = parse_html_block("<h2 style=\"text-align: right\">Title</h2>", 0..45).unwrap();
+        let ParsedHtmlElement::Heading(heading) = &parsed.children[0] else {
+            panic!("expected heading");
+        };
+        assert_eq!(heading.text_align, Some(TextAlign::Right));
+    }
+
+    #[test]
+    fn parses_paragraph_text_align_from_align_attribute() {
+        let parsed = parse_html_block("<p align=\"center\">x</p>", 0..24).unwrap();
+        let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
+            panic!("expected paragraph");
+        };
+        assert_eq!(paragraph.text_align, Some(TextAlign::Center));
+    }
+
+    #[test]
+    fn parses_heading_text_align_from_align_attribute() {
+        let parsed = parse_html_block("<h2 align=\"right\">Title</h2>", 0..30).unwrap();
+        let ParsedHtmlElement::Heading(heading) = &parsed.children[0] else {
+            panic!("expected heading");
+        };
+        assert_eq!(heading.text_align, Some(TextAlign::Right));
+    }
+
+    #[test]
+    fn prefers_style_text_align_over_align_attribute() {
+        let parsed = parse_html_block(
+            "<p align=\"left\" style=\"text-align: center\">x</p>",
+            0..50,
+        )
+        .unwrap();
+        let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
+            panic!("expected paragraph");
+        };
+        assert_eq!(paragraph.text_align, Some(TextAlign::Center));
+    }
 }

crates/markdown/src/html/html_rendering.rs 🔗

@@ -79,9 +79,20 @@ impl MarkdownElement {
 
         match element {
             ParsedHtmlElement::Paragraph(paragraph) => {
-                self.push_markdown_paragraph(builder, &source_range, markdown_end);
-                self.render_html_paragraph(paragraph, source_allocator, builder, cx, markdown_end);
-                builder.pop_div();
+                self.push_markdown_paragraph(
+                    builder,
+                    &source_range,
+                    markdown_end,
+                    paragraph.text_align,
+                );
+                self.render_html_paragraph(
+                    &paragraph.contents,
+                    source_allocator,
+                    builder,
+                    cx,
+                    markdown_end,
+                );
+                self.pop_markdown_paragraph(builder);
             }
             ParsedHtmlElement::Heading(heading) => {
                 self.push_markdown_heading(
@@ -89,6 +100,7 @@ impl MarkdownElement {
                     heading.level,
                     &heading.source_range,
                     markdown_end,
+                    heading.text_align,
                 );
                 self.render_html_paragraph(
                     &heading.contents,

crates/markdown/src/markdown.rs 🔗

@@ -36,8 +36,8 @@ use gpui::{
     FocusHandle, Focusable, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, Image,
     ImageFormat, ImageSource, KeyContext, Length, MouseButton, MouseDownEvent, MouseEvent,
     MouseMoveEvent, MouseUpEvent, Point, ScrollHandle, Stateful, StrikethroughStyle,
-    StyleRefinement, StyledText, Task, TextLayout, TextRun, TextStyle, TextStyleRefinement,
-    actions, img, point, quad,
+    StyleRefinement, StyledText, Task, TextAlign, TextLayout, TextRun, TextStyle,
+    TextStyleRefinement, actions, img, point, quad,
 };
 use language::{CharClassifier, Language, LanguageRegistry, Rope};
 use parser::CodeBlockMetadata;
@@ -1025,8 +1025,17 @@ impl MarkdownElement {
         width: Option<DefiniteLength>,
         height: Option<DefiniteLength>,
     ) {
+        let align = builder.text_style().text_align;
         builder.modify_current_div(|el| {
-            el.items_center().flex().flex_row().child(
+            let mut image_container = el.flex().flex_row().items_center();
+
+            image_container = match align {
+                TextAlign::Left => image_container.justify_start(),
+                TextAlign::Center => image_container.justify_center(),
+                TextAlign::Right => image_container.justify_end(),
+            };
+
+            image_container.child(
                 img(source)
                     .max_w_full()
                     .when_some(height, |this, height| this.h(height))
@@ -1041,14 +1050,29 @@ impl MarkdownElement {
         builder: &mut MarkdownElementBuilder,
         range: &Range<usize>,
         markdown_end: usize,
+        text_align_override: Option<TextAlign>,
     ) {
-        builder.push_div(
-            div().when(!self.style.height_is_multiple_of_line_height, |el| {
-                el.mb_2().line_height(rems(1.3))
-            }),
-            range,
-            markdown_end,
-        );
+        let align = text_align_override.unwrap_or(self.style.base_text_style.text_align);
+        let mut paragraph = div().when(!self.style.height_is_multiple_of_line_height, |el| {
+            el.mb_2().line_height(rems(1.3))
+        });
+
+        paragraph = match align {
+            TextAlign::Center => paragraph.text_center(),
+            TextAlign::Left => paragraph.text_left(),
+            TextAlign::Right => paragraph.text_right(),
+        };
+
+        builder.push_text_style(TextStyleRefinement {
+            text_align: Some(align),
+            ..Default::default()
+        });
+        builder.push_div(paragraph, range, markdown_end);
+    }
+
+    fn pop_markdown_paragraph(&self, builder: &mut MarkdownElementBuilder) {
+        builder.pop_div();
+        builder.pop_text_style();
     }
 
     fn push_markdown_heading(
@@ -1057,15 +1081,26 @@ impl MarkdownElement {
         level: pulldown_cmark::HeadingLevel,
         range: &Range<usize>,
         markdown_end: usize,
+        text_align_override: Option<TextAlign>,
     ) {
+        let align = text_align_override.unwrap_or(self.style.base_text_style.text_align);
         let mut heading = div().mb_2();
         heading = apply_heading_style(heading, level, self.style.heading_level_styles.as_ref());
 
+        heading = match align {
+            TextAlign::Center => heading.text_center(),
+            TextAlign::Left => heading.text_left(),
+            TextAlign::Right => heading.text_right(),
+        };
+
         let mut heading_style = self.style.heading.clone();
         let heading_text_style = heading_style.text_style().clone();
         heading.style().refine(&heading_style);
 
-        builder.push_text_style(heading_text_style);
+        builder.push_text_style(TextStyleRefinement {
+            text_align: Some(align),
+            ..heading_text_style
+        });
         builder.push_div(heading, range, markdown_end);
     }
 
@@ -1571,10 +1606,16 @@ impl Element for MarkdownElement {
                             }
                         }
                         MarkdownTag::Paragraph => {
-                            self.push_markdown_paragraph(&mut builder, range, markdown_end);
+                            self.push_markdown_paragraph(&mut builder, range, markdown_end, None);
                         }
                         MarkdownTag::Heading { level, .. } => {
-                            self.push_markdown_heading(&mut builder, *level, range, markdown_end);
+                            self.push_markdown_heading(
+                                &mut builder,
+                                *level,
+                                range,
+                                markdown_end,
+                                None,
+                            );
                         }
                         MarkdownTag::BlockQuote => {
                             self.push_markdown_block_quote(&mut builder, range, markdown_end);
@@ -1826,7 +1867,7 @@ impl Element for MarkdownElement {
                         current_img_block_range.take();
                     }
                     MarkdownTagEnd::Paragraph => {
-                        builder.pop_div();
+                        self.pop_markdown_paragraph(&mut builder);
                     }
                     MarkdownTagEnd::Heading(_) => {
                         self.pop_markdown_heading(&mut builder);

crates/multi_buffer/src/anchor.rs 🔗

@@ -34,21 +34,28 @@ pub enum Anchor {
     Max,
 }
 
-pub(crate) enum AnchorSeekTarget {
+pub(crate) enum AnchorSeekTarget<'a> {
+    // buffer no longer exists at its original path key in the multibuffer
+    Missing {
+        path_key: &'a PathKey,
+    },
+    // we have excerpts for the buffer at the expected path key
     Excerpt {
-        path_key: PathKey,
-        anchor: ExcerptAnchor,
-        // None when the buffer no longer exists in the multibuffer
-        snapshot: Option<BufferSnapshot>,
+        path_key: &'a PathKey,
+        path_key_index: PathKeyIndex,
+        anchor: text::Anchor,
+        snapshot: &'a BufferSnapshot,
     },
+    // no excerpts and it's a min or max anchor
     Empty,
 }
 
-impl std::fmt::Debug for AnchorSeekTarget {
+impl std::fmt::Debug for AnchorSeekTarget<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Self::Excerpt {
                 path_key,
+                path_key_index: _,
                 anchor,
                 snapshot: _,
             } => f
@@ -56,7 +63,11 @@ impl std::fmt::Debug for AnchorSeekTarget {
                 .field("path_key", path_key)
                 .field("anchor", anchor)
                 .finish(),
-            Self::Empty => write!(f, "Empty"),
+            Self::Missing { path_key } => f
+                .debug_struct("Missing")
+                .field("path_key", path_key)
+                .finish(),
+            Self::Empty => f.debug_struct("Empty").finish(),
         }
     }
 }
@@ -110,15 +121,16 @@ impl ExcerptAnchor {
             return self.text_anchor.buffer_id.cmp(&other.text_anchor.buffer_id);
         }
 
-        let Some(buffer) = snapshot.buffer_for_path(&self_path_key) else {
-            return Ordering::Equal;
-        };
-        // Comparing two anchors into buffer A that formerly existed at path P,
-        // when path P has since been reused for a different buffer B
-        if buffer.remote_id() != self.text_anchor.buffer_id {
+        // two anchors into the same buffer at the same path
+        // TODO(cole) buffer_for_path is slow
+        let Some(buffer) = snapshot
+            .buffer_for_path(&self_path_key)
+            .filter(|buffer| buffer.remote_id() == self.text_anchor.buffer_id)
+        else {
+            // buffer no longer exists at the original path (which may have been reused for a different buffer),
+            // so no way to compare the anchors
             return Ordering::Equal;
         };
-        assert_eq!(self.text_anchor.buffer_id, buffer.remote_id());
         let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer);
         if text_cmp != Ordering::Equal {
             return text_cmp;
@@ -234,21 +246,33 @@ impl ExcerptAnchor {
                 .is_ge()
     }
 
-    pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget {
+    pub(crate) fn seek_target<'a>(
+        &self,
+        snapshot: &'a MultiBufferSnapshot,
+    ) -> AnchorSeekTarget<'a> {
         self.try_seek_target(snapshot)
             .expect("anchor is from different multi-buffer")
     }
 
-    pub(crate) fn try_seek_target(
+    pub(crate) fn try_seek_target<'a>(
         &self,
-        snapshot: &MultiBufferSnapshot,
-    ) -> Option<AnchorSeekTarget> {
+        snapshot: &'a MultiBufferSnapshot,
+    ) -> Option<AnchorSeekTarget<'a>> {
         let path_key = snapshot.try_path_for_anchor(*self)?;
-        let buffer = snapshot.buffer_for_path(&path_key).cloned();
+
+        let Some(state) = snapshot
+            .buffers
+            .get(&self.buffer_id())
+            .filter(|state| &state.path_key == path_key)
+        else {
+            return Some(AnchorSeekTarget::Missing { path_key });
+        };
+
         Some(AnchorSeekTarget::Excerpt {
             path_key,
-            anchor: *self,
-            snapshot: buffer,
+            path_key_index: self.path,
+            anchor: self.text_anchor(),
+            snapshot: &state.buffer_snapshot,
         })
     }
 }
@@ -372,7 +396,10 @@ impl Anchor {
         }
     }
 
-    pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget {
+    pub(crate) fn seek_target<'a>(
+        &self,
+        snapshot: &'a MultiBufferSnapshot,
+    ) -> AnchorSeekTarget<'a> {
         let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else {
             return AnchorSeekTarget::Empty;
         };
@@ -406,10 +433,10 @@ impl Anchor {
         }
     }
 
-    pub(crate) fn try_seek_target(
+    pub(crate) fn try_seek_target<'a>(
         &self,
-        snapshot: &MultiBufferSnapshot,
-    ) -> Option<AnchorSeekTarget> {
+        snapshot: &'a MultiBufferSnapshot,
+    ) -> Option<AnchorSeekTarget<'a>> {
         let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else {
             return Some(AnchorSeekTarget::Empty);
         };

crates/multi_buffer/src/multi_buffer.rs 🔗

@@ -21,9 +21,9 @@ use itertools::Itertools;
 use language::{
     AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier,
     CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings,
-    IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point,
-    PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId,
-    TreeSitterOptions, Unclipped,
+    IndentSize, Language, LanguageAwareStyling, LanguageScope, OffsetRangeExt, OffsetUtf16,
+    Outline, OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _,
+    ToPoint as _, TransactionId, TreeSitterOptions, Unclipped,
     language_settings::{AllLanguageSettings, LanguageSettings},
 };
 
@@ -870,6 +870,7 @@ impl ExcerptRange<text::Anchor> {
 #[derive(Clone, Debug)]
 pub struct ExcerptSummary {
     path_key: PathKey,
+    path_key_index: Option<PathKeyIndex>,
     max_anchor: Option<text::Anchor>,
     widest_line_number: u32,
     text: MBTextSummary,
@@ -880,6 +881,7 @@ impl ExcerptSummary {
     pub fn min() -> Self {
         ExcerptSummary {
             path_key: PathKey::min(),
+            path_key_index: None,
             max_anchor: None,
             widest_line_number: 0,
             text: MBTextSummary::default(),
@@ -1072,7 +1074,7 @@ pub struct MultiBufferChunks<'a> {
     range: Range<MultiBufferOffset>,
     excerpt_offset_range: Range<ExcerptOffset>,
     excerpt_chunks: Option<ExcerptChunks<'a>>,
-    language_aware: bool,
+    language_aware: LanguageAwareStyling,
     snapshot: &'a MultiBufferSnapshot,
 }
 
@@ -3340,9 +3342,15 @@ impl EventEmitter<Event> for MultiBuffer {}
 
 impl MultiBufferSnapshot {
     pub fn text(&self) -> String {
-        self.chunks(MultiBufferOffset::ZERO..self.len(), false)
-            .map(|chunk| chunk.text)
-            .collect()
+        self.chunks(
+            MultiBufferOffset::ZERO..self.len(),
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
+        )
+        .map(|chunk| chunk.text)
+        .collect()
     }
 
     pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
@@ -3378,7 +3386,14 @@ impl MultiBufferSnapshot {
     }
 
     pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = &str> + '_ {
-        self.chunks(range, false).map(|chunk| chunk.text)
+        self.chunks(
+            range,
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
+        )
+        .map(|chunk| chunk.text)
     }
 
     pub fn is_line_blank(&self, row: MultiBufferRow) -> bool {
@@ -4178,7 +4193,7 @@ impl MultiBufferSnapshot {
     pub fn chunks<T: ToOffset>(
         &self,
         range: Range<T>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
     ) -> MultiBufferChunks<'_> {
         let mut chunks = MultiBufferChunks {
             excerpt_offset_range: ExcerptDimension(MultiBufferOffset::ZERO)
@@ -6373,11 +6388,11 @@ impl MultiBufferSnapshot {
         self.buffers.get(&id).map(|state| &state.buffer_snapshot)
     }
 
-    fn try_path_for_anchor(&self, anchor: ExcerptAnchor) -> Option<PathKey> {
-        self.path_keys_by_index.get(&anchor.path).cloned()
+    fn try_path_for_anchor(&self, anchor: ExcerptAnchor) -> Option<&PathKey> {
+        self.path_keys_by_index.get(&anchor.path)
     }
 
-    pub fn path_for_anchor(&self, anchor: ExcerptAnchor) -> PathKey {
+    pub fn path_for_anchor(&self, anchor: ExcerptAnchor) -> &PathKey {
         self.try_path_for_anchor(anchor)
             .expect("invalid anchor: path was never added to multibuffer")
     }
@@ -7227,7 +7242,7 @@ impl Excerpt {
     fn chunks_in_range<'a>(
         &'a self,
         range: Range<usize>,
-        language_aware: bool,
+        language_aware: LanguageAwareStyling,
         snapshot: &'a MultiBufferSnapshot,
     ) -> ExcerptChunks<'a> {
         let buffer = self.buffer_snapshot(snapshot);
@@ -7314,6 +7329,7 @@ impl sum_tree::Item for Excerpt {
         }
         ExcerptSummary {
             path_key: self.path_key.clone(),
+            path_key_index: Some(self.path_key_index),
             max_anchor: Some(self.range.context.end),
             widest_line_number: self.max_buffer_row,
             text: text.into(),
@@ -7412,6 +7428,7 @@ impl sum_tree::ContextLessSummary for ExcerptSummary {
         );
 
         self.path_key = summary.path_key.clone();
+        self.path_key_index = summary.path_key_index;
         self.max_anchor = summary.max_anchor;
         self.text += summary.text;
         self.widest_line_number = cmp::max(self.widest_line_number, summary.widest_line_number);
@@ -7419,38 +7436,36 @@ impl sum_tree::ContextLessSummary for ExcerptSummary {
     }
 }
 
-impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for AnchorSeekTarget {
+impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for AnchorSeekTarget<'_> {
     fn cmp(
         &self,
         cursor_location: &ExcerptSummary,
         _cx: <ExcerptSummary as sum_tree::Summary>::Context<'_>,
     ) -> cmp::Ordering {
         match self {
+            AnchorSeekTarget::Missing { path_key } => {
+                // Want to end up after any excerpts for (a different buffer at) the original path
+                match Ord::cmp(*path_key, &cursor_location.path_key) {
+                    Ordering::Less => Ordering::Less,
+                    Ordering::Equal | Ordering::Greater => Ordering::Greater,
+                }
+            }
             AnchorSeekTarget::Excerpt {
                 path_key,
+                path_key_index,
                 anchor,
                 snapshot,
             } => {
-                let path_comparison = Ord::cmp(path_key, &cursor_location.path_key);
-                if path_comparison.is_ne() {
-                    path_comparison
-                } else if let Some(snapshot) = snapshot {
-                    if anchor.text_anchor.buffer_id != snapshot.remote_id() {
-                        Ordering::Greater
-                    } else if let Some(max_anchor) = cursor_location.max_anchor {
-                        debug_assert_eq!(max_anchor.buffer_id, snapshot.remote_id());
-                        anchor.text_anchor().cmp(&max_anchor, snapshot)
-                    } else {
-                        Ordering::Greater
-                    }
+                if Some(*path_key_index) != cursor_location.path_key_index {
+                    Ord::cmp(*path_key, &cursor_location.path_key)
+                } else if let Some(max_anchor) = cursor_location.max_anchor {
+                    debug_assert_eq!(max_anchor.buffer_id, snapshot.remote_id());
+                    anchor.cmp(&max_anchor, snapshot)
                 } else {
-                    // shouldn't happen because we expect this buffer not to have any excerpts
-                    // (otherwise snapshot would have been Some)
-                    Ordering::Equal
+                    Ordering::Greater
                 }
             }
-            // This should be dead code because Empty is only constructed for an empty snapshot
-            AnchorSeekTarget::Empty => Ordering::Equal,
+            AnchorSeekTarget::Empty => Ordering::Greater,
         }
     }
 }

crates/multi_buffer/src/multi_buffer_tests.rs 🔗

@@ -5039,7 +5039,13 @@ fn check_edits(
 fn assert_chunks_in_ranges(snapshot: &MultiBufferSnapshot) {
     let full_text = snapshot.text();
     for ix in 0..full_text.len() {
-        let mut chunks = snapshot.chunks(MultiBufferOffset(0)..snapshot.len(), false);
+        let mut chunks = snapshot.chunks(
+            MultiBufferOffset(0)..snapshot.len(),
+            LanguageAwareStyling {
+                tree_sitter: false,
+                diagnostics: false,
+            },
+        );
         chunks.seek(MultiBufferOffset(ix)..snapshot.len());
         let tail = chunks.map(|chunk| chunk.text).collect::<String>();
         assert_eq!(tail, &full_text[ix..], "seek to range: {:?}", ix..);
@@ -5300,7 +5306,13 @@ fn test_random_chunk_bitmaps(cx: &mut App, mut rng: StdRng) {
 
     let snapshot = multibuffer.read(cx).snapshot(cx);
 
-    let chunks = snapshot.chunks(MultiBufferOffset(0)..snapshot.len(), false);
+    let chunks = snapshot.chunks(
+        MultiBufferOffset(0)..snapshot.len(),
+        LanguageAwareStyling {
+            tree_sitter: false,
+            diagnostics: false,
+        },
+    );
 
     for chunk in chunks {
         let chunk_text = chunk.text;
@@ -5466,7 +5478,13 @@ fn test_random_chunk_bitmaps_with_diffs(cx: &mut App, mut rng: StdRng) {
 
     let snapshot = multibuffer.read(cx).snapshot(cx);
 
-    let chunks = snapshot.chunks(MultiBufferOffset(0)..snapshot.len(), false);
+    let chunks = snapshot.chunks(
+        MultiBufferOffset(0)..snapshot.len(),
+        LanguageAwareStyling {
+            tree_sitter: false,
+            diagnostics: false,
+        },
+    );
 
     for chunk in chunks {
         let chunk_text = chunk.text;

crates/onboarding/Cargo.toml 🔗

@@ -17,6 +17,8 @@ default = []
 [dependencies]
 anyhow.workspace = true
 client.workspace = true
+cloud_api_types.workspace = true
+collections.workspace = true
 component.workspace = true
 db.workspace = true
 documented.workspace = true

crates/onboarding/src/basics_page.rs 🔗

@@ -1,15 +1,23 @@
 use std::sync::Arc;
+use std::time::Duration;
 
-use client::TelemetrySettings;
+use client::{Client, TelemetrySettings, UserStore, zed_urls};
+use cloud_api_types::Plan;
+use collections::HashMap;
 use fs::Fs;
-use gpui::{Action, App, IntoElement};
+use gpui::{Action, Animation, AnimationExt, App, Entity, IntoElement, pulsating_between};
+use project::agent_server_store::AllAgentServersSettings;
 use project::project_settings::ProjectSettings;
-use settings::{BaseKeymap, Settings, update_settings_file};
+use project::{AgentRegistryStore, RegistryAgent};
+use settings::{
+    BaseKeymap, CustomAgentServerSettings, Settings, SettingsStore, update_settings_file,
+};
 use theme::{Appearance, SystemAppearance, ThemeRegistry};
 use theme_settings::{ThemeAppearanceMode, ThemeName, ThemeSelection, ThemeSettings};
 use ui::{
-    Divider, StatefulInteractiveElement, SwitchField, TintColor, ToggleButtonGroup,
-    ToggleButtonGroupSize, ToggleButtonSimple, ToggleButtonWithIcon, Tooltip, prelude::*,
+    AgentSetupButton, Divider, StatefulInteractiveElement, SwitchField, TintColor,
+    ToggleButtonGroup, ToggleButtonGroupSize, ToggleButtonSimple, ToggleButtonWithIcon, Tooltip,
+    prelude::*,
 };
 use vim_mode_setting::VimModeSetting;
 
@@ -86,7 +94,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
         )
         .child(
             h_flex()
-                .gap_4()
+                .gap_2()
                 .justify_between()
                 .children(render_theme_previews(tab_index, &theme_selection, cx)),
         );
@@ -520,13 +528,182 @@ fn render_import_settings_section(tab_index: &mut isize, cx: &mut App) -> impl I
         .child(h_flex().gap_1().child(vscode).child(cursor))
 }
 
-pub(crate) fn render_basics_page(cx: &mut App) -> impl IntoElement {
+const FEATURED_AGENT_IDS: &[&str] = &["claude-acp", "codex-acp", "github-copilot-cli", "cursor"];
+
+fn render_registry_agent_button(
+    agent: &RegistryAgent,
+    installed: bool,
+    cx: &mut App,
+) -> impl IntoElement {
+    let agent_id = agent.id().to_string();
+    let element_id = format!("{}-onboarding", agent_id);
+
+    let icon = match agent.icon_path() {
+        Some(icon_path) => Icon::from_external_svg(icon_path.clone()),
+        None => Icon::new(IconName::Sparkle),
+    }
+    .size(IconSize::XSmall)
+    .color(Color::Muted);
+
+    let fs = <dyn Fs>::global(cx);
+
+    let state_element = if installed {
+        Icon::new(IconName::Check)
+            .size(IconSize::Small)
+            .color(Color::Success)
+            .into_any_element()
+    } else {
+        Label::new("Install")
+            .size(LabelSize::XSmall)
+            .color(Color::Muted)
+            .into_any_element()
+    };
+
+    AgentSetupButton::new(element_id)
+        .icon(icon)
+        .name(agent.name().clone())
+        .state(state_element)
+        .disabled(installed)
+        .on_click(move |_, _, cx| {
+            let agent_id = agent_id.clone();
+            update_settings_file(fs.clone(), cx, move |settings, _| {
+                let agent_servers = settings.agent_servers.get_or_insert_default();
+                agent_servers.entry(agent_id).or_insert_with(|| {
+                    CustomAgentServerSettings::Registry {
+                        env: Default::default(),
+                        default_mode: None,
+                        default_model: None,
+                        favorite_models: Vec::new(),
+                        default_config_options: HashMap::default(),
+                        favorite_config_option_values: HashMap::default(),
+                    }
+                });
+            });
+        })
+}
+
+fn render_zed_agent_button(user_store: &Entity<UserStore>, cx: &mut App) -> impl IntoElement {
+    let client = Client::global(cx);
+    let status = *client.status().borrow();
+
+    let plan = user_store.read(cx).plan();
+    let is_free = matches!(plan, Some(Plan::ZedFree) | None);
+    let is_pro = matches!(plan, Some(Plan::ZedPro));
+    let is_trial = matches!(plan, Some(Plan::ZedProTrial));
+
+    let is_signed_out = status.is_signed_out()
+        || matches!(
+            status,
+            client::Status::AuthenticationError | client::Status::ConnectionError
+        );
+    let is_signing_in = status.is_signing_in();
+    let is_signed_in = !is_signed_out;
+
+    let state_element = if is_signed_out {
+        Label::new("Sign In")
+            .size(LabelSize::XSmall)
+            .color(Color::Muted)
+            .into_any_element()
+    } else if is_signing_in {
+        Label::new("Signing In…")
+            .size(LabelSize::XSmall)
+            .color(Color::Muted)
+            .with_animation(
+                "signing-in",
+                Animation::new(Duration::from_secs(2))
+                    .repeat()
+                    .with_easing(pulsating_between(0.4, 0.8)),
+                |label, delta| label.alpha(delta),
+            )
+            .into_any_element()
+    } else if is_signed_in && is_free {
+        Label::new("Start Free Trial")
+            .size(LabelSize::XSmall)
+            .color(Color::Muted)
+            .into_any_element()
+    } else {
+        Icon::new(IconName::Check)
+            .size(IconSize::Small)
+            .color(Color::Success)
+            .into_any_element()
+    };
+
+    AgentSetupButton::new("zed-agent-onboarding")
+        .icon(
+            Icon::new(IconName::ZedAgent)
+                .size(IconSize::XSmall)
+                .color(Color::Muted),
+        )
+        .name("Zed Agent")
+        .state(state_element)
+        .disabled(is_trial || is_pro)
+        .map(|this| {
+            if is_signed_in && is_free {
+                this.on_click(move |_, _window, cx| {
+                    telemetry::event!("Start Trial Clicked", state = "post-sign-in");
+                    cx.open_url(&zed_urls::start_trial_url(cx))
+                })
+            } else {
+                this.on_click(move |_, _, cx| {
+                    let client = Client::global(cx);
+                    cx.spawn(async move |cx| client.sign_in_with_optional_connect(true, cx).await)
+                        .detach_and_log_err(cx);
+                })
+            }
+        })
+}
+
+fn render_ai_section(user_store: &Entity<UserStore>, cx: &mut App) -> impl IntoElement {
+    let registry_agents = AgentRegistryStore::try_global(cx)
+        .map(|store| store.read(cx).agents().to_vec())
+        .unwrap_or_default();
+
+    let installed_agents = cx
+        .global::<SettingsStore>()
+        .get::<AllAgentServersSettings>(None)
+        .clone();
+
+    let column_count = 1 + FEATURED_AGENT_IDS.len() as u16;
+
+    let grid = FEATURED_AGENT_IDS.iter().fold(
+        div()
+            .w_full()
+            .mt_1p5()
+            .grid()
+            .grid_cols(column_count)
+            .gap_2()
+            .child(render_zed_agent_button(user_store, cx)),
+        |grid, agent_id| {
+            let Some(agent) = registry_agents
+                .iter()
+                .find(|a| a.id().as_ref() == *agent_id)
+            else {
+                return grid;
+            };
+            let is_installed = installed_agents.contains_key(*agent_id);
+            grid.child(render_registry_agent_button(agent, is_installed, cx))
+        },
+    );
+
+    v_flex()
+        .gap_0p5()
+        .child(Label::new("Agent Setup"))
+        .child(
+            Label::new("Install your favorite agents and start your first thread.")
+                .color(Color::Muted),
+        )
+        .child(grid)
+}
+
+pub(crate) fn render_basics_page(user_store: &Entity<UserStore>, cx: &mut App) -> impl IntoElement {
     let mut tab_index = 0;
+
     v_flex()
         .id("basics-page")
         .gap_6()
         .child(render_theme_section(&mut tab_index, cx))
         .child(render_base_keymap_section(&mut tab_index, cx))
+        .child(render_ai_section(user_store, cx))
         .child(render_import_settings_section(&mut tab_index, cx))
         .child(render_vim_mode_switch(&mut tab_index, cx))
         .child(render_worktree_auto_trust_switch(&mut tab_index, cx))

crates/onboarding/src/onboarding.rs 🔗

@@ -16,6 +16,7 @@ use ui::{
     Divider, KeyBinding, ParentElement as _, StatefulInteractiveElement, Vector, VectorName,
     WithScrollbar as _, prelude::*, rems_from_px,
 };
+
 pub use workspace::welcome::ShowWelcome;
 use workspace::welcome::WelcomePage;
 use workspace::{
@@ -259,7 +260,7 @@ impl Onboarding {
     }
 
     fn render_page(&mut self, cx: &mut Context<Self>) -> AnyElement {
-        crate::basics_page::render_basics_page(cx).into_any_element()
+        crate::basics_page::render_basics_page(&self.user_store, cx).into_any_element()
     }
 }
 
@@ -329,15 +330,12 @@ impl Render for Onboarding {
                                         Button::new("finish_setup", "Finish Setup")
                                             .style(ButtonStyle::Filled)
                                             .size(ButtonSize::Medium)
-                                            .width(Rems(12.0))
-                                            .key_binding(
-                                                KeyBinding::for_action_in(
-                                                    &Finish,
-                                                    &self.focus_handle,
-                                                    cx,
-                                                )
-                                                .size(rems_from_px(12.)),
-                                            )
+                                            .width(rems_from_px(200.))
+                                            .key_binding(KeyBinding::for_action_in(
+                                                &Finish,
+                                                &self.focus_handle,
+                                                cx,
+                                            ))
                                             .on_click(|_, window, cx| {
                                                 window.dispatch_action(Finish.boxed_clone(), cx);
                                             })

crates/onboarding/src/theme_preview.rs 🔗

@@ -129,7 +129,7 @@ impl ThemePreviewTile {
             syntax_colors[idx].unwrap_or(colors.text)
         };
 
-        let line_count = 13;
+        let line_count = 10;
 
         let lines = (0..line_count)
             .map(|line_idx| {
@@ -147,7 +147,7 @@ impl ThemePreviewTile {
                     })
                     .collect::<Vec<_>>();
 
-                h_flex().gap(px(2.)).ml(relative(indent)).children(blocks)
+                h_flex().gap_0p5().ml(relative(indent)).children(blocks)
             })
             .collect::<Vec<_>>();
 
@@ -160,14 +160,16 @@ impl ThemePreviewTile {
         width: impl Into<Length> + Clone,
         skeleton_height: impl Into<Length>,
     ) -> impl IntoElement {
-        div()
+        v_flex()
             .h_full()
             .w(width)
-            .border_r(px(1.))
-            .border_color(colors.border_transparent)
+            .p_2()
+            .gap_1()
             .bg(colors.panel_background)
-            .child(v_flex().p_2().size_full().gap_1().children(
-                Self::render_sidebar_skeleton_items(seed, colors, skeleton_height.into()),
+            .children(Self::render_sidebar_skeleton_items(
+                seed,
+                colors,
+                skeleton_height.into(),
             ))
     }
 
@@ -176,18 +178,16 @@ impl ThemePreviewTile {
         theme: Arc<Theme>,
         skeleton_height: impl Into<Length>,
     ) -> impl IntoElement {
-        v_flex().h_full().flex_grow().child(
-            div()
-                .size_full()
-                .overflow_hidden()
-                .bg(theme.colors().editor_background)
-                .p_2()
-                .child(Self::render_pseudo_code_skeleton(
-                    seed,
-                    theme,
-                    skeleton_height.into(),
-                )),
-        )
+        div()
+            .p_2()
+            .size_full()
+            .overflow_hidden()
+            .bg(theme.colors().editor_background)
+            .child(Self::render_pseudo_code_skeleton(
+                seed,
+                theme,
+                skeleton_height.into(),
+            ))
     }
 
     pub fn render_editor(
@@ -197,8 +197,8 @@ impl ThemePreviewTile {
         skeleton_height: impl Into<Length> + Clone,
     ) -> impl IntoElement {
         div()
-            .size_full()
             .flex()
+            .size_full()
             .bg(theme.colors().background.alpha(1.00))
             .child(Self::render_sidebar(
                 seed,

crates/open_ai/Cargo.toml 🔗

@@ -17,13 +17,18 @@ schemars = ["dep:schemars"]
 
 [dependencies]
 anyhow.workspace = true
+collections.workspace = true
 futures.workspace = true
 http_client.workspace = true
+language_model_core.workspace = true
 rand.workspace = true
 schemars = { workspace = true, optional = true }
 log.workspace = true
 serde.workspace = true
 serde_json.workspace = true
-settings.workspace = true
 strum.workspace = true
 thiserror.workspace = true
+tiktoken-rs.workspace = true
+
+[dev-dependencies]
+pretty_assertions.workspace = true

crates/open_ai/src/completion.rs 🔗

@@ -0,0 +1,1693 @@
+use anyhow::{Result, anyhow};
+use collections::HashMap;
+use futures::{Stream, StreamExt};
+use language_model_core::{
+    LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelImage,
+    LanguageModelRequest, LanguageModelRequestMessage, LanguageModelToolChoice,
+    LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, MessageContent,
+    Role, StopReason, TokenUsage,
+    util::{fix_streamed_json, parse_tool_arguments},
+};
+use std::pin::Pin;
+use std::sync::Arc;
+
+use crate::responses::{
+    Request as ResponseRequest, ResponseFunctionCallItem, ResponseFunctionCallOutputContent,
+    ResponseFunctionCallOutputItem, ResponseInputContent, ResponseInputItem, ResponseMessageItem,
+    ResponseOutputItem, ResponseSummary as ResponsesSummary, ResponseUsage as ResponsesUsage,
+    StreamEvent as ResponsesStreamEvent,
+};
+use crate::{
+    FunctionContent, FunctionDefinition, ImageUrl, MessagePart, Model, ReasoningEffort,
+    ResponseStreamEvent, ToolCall, ToolCallContent,
+};
+
+pub fn into_open_ai(
+    request: LanguageModelRequest,
+    model_id: &str,
+    supports_parallel_tool_calls: bool,
+    supports_prompt_cache_key: bool,
+    max_output_tokens: Option<u64>,
+    reasoning_effort: Option<ReasoningEffort>,
+) -> crate::Request {
+    let stream = !model_id.starts_with("o1-");
+
+    let mut messages = Vec::new();
+    for message in request.messages {
+        for content in message.content {
+            match content {
+                MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
+                    let should_add = if message.role == Role::User {
+                        // Including whitespace-only user messages can cause errors with OpenAI-compatible APIs.
+                        // See https://github.com/zed-industries/zed/issues/40097
+                        !text.trim().is_empty()
+                    } else {
+                        !text.is_empty()
+                    };
+                    if should_add {
+                        add_message_content_part(
+                            MessagePart::Text { text },
+                            message.role,
+                            &mut messages,
+                        );
+                    }
+                }
+                MessageContent::RedactedThinking(_) => {}
+                MessageContent::Image(image) => {
+                    add_message_content_part(
+                        MessagePart::Image {
+                            image_url: ImageUrl {
+                                url: image.to_base64_url(),
+                                detail: None,
+                            },
+                        },
+                        message.role,
+                        &mut messages,
+                    );
+                }
+                MessageContent::ToolUse(tool_use) => {
+                    let tool_call = ToolCall {
+                        id: tool_use.id.to_string(),
+                        content: ToolCallContent::Function {
+                            function: FunctionContent {
+                                name: tool_use.name.to_string(),
+                                arguments: serde_json::to_string(&tool_use.input)
+                                    .unwrap_or_default(),
+                            },
+                        },
+                    };
+
+                    if let Some(crate::RequestMessage::Assistant { tool_calls, .. }) =
+                        messages.last_mut()
+                    {
+                        tool_calls.push(tool_call);
+                    } else {
+                        messages.push(crate::RequestMessage::Assistant {
+                            content: None,
+                            tool_calls: vec![tool_call],
+                        });
+                    }
+                }
+                MessageContent::ToolResult(tool_result) => {
+                    let content = match &tool_result.content {
+                        LanguageModelToolResultContent::Text(text) => {
+                            vec![MessagePart::Text {
+                                text: text.to_string(),
+                            }]
+                        }
+                        LanguageModelToolResultContent::Image(image) => {
+                            vec![MessagePart::Image {
+                                image_url: ImageUrl {
+                                    url: image.to_base64_url(),
+                                    detail: None,
+                                },
+                            }]
+                        }
+                    };
+
+                    messages.push(crate::RequestMessage::Tool {
+                        content: content.into(),
+                        tool_call_id: tool_result.tool_use_id.to_string(),
+                    });
+                }
+            }
+        }
+    }
+
+    crate::Request {
+        model: model_id.into(),
+        messages,
+        stream,
+        stream_options: if stream {
+            Some(crate::StreamOptions::default())
+        } else {
+            None
+        },
+        stop: request.stop,
+        temperature: request.temperature.or(Some(1.0)),
+        max_completion_tokens: max_output_tokens,
+        parallel_tool_calls: if supports_parallel_tool_calls && !request.tools.is_empty() {
+            Some(supports_parallel_tool_calls)
+        } else {
+            None
+        },
+        prompt_cache_key: if supports_prompt_cache_key {
+            request.thread_id
+        } else {
+            None
+        },
+        tools: request
+            .tools
+            .into_iter()
+            .map(|tool| crate::ToolDefinition::Function {
+                function: FunctionDefinition {
+                    name: tool.name,
+                    description: Some(tool.description),
+                    parameters: Some(tool.input_schema),
+                },
+            })
+            .collect(),
+        tool_choice: request.tool_choice.map(|choice| match choice {
+            LanguageModelToolChoice::Auto => crate::ToolChoice::Auto,
+            LanguageModelToolChoice::Any => crate::ToolChoice::Required,
+            LanguageModelToolChoice::None => crate::ToolChoice::None,
+        }),
+        reasoning_effort,
+    }
+}
+
+pub fn into_open_ai_response(
+    request: LanguageModelRequest,
+    model_id: &str,
+    supports_parallel_tool_calls: bool,
+    supports_prompt_cache_key: bool,
+    max_output_tokens: Option<u64>,
+    reasoning_effort: Option<ReasoningEffort>,
+) -> ResponseRequest {
+    let stream = !model_id.starts_with("o1-");
+
+    let LanguageModelRequest {
+        thread_id,
+        prompt_id: _,
+        intent: _,
+        messages,
+        tools,
+        tool_choice,
+        stop: _,
+        temperature,
+        thinking_allowed: _,
+        thinking_effort: _,
+        speed: _,
+    } = request;
+
+    let mut input_items = Vec::new();
+    for (index, message) in messages.into_iter().enumerate() {
+        append_message_to_response_items(message, index, &mut input_items);
+    }
+
+    let tools: Vec<_> = tools
+        .into_iter()
+        .map(|tool| crate::responses::ToolDefinition::Function {
+            name: tool.name,
+            description: Some(tool.description),
+            parameters: Some(tool.input_schema),
+            strict: None,
+        })
+        .collect();
+
+    ResponseRequest {
+        model: model_id.into(),
+        input: input_items,
+        stream,
+        temperature,
+        top_p: None,
+        max_output_tokens,
+        parallel_tool_calls: if tools.is_empty() {
+            None
+        } else {
+            Some(supports_parallel_tool_calls)
+        },
+        tool_choice: tool_choice.map(|choice| match choice {
+            LanguageModelToolChoice::Auto => crate::ToolChoice::Auto,
+            LanguageModelToolChoice::Any => crate::ToolChoice::Required,
+            LanguageModelToolChoice::None => crate::ToolChoice::None,
+        }),
+        tools,
+        prompt_cache_key: if supports_prompt_cache_key {
+            thread_id
+        } else {
+            None
+        },
+        reasoning: reasoning_effort.map(|effort| crate::responses::ReasoningConfig {
+            effort,
+            summary: Some(crate::responses::ReasoningSummaryMode::Auto),
+        }),
+    }
+}
+
+fn append_message_to_response_items(
+    message: LanguageModelRequestMessage,
+    index: usize,
+    input_items: &mut Vec<ResponseInputItem>,
+) {
+    let mut content_parts: Vec<ResponseInputContent> = Vec::new();
+
+    for content in message.content {
+        match content {
+            MessageContent::Text(text) => {
+                push_response_text_part(&message.role, text, &mut content_parts);
+            }
+            MessageContent::Thinking { text, .. } => {
+                push_response_text_part(&message.role, text, &mut content_parts);
+            }
+            MessageContent::RedactedThinking(_) => {}
+            MessageContent::Image(image) => {
+                push_response_image_part(&message.role, image, &mut content_parts);
+            }
+            MessageContent::ToolUse(tool_use) => {
+                flush_response_parts(&message.role, index, &mut content_parts, input_items);
+                let call_id = tool_use.id.to_string();
+                input_items.push(ResponseInputItem::FunctionCall(ResponseFunctionCallItem {
+                    call_id,
+                    name: tool_use.name.to_string(),
+                    arguments: tool_use.raw_input,
+                }));
+            }
+            MessageContent::ToolResult(tool_result) => {
+                flush_response_parts(&message.role, index, &mut content_parts, input_items);
+                input_items.push(ResponseInputItem::FunctionCallOutput(
+                    ResponseFunctionCallOutputItem {
+                        call_id: tool_result.tool_use_id.to_string(),
+                        output: match tool_result.content {
+                            LanguageModelToolResultContent::Text(text) => {
+                                ResponseFunctionCallOutputContent::Text(text.to_string())
+                            }
+                            LanguageModelToolResultContent::Image(image) => {
+                                ResponseFunctionCallOutputContent::List(vec![
+                                    ResponseInputContent::Image {
+                                        image_url: image.to_base64_url(),
+                                    },
+                                ])
+                            }
+                        },
+                    },
+                ));
+            }
+        }
+    }
+
+    flush_response_parts(&message.role, index, &mut content_parts, input_items);
+}
+
+fn push_response_text_part(
+    role: &Role,
+    text: impl Into<String>,
+    parts: &mut Vec<ResponseInputContent>,
+) {
+    let text = text.into();
+    if text.trim().is_empty() {
+        return;
+    }
+
+    match role {
+        Role::Assistant => parts.push(ResponseInputContent::OutputText {
+            text,
+            annotations: Vec::new(),
+        }),
+        _ => parts.push(ResponseInputContent::Text { text }),
+    }
+}
+
+fn push_response_image_part(
+    role: &Role,
+    image: LanguageModelImage,
+    parts: &mut Vec<ResponseInputContent>,
+) {
+    match role {
+        Role::Assistant => parts.push(ResponseInputContent::OutputText {
+            text: "[image omitted]".to_string(),
+            annotations: Vec::new(),
+        }),
+        _ => parts.push(ResponseInputContent::Image {
+            image_url: image.to_base64_url(),
+        }),
+    }
+}
+
+fn flush_response_parts(
+    role: &Role,
+    _index: usize,
+    parts: &mut Vec<ResponseInputContent>,
+    input_items: &mut Vec<ResponseInputItem>,
+) {
+    if parts.is_empty() {
+        return;
+    }
+
+    let item = ResponseInputItem::Message(ResponseMessageItem {
+        role: match role {
+            Role::User => crate::Role::User,
+            Role::Assistant => crate::Role::Assistant,
+            Role::System => crate::Role::System,
+        },
+        content: parts.clone(),
+    });
+
+    input_items.push(item);
+    parts.clear();
+}
+
+fn add_message_content_part(
+    new_part: MessagePart,
+    role: Role,
+    messages: &mut Vec<crate::RequestMessage>,
+) {
+    match (role, messages.last_mut()) {
+        (Role::User, Some(crate::RequestMessage::User { content }))
+        | (
+            Role::Assistant,
+            Some(crate::RequestMessage::Assistant {
+                content: Some(content),
+                ..
+            }),
+        )
+        | (Role::System, Some(crate::RequestMessage::System { content, .. })) => {
+            content.push_part(new_part);
+        }
+        _ => {
+            messages.push(match role {
+                Role::User => crate::RequestMessage::User {
+                    content: crate::MessageContent::from(vec![new_part]),
+                },
+                Role::Assistant => crate::RequestMessage::Assistant {
+                    content: Some(crate::MessageContent::from(vec![new_part])),
+                    tool_calls: Vec::new(),
+                },
+                Role::System => crate::RequestMessage::System {
+                    content: crate::MessageContent::from(vec![new_part]),
+                },
+            });
+        }
+    }
+}
+
+pub struct OpenAiEventMapper {
+    tool_calls_by_index: HashMap<usize, RawToolCall>,
+}
+
+impl OpenAiEventMapper {
+    pub fn new() -> Self {
+        Self {
+            tool_calls_by_index: HashMap::default(),
+        }
+    }
+
+    pub fn map_stream(
+        mut self,
+        events: Pin<Box<dyn Send + Stream<Item = Result<ResponseStreamEvent>>>>,
+    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+    {
+        events.flat_map(move |event| {
+            futures::stream::iter(match event {
+                Ok(event) => self.map_event(event),
+                Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
+            })
+        })
+    }
+
+    pub fn map_event(
+        &mut self,
+        event: ResponseStreamEvent,
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        let mut events = Vec::new();
+        if let Some(usage) = event.usage {
+            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
+                input_tokens: usage.prompt_tokens,
+                output_tokens: usage.completion_tokens,
+                cache_creation_input_tokens: 0,
+                cache_read_input_tokens: 0,
+            })));
+        }
+
+        let Some(choice) = event.choices.first() else {
+            return events;
+        };
+
+        if let Some(delta) = choice.delta.as_ref() {
+            if let Some(reasoning_content) = delta.reasoning_content.clone() {
+                if !reasoning_content.is_empty() {
+                    events.push(Ok(LanguageModelCompletionEvent::Thinking {
+                        text: reasoning_content,
+                        signature: None,
+                    }));
+                }
+            }
+            if let Some(content) = delta.content.clone() {
+                if !content.is_empty() {
+                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
+                }
+            }
+
+            if let Some(tool_calls) = delta.tool_calls.as_ref() {
+                for tool_call in tool_calls {
+                    let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
+
+                    if let Some(tool_id) = tool_call.id.clone() {
+                        entry.id = tool_id;
+                    }
+
+                    if let Some(function) = tool_call.function.as_ref() {
+                        if let Some(name) = function.name.clone() {
+                            entry.name = name;
+                        }
+
+                        if let Some(arguments) = function.arguments.clone() {
+                            entry.arguments.push_str(&arguments);
+                        }
+                    }
+
+                    if !entry.id.is_empty() && !entry.name.is_empty() {
+                        if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                            &fix_streamed_json(&entry.arguments),
+                        ) {
+                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                                LanguageModelToolUse {
+                                    id: entry.id.clone().into(),
+                                    name: entry.name.as_str().into(),
+                                    is_input_complete: false,
+                                    input,
+                                    raw_input: entry.arguments.clone(),
+                                    thought_signature: None,
+                                },
+                            )));
+                        }
+                    }
+                }
+            }
+        }
+
+        match choice.finish_reason.as_deref() {
+            Some("stop") => {
+                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
+            }
+            Some("tool_calls") => {
+                events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
+                    match parse_tool_arguments(&tool_call.arguments) {
+                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: tool_call.id.clone().into(),
+                                name: tool_call.name.as_str().into(),
+                                is_input_complete: true,
+                                input,
+                                raw_input: tool_call.arguments.clone(),
+                                thought_signature: None,
+                            },
+                        )),
+                        Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+                            id: tool_call.id.into(),
+                            tool_name: tool_call.name.into(),
+                            raw_input: tool_call.arguments.clone().into(),
+                            json_parse_error: error.to_string(),
+                        }),
+                    }
+                }));
+
+                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
+            }
+            Some(stop_reason) => {
+                log::error!("Unexpected OpenAI stop_reason: {stop_reason:?}",);
+                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
+            }
+            None => {}
+        }
+
+        events
+    }
+}
+
+#[derive(Default)]
+struct RawToolCall {
+    id: String,
+    name: String,
+    arguments: String,
+}
+
+pub struct OpenAiResponseEventMapper {
+    function_calls_by_item: HashMap<String, PendingResponseFunctionCall>,
+    pending_stop_reason: Option<StopReason>,
+}
+
+#[derive(Default)]
+struct PendingResponseFunctionCall {
+    call_id: String,
+    name: Arc<str>,
+    arguments: String,
+}
+
+impl OpenAiResponseEventMapper {
+    pub fn new() -> Self {
+        Self {
+            function_calls_by_item: HashMap::default(),
+            pending_stop_reason: None,
+        }
+    }
+
+    pub fn map_stream(
+        mut self,
+        events: Pin<Box<dyn Send + Stream<Item = Result<ResponsesStreamEvent>>>>,
+    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+    {
+        events.flat_map(move |event| {
+            futures::stream::iter(match event {
+                Ok(event) => self.map_event(event),
+                Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
+            })
+        })
+    }
+
+    pub fn map_event(
+        &mut self,
+        event: ResponsesStreamEvent,
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        match event {
+            ResponsesStreamEvent::OutputItemAdded { item, .. } => {
+                let mut events = Vec::new();
+
+                match &item {
+                    ResponseOutputItem::Message(message) => {
+                        if let Some(id) = &message.id {
+                            events.push(Ok(LanguageModelCompletionEvent::StartMessage {
+                                message_id: id.clone(),
+                            }));
+                        }
+                    }
+                    ResponseOutputItem::FunctionCall(function_call) => {
+                        if let Some(item_id) = function_call.id.clone() {
+                            let call_id = function_call
+                                .call_id
+                                .clone()
+                                .or_else(|| function_call.id.clone())
+                                .unwrap_or_else(|| item_id.clone());
+                            let entry = PendingResponseFunctionCall {
+                                call_id,
+                                name: Arc::<str>::from(
+                                    function_call.name.clone().unwrap_or_default(),
+                                ),
+                                arguments: function_call.arguments.clone(),
+                            };
+                            self.function_calls_by_item.insert(item_id, entry);
+                        }
+                    }
+                    ResponseOutputItem::Reasoning(_) | ResponseOutputItem::Unknown => {}
+                }
+                events
+            }
+            ResponsesStreamEvent::ReasoningSummaryTextDelta { delta, .. } => {
+                if delta.is_empty() {
+                    Vec::new()
+                } else {
+                    vec![Ok(LanguageModelCompletionEvent::Thinking {
+                        text: delta,
+                        signature: None,
+                    })]
+                }
+            }
+            ResponsesStreamEvent::OutputTextDelta { delta, .. } => {
+                if delta.is_empty() {
+                    Vec::new()
+                } else {
+                    vec![Ok(LanguageModelCompletionEvent::Text(delta))]
+                }
+            }
+            ResponsesStreamEvent::FunctionCallArgumentsDelta { item_id, delta, .. } => {
+                if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) {
+                    entry.arguments.push_str(&delta);
+                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                        &fix_streamed_json(&entry.arguments),
+                    ) {
+                        return vec![Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: LanguageModelToolUseId::from(entry.call_id.clone()),
+                                name: entry.name.clone(),
+                                is_input_complete: false,
+                                input,
+                                raw_input: entry.arguments.clone(),
+                                thought_signature: None,
+                            },
+                        ))];
+                    }
+                }
+                Vec::new()
+            }
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id, arguments, ..
+            } => {
+                if let Some(mut entry) = self.function_calls_by_item.remove(&item_id) {
+                    if !arguments.is_empty() {
+                        entry.arguments = arguments;
+                    }
+                    let raw_input = entry.arguments.clone();
+                    self.pending_stop_reason = Some(StopReason::ToolUse);
+                    match parse_tool_arguments(&entry.arguments) {
+                        Ok(input) => {
+                            vec![Ok(LanguageModelCompletionEvent::ToolUse(
+                                LanguageModelToolUse {
+                                    id: LanguageModelToolUseId::from(entry.call_id.clone()),
+                                    name: entry.name.clone(),
+                                    is_input_complete: true,
+                                    input,
+                                    raw_input,
+                                    thought_signature: None,
+                                },
+                            ))]
+                        }
+                        Err(error) => {
+                            vec![Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+                                id: LanguageModelToolUseId::from(entry.call_id.clone()),
+                                tool_name: entry.name.clone(),
+                                raw_input: Arc::<str>::from(raw_input),
+                                json_parse_error: error.to_string(),
+                            })]
+                        }
+                    }
+                } else {
+                    Vec::new()
+                }
+            }
+            ResponsesStreamEvent::Completed { response } => {
+                self.handle_completion(response, StopReason::EndTurn)
+            }
+            ResponsesStreamEvent::Incomplete { response } => {
+                let reason = response
+                    .status_details
+                    .as_ref()
+                    .and_then(|details| details.reason.as_deref());
+                let stop_reason = match reason {
+                    Some("max_output_tokens") => StopReason::MaxTokens,
+                    Some("content_filter") => {
+                        self.pending_stop_reason = Some(StopReason::Refusal);
+                        StopReason::Refusal
+                    }
+                    _ => self
+                        .pending_stop_reason
+                        .take()
+                        .unwrap_or(StopReason::EndTurn),
+                };
+
+                let mut events = Vec::new();
+                if self.pending_stop_reason.is_none() {
+                    events.extend(self.emit_tool_calls_from_output(&response.output));
+                }
+                if let Some(usage) = response.usage.as_ref() {
+                    events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
+                        token_usage_from_response_usage(usage),
+                    )));
+                }
+                events.push(Ok(LanguageModelCompletionEvent::Stop(stop_reason)));
+                events
+            }
+            ResponsesStreamEvent::Failed { response } => {
+                let message = response
+                    .status_details
+                    .and_then(|details| details.error)
+                    .map(|error| error.to_string())
+                    .unwrap_or_else(|| "response failed".to_string());
+                vec![Err(LanguageModelCompletionError::Other(anyhow!(message)))]
+            }
+            ResponsesStreamEvent::Error { error }
+            | ResponsesStreamEvent::GenericError { error } => {
+                vec![Err(LanguageModelCompletionError::Other(anyhow!(
+                    error.message
+                )))]
+            }
+            ResponsesStreamEvent::ReasoningSummaryPartAdded { summary_index, .. } => {
+                if summary_index > 0 {
+                    vec![Ok(LanguageModelCompletionEvent::Thinking {
+                        text: "\n\n".to_string(),
+                        signature: None,
+                    })]
+                } else {
+                    Vec::new()
+                }
+            }
+            ResponsesStreamEvent::OutputTextDone { .. }
+            | ResponsesStreamEvent::OutputItemDone { .. }
+            | ResponsesStreamEvent::ContentPartAdded { .. }
+            | ResponsesStreamEvent::ContentPartDone { .. }
+            | ResponsesStreamEvent::ReasoningSummaryTextDone { .. }
+            | ResponsesStreamEvent::ReasoningSummaryPartDone { .. }
+            | ResponsesStreamEvent::Created { .. }
+            | ResponsesStreamEvent::InProgress { .. }
+            | ResponsesStreamEvent::Unknown => Vec::new(),
+        }
+    }
+
+    fn handle_completion(
+        &mut self,
+        response: ResponsesSummary,
+        default_reason: StopReason,
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        let mut events = Vec::new();
+
+        if self.pending_stop_reason.is_none() {
+            events.extend(self.emit_tool_calls_from_output(&response.output));
+        }
+
+        if let Some(usage) = response.usage.as_ref() {
+            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
+                token_usage_from_response_usage(usage),
+            )));
+        }
+
+        let stop_reason = self.pending_stop_reason.take().unwrap_or(default_reason);
+        events.push(Ok(LanguageModelCompletionEvent::Stop(stop_reason)));
+        events
+    }
+
+    fn emit_tool_calls_from_output(
+        &mut self,
+        output: &[ResponseOutputItem],
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        let mut events = Vec::new();
+        for item in output {
+            if let ResponseOutputItem::FunctionCall(function_call) = item {
+                let Some(call_id) = function_call
+                    .call_id
+                    .clone()
+                    .or_else(|| function_call.id.clone())
+                else {
+                    log::error!(
+                        "Function call item missing both call_id and id: {:?}",
+                        function_call
+                    );
+                    continue;
+                };
+                let name: Arc<str> = Arc::from(function_call.name.clone().unwrap_or_default());
+                let arguments = &function_call.arguments;
+                self.pending_stop_reason = Some(StopReason::ToolUse);
+                match parse_tool_arguments(arguments) {
+                    Ok(input) => {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: LanguageModelToolUseId::from(call_id.clone()),
+                                name: name.clone(),
+                                is_input_complete: true,
+                                input,
+                                raw_input: arguments.clone(),
+                                thought_signature: None,
+                            },
+                        )));
+                    }
+                    Err(error) => {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+                            id: LanguageModelToolUseId::from(call_id.clone()),
+                            tool_name: name.clone(),
+                            raw_input: Arc::<str>::from(arguments.clone()),
+                            json_parse_error: error.to_string(),
+                        }));
+                    }
+                }
+            }
+        }
+        events
+    }
+}
+
+fn token_usage_from_response_usage(usage: &ResponsesUsage) -> TokenUsage {
+    TokenUsage {
+        input_tokens: usage.input_tokens.unwrap_or_default(),
+        output_tokens: usage.output_tokens.unwrap_or_default(),
+        cache_creation_input_tokens: 0,
+        cache_read_input_tokens: 0,
+    }
+}
+
+/// Flatten a `LanguageModelRequest` into the message shape `tiktoken_rs`
+/// expects for token counting. Message contents are reduced to plain text via
+/// `string_contents()`; `name` and `function_call` are never populated.
+pub fn collect_tiktoken_messages(
+    request: LanguageModelRequest,
+) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
+    let mut messages = Vec::with_capacity(request.messages.len());
+    for message in request.messages {
+        let role = match message.role {
+            Role::User => "user",
+            Role::Assistant => "assistant",
+            Role::System => "system",
+        };
+        messages.push(tiktoken_rs::ChatCompletionRequestMessage {
+            role: role.into(),
+            content: Some(message.string_contents()),
+            name: None,
+            function_call: None,
+        });
+    }
+    messages
+}
+
+/// Count tokens for an OpenAI model. This is synchronous; callers should spawn
+/// it on a background thread if needed.
+///
+/// Returns an error when `tiktoken_rs` has no tokenizer mapping for the
+/// resolved model id.
+pub fn count_open_ai_tokens(request: LanguageModelRequest, model: Model) -> Result<u64> {
+    let messages = collect_tiktoken_messages(request);
+    match model {
+        // Custom models carry no known id, so pick a tokenizer heuristically
+        // from the configured context size.
+        Model::Custom { max_tokens, .. } => {
+            let model = if max_tokens >= 100_000 {
+                // If the max tokens is 100k or more, it likely uses the o200k_base tokenizer
+                "gpt-4o"
+            } else {
+                // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are
+                // supported with this tiktoken method
+                "gpt-4"
+            };
+            tiktoken_rs::num_tokens_from_messages(model, &messages)
+        }
+        // Currently supported by tiktoken_rs
+        // Sometimes tiktoken-rs is behind on model support. If that is the case, make a new branch
+        // arm with an override. We enumerate all supported models here so that we can check if new
+        // models are supported yet or not.
+        // (Deliberately no `_` catch-all: adding a Model variant must fail to
+        // compile here so its tokenizer support gets reviewed.)
+        Model::ThreePointFiveTurbo
+        | Model::Four
+        | Model::FourTurbo
+        | Model::FourOmniMini
+        | Model::FourPointOneNano
+        | Model::O1
+        | Model::O3
+        | Model::O3Mini
+        | Model::Five
+        | Model::FiveCodex
+        | Model::FiveMini
+        | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
+        // GPT-5.1, 5.2, 5.2-codex, 5.3-codex, 5.4, and 5.4-pro don't have dedicated tiktoken support; use gpt-5 tokenizer
+        Model::FivePointOne
+        | Model::FivePointTwo
+        | Model::FivePointTwoCodex
+        | Model::FivePointThreeCodex
+        | Model::FivePointFour
+        | Model::FivePointFourPro => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages),
+    }
+    // tiktoken_rs reports usize; widen to u64 for the public interface.
+    .map(|tokens| tokens as u64)
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::responses::{
+        ReasoningSummaryPart, ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage,
+        ResponseReasoningItem, ResponseStatusDetails, ResponseSummary, ResponseUsage,
+        StreamEvent as ResponsesStreamEvent,
+    };
+    use futures::{StreamExt, executor::block_on};
+    use language_model_core::{
+        LanguageModelImage, LanguageModelRequestMessage, LanguageModelRequestTool,
+        LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolUse,
+        LanguageModelToolUseId, SharedString,
+    };
+    use pretty_assertions::assert_eq;
+    use serde_json::json;
+
+    use super::*;
+
+    /// Drives `OpenAiResponseEventMapper` over a fixed event list and collects
+    /// the resulting completion events, unwrapping every item (tests expect no
+    /// stream-level errors).
+    fn map_response_events(events: Vec<ResponsesStreamEvent>) -> Vec<LanguageModelCompletionEvent> {
+        block_on(async {
+            OpenAiResponseEventMapper::new()
+                .map_stream(Box::pin(futures::stream::iter(events.into_iter().map(Ok))))
+                .collect::<Vec<_>>()
+                .await
+                .into_iter()
+                .map(Result::unwrap)
+                .collect()
+        })
+    }
+
+    /// Builds an in-progress assistant message output item with the given id.
+    fn response_item_message(id: &str) -> ResponseOutputItem {
+        ResponseOutputItem::Message(ResponseOutputMessage {
+            id: Some(id.to_string()),
+            role: Some("assistant".to_string()),
+            status: Some("in_progress".to_string()),
+            content: vec![],
+        })
+    }
+
+    /// Builds an in-progress `get_weather` function-call output item. The
+    /// provider-visible call id is always "call_123"; `args` seeds the
+    /// initial (possibly partial) argument string.
+    fn response_item_function_call(id: &str, args: Option<&str>) -> ResponseOutputItem {
+        ResponseOutputItem::FunctionCall(ResponseFunctionToolCall {
+            id: Some(id.to_string()),
+            status: Some("in_progress".to_string()),
+            name: Some("get_weather".to_string()),
+            call_id: Some("call_123".to_string()),
+            arguments: args.map(|s| s.to_string()).unwrap_or_default(),
+        })
+    }
+
+    #[test]
+    fn tiktoken_rs_support() {
+        let request = LanguageModelRequest {
+            thread_id: None,
+            prompt_id: None,
+            intent: None,
+            messages: vec![LanguageModelRequestMessage {
+                role: Role::User,
+                content: vec![MessageContent::Text("message".into())],
+                cache: false,
+                reasoning_details: None,
+            }],
+            tools: vec![],
+            tool_choice: None,
+            stop: vec![],
+            temperature: None,
+            thinking_allowed: true,
+            thinking_effort: None,
+            speed: None,
+        };
+
+        // Validate that all models are supported by tiktoken-rs
+        for model in <Model as strum::IntoEnumIterator>::iter() {
+            let count = count_open_ai_tokens(request.clone(), model).unwrap();
+            assert!(count > 0);
+        }
+    }
+
+    #[test]
+    fn responses_stream_maps_text_and_usage() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_message("msg_123"),
+            },
+            ResponsesStreamEvent::OutputTextDelta {
+                item_id: "msg_123".into(),
+                output_index: 0,
+                content_index: Some(0),
+                delta: "Hello".into(),
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary {
+                    usage: Some(ResponseUsage {
+                        input_tokens: Some(5),
+                        output_tokens: Some(3),
+                        total_tokens: Some(8),
+                    }),
+                    ..Default::default()
+                },
+            },
+        ];
+
+        // Expected order: StartMessage, Text, UsageUpdate, then EndTurn stop.
+        let mapped = map_response_events(events);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::StartMessage { ref message_id } if message_id == "msg_123"
+        ));
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::Text(ref text) if text == "Hello"
+        ));
+        assert!(matches!(
+            mapped[2],
+            LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
+                input_tokens: 5,
+                output_tokens: 3,
+                ..
+            })
+        ));
+        assert!(matches!(
+            mapped[3],
+            LanguageModelCompletionEvent::Stop(StopReason::EndTurn)
+        ));
+    }
+
+    #[test]
+    fn into_open_ai_response_builds_complete_payload() {
+        let tool_call_id = LanguageModelToolUseId::from("call-42");
+        let tool_input = json!({ "city": "Boston" });
+        let tool_arguments = serde_json::to_string(&tool_input).unwrap();
+        let tool_use = LanguageModelToolUse {
+            id: tool_call_id.clone(),
+            name: Arc::from("get_weather"),
+            raw_input: tool_arguments.clone(),
+            input: tool_input,
+            is_input_complete: true,
+            thought_signature: None,
+        };
+        let tool_result = LanguageModelToolResult {
+            tool_use_id: tool_call_id,
+            tool_name: Arc::from("get_weather"),
+            is_error: false,
+            content: LanguageModelToolResultContent::Text(Arc::from("Sunny")),
+            output: Some(json!({ "forecast": "Sunny" })),
+        };
+        // "aGVsbG8=" is base64 for "hello"; only the URL form is asserted.
+        let user_image = LanguageModelImage {
+            source: SharedString::from("aGVsbG8="),
+            size: None,
+        };
+        let expected_image_url = user_image.to_base64_url();
+
+        let request = LanguageModelRequest {
+            thread_id: Some("thread-123".into()),
+            prompt_id: None,
+            intent: None,
+            messages: vec![
+                LanguageModelRequestMessage {
+                    role: Role::System,
+                    content: vec![MessageContent::Text("System context".into())],
+                    cache: false,
+                    reasoning_details: None,
+                },
+                LanguageModelRequestMessage {
+                    role: Role::User,
+                    content: vec![
+                        MessageContent::Text("Please check the weather.".into()),
+                        MessageContent::Image(user_image),
+                    ],
+                    cache: false,
+                    reasoning_details: None,
+                },
+                LanguageModelRequestMessage {
+                    role: Role::Assistant,
+                    content: vec![
+                        MessageContent::Text("Looking that up.".into()),
+                        MessageContent::ToolUse(tool_use),
+                    ],
+                    cache: false,
+                    reasoning_details: None,
+                },
+                LanguageModelRequestMessage {
+                    role: Role::Assistant,
+                    content: vec![MessageContent::ToolResult(tool_result)],
+                    cache: false,
+                    reasoning_details: None,
+                },
+            ],
+            tools: vec![LanguageModelRequestTool {
+                name: "get_weather".into(),
+                description: "Fetches the weather".into(),
+                input_schema: json!({ "type": "object" }),
+                use_input_streaming: false,
+            }],
+            tool_choice: Some(LanguageModelToolChoice::Any),
+            stop: vec!["<STOP>".into()],
+            temperature: None,
+            thinking_allowed: false,
+            thinking_effort: None,
+            speed: None,
+        };
+
+        let response = into_open_ai_response(
+            request,
+            "custom-model",
+            true,
+            true,
+            Some(2048),
+            Some(ReasoningEffort::Low),
+        );
+
+        // Compare the full serialized wire payload against a golden value.
+        let serialized = serde_json::to_value(&response).unwrap();
+        let expected = json!({
+            "model": "custom-model",
+            "input": [
+                {
+                    "type": "message",
+                    "role": "system",
+                    "content": [
+                        { "type": "input_text", "text": "System context" }
+                    ]
+                },
+                {
+                    "type": "message",
+                    "role": "user",
+                    "content": [
+                        { "type": "input_text", "text": "Please check the weather." },
+                        { "type": "input_image", "image_url": expected_image_url }
+                    ]
+                },
+                {
+                    "type": "message",
+                    "role": "assistant",
+                    "content": [
+                        { "type": "output_text", "text": "Looking that up.", "annotations": [] }
+                    ]
+                },
+                {
+                    "type": "function_call",
+                    "call_id": "call-42",
+                    "name": "get_weather",
+                    "arguments": tool_arguments
+                },
+                {
+                    "type": "function_call_output",
+                    "call_id": "call-42",
+                    "output": "Sunny"
+                }
+            ],
+            "stream": true,
+            "max_output_tokens": 2048,
+            "parallel_tool_calls": true,
+            "tool_choice": "required",
+            "tools": [
+                {
+                    "type": "function",
+                    "name": "get_weather",
+                    "description": "Fetches the weather",
+                    "parameters": { "type": "object" }
+                }
+            ],
+            "prompt_cache_key": "thread-123",
+            "reasoning": { "effort": "low", "summary": "auto" }
+        });
+
+        assert_eq!(serialized, expected);
+    }
+
+    #[test]
+    fn responses_stream_maps_tool_calls() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("{\"city\":\"Bos")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDelta {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                delta: "ton\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "{\"city\":\"Boston\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        // Partial tool use (incomplete input), then the completed call keyed by
+        // the provider call_id, then a ToolUse stop.
+        let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 3);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                is_input_complete: false,
+                ..
+            })
+        ));
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                ref id,
+                ref name,
+                ref raw_input,
+                is_input_complete: true,
+                ..
+            }) if id.to_string() == "call_123"
+                && name.as_ref() == "get_weather"
+                && raw_input == "{\"city\":\"Boston\"}"
+        ));
+        assert!(matches!(
+            mapped[2],
+            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_uses_max_tokens_stop_reason() {
+        let events = vec![ResponsesStreamEvent::Incomplete {
+            response: ResponseSummary {
+                status_details: Some(ResponseStatusDetails {
+                    reason: Some("max_output_tokens".into()),
+                    r#type: Some("incomplete".into()),
+                    error: None,
+                }),
+                usage: Some(ResponseUsage {
+                    input_tokens: Some(10),
+                    output_tokens: Some(20),
+                    total_tokens: Some(30),
+                }),
+                ..Default::default()
+            },
+        }];
+
+        let mapped = map_response_events(events);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
+                input_tokens: 10,
+                output_tokens: 20,
+                ..
+            })
+        ));
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_handles_multiple_tool_calls() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn1", Some("{\"city\":\"NYC\"}")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn1".into(),
+                output_index: 0,
+                arguments: "{\"city\":\"NYC\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 1,
+                sequence_number: None,
+                item: response_item_function_call("item_fn2", Some("{\"city\":\"LA\"}")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn2".into(),
+                output_index: 1,
+                arguments: "{\"city\":\"LA\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 3);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
+            if raw_input == "{\"city\":\"NYC\"}"
+        ));
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
+            if raw_input == "{\"city\":\"LA\"}"
+        ));
+        assert!(matches!(
+            mapped[2],
+            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_handles_mixed_text_and_tool_calls() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_message("msg_123"),
+            },
+            ResponsesStreamEvent::OutputTextDelta {
+                item_id: "msg_123".into(),
+                output_index: 0,
+                content_index: Some(0),
+                delta: "Let me check that".into(),
+            },
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 1,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("{\"query\":\"test\"}")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 1,
+                arguments: "{\"query\":\"test\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        let mapped = map_response_events(events);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::StartMessage { .. }
+        ));
+        assert!(
+            matches!(mapped[1], LanguageModelCompletionEvent::Text(ref text) if text == "Let me check that")
+        );
+        assert!(
+            matches!(mapped[2], LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. }) if raw_input == "{\"query\":\"test\"}")
+        );
+        assert!(matches!(
+            mapped[3],
+            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_handles_json_parse_error() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("{invalid json")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "{invalid json".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        // Unparseable arguments become a ToolUseJsonParseError, not a panic.
+        let mapped = map_response_events(events);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::ToolUseJsonParseError { ref raw_input, .. }
+            if raw_input.as_ref() == "{invalid json"
+        ));
+    }
+
+    #[test]
+    fn responses_stream_handles_incomplete_function_call() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("{\"city\":")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDelta {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                delta: "\"Boston\"".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Incomplete {
+                response: ResponseSummary {
+                    status_details: Some(ResponseStatusDetails {
+                        reason: Some("max_output_tokens".into()),
+                        r#type: Some("incomplete".into()),
+                        error: None,
+                    }),
+                    output: vec![response_item_function_call(
+                        "item_fn",
+                        Some("{\"city\":\"Boston\"}"),
+                    )],
+                    ..Default::default()
+                },
+            },
+        ];
+
+        // No ArgumentsDone arrived; the final call is recovered from the
+        // Incomplete summary's `output` list instead.
+        let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 3);
+        assert!(matches!(
+            mapped[0],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                is_input_complete: false,
+                ..
+            })
+        ));
+        assert!(
+            matches!(mapped[1], LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, is_input_complete: true, .. }) if raw_input == "{\"city\":\"Boston\"}")
+        );
+        assert!(matches!(
+            mapped[2],
+            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_incomplete_does_not_duplicate_tool_calls() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("{\"city\":\"Boston\"}")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "{\"city\":\"Boston\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Incomplete {
+                response: ResponseSummary {
+                    status_details: Some(ResponseStatusDetails {
+                        reason: Some("max_output_tokens".into()),
+                        r#type: Some("incomplete".into()),
+                        error: None,
+                    }),
+                    output: vec![response_item_function_call(
+                        "item_fn",
+                        Some("{\"city\":\"Boston\"}"),
+                    )],
+                    ..Default::default()
+                },
+            },
+        ];
+
+        // The call already emitted via ArgumentsDone must not be re-emitted
+        // when the same item reappears in the Incomplete summary.
+        let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 2);
+        assert!(
+            matches!(mapped[0], LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. }) if raw_input == "{\"city\":\"Boston\"}")
+        );
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_handles_empty_tool_arguments() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        // Empty argument strings parse to an empty JSON object rather than
+        // producing a parse error.
+        let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 2);
+        assert!(matches!(
+            &mapped[0],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                id, name, raw_input, input, ..
+            }) if id.to_string() == "call_123"
+                && name.as_ref() == "get_weather"
+                && raw_input == ""
+                && input.is_object()
+                && input.as_object().unwrap().is_empty()
+        ));
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
+        ));
+    }
+
+    #[test]
+    fn responses_stream_emits_partial_tool_use_events() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: ResponseOutputItem::FunctionCall(
+                    crate::responses::ResponseFunctionToolCall {
+                        id: Some("item_fn".to_string()),
+                        status: Some("in_progress".to_string()),
+                        name: Some("get_weather".to_string()),
+                        call_id: Some("call_abc".to_string()),
+                        arguments: String::new(),
+                    },
+                ),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDelta {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                delta: "{\"city\":\"Bos".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDelta {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                delta: "ton\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "{\"city\":\"Boston\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        // Only ordering invariants are asserted here (partial events before
+        // the final one), not an exact event count.
+        let mapped = map_response_events(events);
+        assert!(mapped.len() >= 3);
+
+        let complete_tool_use = mapped.iter().find(|e| {
+            matches!(
+                e,
+                LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                    is_input_complete: true,
+                    ..
+                })
+            )
+        });
+        assert!(
+            complete_tool_use.is_some(),
+            "should have a complete tool use event"
+        );
+
+        let tool_uses: Vec<_> = mapped
+            .iter()
+            .filter(|e| matches!(e, LanguageModelCompletionEvent::ToolUse(_)))
+            .collect();
+        assert!(
+            tool_uses.len() >= 2,
+            "should have at least one partial and one complete event"
+        );
+        assert!(matches!(
+            tool_uses.last().unwrap(),
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                is_input_complete: true,
+                ..
+            })
+        ));
+    }
+
+    #[test]
+    fn responses_stream_maps_reasoning_summary_deltas() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+                    id: Some("rs_123".into()),
+                    summary: vec![],
+                }),
+            },
+            ResponsesStreamEvent::ReasoningSummaryPartAdded {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                summary_index: 0,
+            },
+            ResponsesStreamEvent::ReasoningSummaryTextDelta {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                delta: "Thinking about".into(),
+            },
+            ResponsesStreamEvent::ReasoningSummaryTextDelta {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                delta: " the answer".into(),
+            },
+            ResponsesStreamEvent::ReasoningSummaryTextDone {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                text: "Thinking about the answer".into(),
+            },
+            ResponsesStreamEvent::ReasoningSummaryPartDone {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                summary_index: 0,
+            },
+            ResponsesStreamEvent::ReasoningSummaryPartAdded {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                summary_index: 1,
+            },
+            ResponsesStreamEvent::ReasoningSummaryTextDelta {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                delta: "Second part".into(),
+            },
+            ResponsesStreamEvent::ReasoningSummaryTextDone {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                text: "Second part".into(),
+            },
+            ResponsesStreamEvent::ReasoningSummaryPartDone {
+                item_id: "rs_123".into(),
+                output_index: 0,
+                summary_index: 1,
+            },
+            ResponsesStreamEvent::OutputItemDone {
+                output_index: 0,
+                sequence_number: None,
+                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+                    id: Some("rs_123".into()),
+                    summary: vec![
+                        ReasoningSummaryPart::SummaryText {
+                            text: "Thinking about the answer".into(),
+                        },
+                        ReasoningSummaryPart::SummaryText {
+                            text: "Second part".into(),
+                        },
+                    ],
+                }),
+            },
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 1,
+                sequence_number: None,
+                item: response_item_message("msg_456"),
+            },
+            ResponsesStreamEvent::OutputTextDelta {
+                item_id: "msg_456".into(),
+                output_index: 1,
+                content_index: Some(0),
+                delta: "The answer is 42".into(),
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        let mapped = map_response_events(events);
+
+        // Two summary parts with two deltas in the first => the mapper inserts
+        // a "\n\n" separator between parts, yielding four Thinking events.
+        let thinking_events: Vec<_> = mapped
+            .iter()
+            .filter(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. }))
+            .collect();
+        assert_eq!(
+            thinking_events.len(),
+            4,
+            "expected 4 thinking events, got {:?}",
+            thinking_events
+        );
+        assert!(
+            matches!(&thinking_events[0], LanguageModelCompletionEvent::Thinking { text, .. } if text == "Thinking about")
+        );
+        assert!(
+            matches!(&thinking_events[1], LanguageModelCompletionEvent::Thinking { text, .. } if text == " the answer")
+        );
+        assert!(
+            matches!(&thinking_events[2], LanguageModelCompletionEvent::Thinking { text, .. } if text == "\n\n"),
+            "expected separator between summary parts"
+        );
+        assert!(
+            matches!(&thinking_events[3], LanguageModelCompletionEvent::Thinking { text, .. } if text == "Second part")
+        );
+
+        assert!(mapped.iter().any(
+            |e| matches!(e, LanguageModelCompletionEvent::Text(t) if t == "The answer is 42")
+        ));
+    }
+
+    #[test]
+    fn responses_stream_maps_reasoning_from_done_only() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+                    id: Some("rs_789".into()),
+                    summary: vec![],
+                }),
+            },
+            ResponsesStreamEvent::OutputItemDone {
+                output_index: 0,
+                sequence_number: None,
+                item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+                    id: Some("rs_789".into()),
+                    summary: vec![ReasoningSummaryPart::SummaryText {
+                        text: "Summary without deltas".into(),
+                    }],
+                }),
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        let mapped = map_response_events(events);
+        assert!(
+            !mapped
+                .iter()
+                .any(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. })),
+            "OutputItemDone reasoning should not produce Thinking events"
+        );
+    }
+}

crates/open_ai/src/open_ai.rs 🔗

@@ -1,4 +1,5 @@
 pub mod batches;
+pub mod completion;
 pub mod responses;
 
 use anyhow::{Context as _, Result, anyhow};
@@ -7,9 +8,9 @@ use http_client::{
     AsyncBody, HttpClient, Method, Request as HttpRequest, StatusCode,
     http::{HeaderMap, HeaderValue},
 };
+pub use language_model_core::ReasoningEffort;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-pub use settings::OpenAiReasoningEffort as ReasoningEffort;
 use std::{convert::TryFrom, future::Future};
 use strum::EnumIter;
 use thiserror::Error;
@@ -717,3 +718,26 @@ pub fn embed<'a>(
         Ok(response)
     }
 }
+
+// -- Conversions to `language_model_core` types --
+
+impl From<RequestError> for language_model_core::LanguageModelCompletionError {
+    fn from(error: RequestError) -> Self {
+        match error {
+            RequestError::HttpResponseError {
+                provider,
+                status_code,
+                body,
+                headers,
+            } => {
+                let retry_after = headers
+                    .get(http_client::http::header::RETRY_AFTER)
+                    .and_then(|val| val.to_str().ok()?.parse::<u64>().ok())
+                    .map(std::time::Duration::from_secs);
+
+                Self::from_http_status(provider.into(), status_code, body, retry_after)
+            }
+            RequestError::Other(e) => Self::Other(e),
+        }
+    }
+}

crates/open_router/Cargo.toml 🔗

@@ -19,6 +19,7 @@ schemars = ["dep:schemars"]
 anyhow.workspace = true
 futures.workspace = true
 http_client.workspace = true
+language_model_core.workspace = true
 schemars = { workspace = true, optional = true }
 serde.workspace = true
 serde_json.workspace = true

crates/open_router/src/open_router.rs 🔗

@@ -744,3 +744,71 @@ impl ApiErrorCode {
         }
     }
 }
+
+// -- Conversions to `language_model_core` types --
+
+impl From<OpenRouterError> for language_model_core::LanguageModelCompletionError {
+    fn from(error: OpenRouterError) -> Self {
+        let provider = language_model_core::LanguageModelProviderName::new("OpenRouter");
+        match error {
+            OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+            OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+            OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error },
+            OpenRouterError::DeserializeResponse(error) => {
+                Self::DeserializeResponse { provider, error }
+            }
+            OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+            OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded {
+                provider,
+                retry_after: Some(retry_after),
+            },
+            OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+                provider,
+                retry_after,
+            },
+            OpenRouterError::ApiError(api_error) => api_error.into(),
+        }
+    }
+}
+
+impl From<ApiError> for language_model_core::LanguageModelCompletionError {
+    fn from(error: ApiError) -> Self {
+        use ApiErrorCode::*;
+        let provider = language_model_core::LanguageModelProviderName::new("OpenRouter");
+        match error.code {
+            InvalidRequestError => Self::BadRequestFormat {
+                provider,
+                message: error.message,
+            },
+            AuthenticationError => Self::AuthenticationError {
+                provider,
+                message: error.message,
+            },
+            PaymentRequiredError => Self::AuthenticationError {
+                provider,
+                message: format!("Payment required: {}", error.message),
+            },
+            PermissionError => Self::PermissionError {
+                provider,
+                message: error.message,
+            },
+            RequestTimedOut => Self::HttpResponseError {
+                provider,
+                status_code: http_client::StatusCode::REQUEST_TIMEOUT,
+                message: error.message,
+            },
+            RateLimitError => Self::RateLimitExceeded {
+                provider,
+                retry_after: None,
+            },
+            ApiError => Self::ApiInternalServerError {
+                provider,
+                message: error.message,
+            },
+            OverloadedError => Self::ServerOverloaded {
+                provider,
+                retry_after: None,
+            },
+        }
+    }
+}

crates/outline_panel/src/outline_panel.rs 🔗

@@ -23,8 +23,8 @@ use gpui::{
     uniform_list,
 };
 use itertools::Itertools;
-use language::language_settings::LanguageSettings;
 use language::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem};
+use language::{LanguageAwareStyling, language_settings::LanguageSettings};
 
 use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrevious};
 use std::{
@@ -217,10 +217,13 @@ impl SearchState {
                     let mut offset = context_offset_range.start;
                     let mut context_text = String::new();
                     let mut highlight_ranges = Vec::new();
-                    for mut chunk in highlight_arguments
-                        .multi_buffer_snapshot
-                        .chunks(context_offset_range.start..context_offset_range.end, true)
-                    {
+                    for mut chunk in highlight_arguments.multi_buffer_snapshot.chunks(
+                        context_offset_range.start..context_offset_range.end,
+                        LanguageAwareStyling {
+                            tree_sitter: true,
+                            diagnostics: true,
+                        },
+                    ) {
                         if !non_whitespace_symbol_occurred {
                             for c in chunk.text.chars() {
                                 if c.is_whitespace() {

crates/picker/src/highlighted_match_with_paths.rs 🔗

@@ -5,6 +5,7 @@ pub struct HighlightedMatchWithPaths {
     pub prefix: Option<SharedString>,
     pub match_label: HighlightedMatch,
     pub paths: Vec<HighlightedMatch>,
+    pub active: bool,
 }
 
 #[derive(Debug, Clone, IntoElement)]
@@ -63,18 +64,30 @@ impl HighlightedMatchWithPaths {
                 .color(Color::Muted)
         }))
     }
+
+    pub fn is_active(mut self, active: bool) -> Self {
+        self.active = active;
+        self
+    }
 }
 
 impl RenderOnce for HighlightedMatchWithPaths {
     fn render(mut self, _window: &mut Window, _: &mut App) -> impl IntoElement {
         v_flex()
             .child(
-                h_flex().gap_1().child(self.match_label.clone()).when_some(
-                    self.prefix.as_ref(),
-                    |this, prefix| {
+                h_flex()
+                    .gap_1()
+                    .child(self.match_label.clone())
+                    .when_some(self.prefix.as_ref(), |this, prefix| {
                         this.child(Label::new(format!("({})", prefix)).color(Color::Muted))
-                    },
-                ),
+                    })
+                    .when(self.active, |this| {
+                        this.child(
+                            Icon::new(IconName::Check)
+                                .size(IconSize::Small)
+                                .color(Color::Accent),
+                        )
+                    }),
             )
             .when(!self.paths.is_empty(), |this| {
                 self.render_paths_children(this)

crates/platform_title_bar/Cargo.toml 🔗

@@ -13,7 +13,6 @@ path = "src/platform_title_bar.rs"
 doctest = false
 
 [dependencies]
-feature_flags.workspace = true
 gpui.workspace = true
 project.workspace = true
 settings.workspace = true

crates/platform_title_bar/src/platform_title_bar.rs 🔗

@@ -1,7 +1,6 @@
 pub mod platforms;
 mod system_window_tabs;
 
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
 use gpui::{
     Action, AnyElement, App, Context, Decorations, Entity, Hsla, InteractiveElement, IntoElement,
     MouseButton, ParentElement, StatefulInteractiveElement, Styled, WeakEntity, Window,
@@ -111,7 +110,7 @@ impl PlatformTitleBar {
     }
 
     pub fn is_multi_workspace_enabled(cx: &App) -> bool {
-        cx.has_flag::<AgentV2FeatureFlag>() && !DisableAiSettings::get_global(cx).disable_ai
+        !DisableAiSettings::get_global(cx).disable_ai
     }
 }
 

crates/project/Cargo.toml 🔗

@@ -52,6 +52,7 @@ fancy-regex.workspace = true
 fs.workspace = true
 futures.workspace = true
 fuzzy.workspace = true
+fuzzy_nucleo.workspace = true
 git.workspace = true
 git_hosting_providers.workspace = true
 globset.workspace = true

crates/project/src/git_store.rs 🔗

@@ -32,10 +32,10 @@ use git::{
     blame::Blame,
     parse_git_remote_url,
     repository::{
-        Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
-        GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
-        LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
-        UpstreamTrackingStatus, Worktree as GitWorktree,
+        Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
+        DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
+        InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
+        RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
     },
     stash::{GitStash, StashEntry},
     status::{
@@ -329,12 +329,6 @@ pub struct GraphDataResponse<'a> {
     pub error: Option<SharedString>,
 }
 
-#[derive(Clone, Debug)]
-enum CreateWorktreeStartPoint {
-    Detached,
-    Branched { name: String },
-}
-
 pub struct Repository {
     this: WeakEntity<Self>,
     snapshot: RepositorySnapshot,
@@ -2414,18 +2408,23 @@ impl GitStore {
         let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
         let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
         let directory = PathBuf::from(envelope.payload.directory);
-        let start_point = if envelope.payload.name.is_empty() {
-            CreateWorktreeStartPoint::Detached
+        let name = envelope.payload.name;
+        let commit = envelope.payload.commit;
+        let use_existing_branch = envelope.payload.use_existing_branch;
+        let target = if name.is_empty() {
+            CreateWorktreeTarget::Detached { base_sha: commit }
+        } else if use_existing_branch {
+            CreateWorktreeTarget::ExistingBranch { branch_name: name }
         } else {
-            CreateWorktreeStartPoint::Branched {
-                name: envelope.payload.name,
+            CreateWorktreeTarget::NewBranch {
+                branch_name: name,
+                base_sha: commit,
             }
         };
-        let commit = envelope.payload.commit;
 
         repository_handle
             .update(&mut cx, |repository_handle, _| {
-                repository_handle.create_worktree_with_start_point(start_point, directory, commit)
+                repository_handle.create_worktree(target, directory)
             })
             .await??;
 
@@ -6004,50 +6003,43 @@ impl Repository {
         })
     }
 
-    fn create_worktree_with_start_point(
+    pub fn create_worktree(
         &mut self,
-        start_point: CreateWorktreeStartPoint,
+        target: CreateWorktreeTarget,
         path: PathBuf,
-        commit: Option<String>,
     ) -> oneshot::Receiver<Result<()>> {
-        if matches!(
-            &start_point,
-            CreateWorktreeStartPoint::Branched { name } if name.is_empty()
-        ) {
-            let (sender, receiver) = oneshot::channel();
-            sender
-                .send(Err(anyhow!("branch name cannot be empty")))
-                .ok();
-            return receiver;
-        }
-
         let id = self.id;
-        let message = match &start_point {
-            CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(),
-            CreateWorktreeStartPoint::Branched { name } => {
-                format!("git worktree add: {name}").into()
-            }
+        let job_description = match target.branch_name() {
+            Some(branch_name) => format!("git worktree add: {branch_name}"),
+            None => "git worktree add (detached)".to_string(),
         };
-
-        self.send_job(Some(message), move |repo, _cx| async move {
-            let branch_name = match start_point {
-                CreateWorktreeStartPoint::Detached => None,
-                CreateWorktreeStartPoint::Branched { name } => Some(name),
-            };
-            let remote_name = branch_name.clone().unwrap_or_default();
-
+        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
             match repo {
                 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
-                    backend.create_worktree(branch_name, path, commit).await
+                    backend.create_worktree(target, path).await
                 }
                 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+                    let (name, commit, use_existing_branch) = match target {
+                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
+                            (branch_name, None, true)
+                        }
+                        CreateWorktreeTarget::NewBranch {
+                            branch_name,
+                            base_sha: start_point,
+                        } => (branch_name, start_point, false),
+                        CreateWorktreeTarget::Detached {
+                            base_sha: start_point,
+                        } => (String::new(), start_point, false),
+                    };
+
                     client
                         .request(proto::GitCreateWorktree {
                             project_id: project_id.0,
                             repository_id: id.to_proto(),
-                            name: remote_name,
+                            name,
                             directory: path.to_string_lossy().to_string(),
                             commit,
+                            use_existing_branch,
                         })
                         .await?;
 
@@ -6057,28 +6049,16 @@ impl Repository {
         })
     }
 
-    pub fn create_worktree(
-        &mut self,
-        branch_name: String,
-        path: PathBuf,
-        commit: Option<String>,
-    ) -> oneshot::Receiver<Result<()>> {
-        self.create_worktree_with_start_point(
-            CreateWorktreeStartPoint::Branched { name: branch_name },
-            path,
-            commit,
-        )
-    }
-
     pub fn create_worktree_detached(
         &mut self,
         path: PathBuf,
         commit: String,
     ) -> oneshot::Receiver<Result<()>> {
-        self.create_worktree_with_start_point(
-            CreateWorktreeStartPoint::Detached,
+        self.create_worktree(
+            CreateWorktreeTarget::Detached {
+                base_sha: Some(commit),
+            },
             path,
-            Some(commit),
         )
     }
 

crates/project/src/lsp_store.rs 🔗

@@ -72,9 +72,10 @@ use itertools::Itertools as _;
 use language::{
     Bias, BinaryStatus, Buffer, BufferRow, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel,
     CodeLabelExt, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff,
-    File as _, Language, LanguageName, LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate,
-    LspInstaller, ManifestDelegate, ManifestName, ModelineSettings, OffsetUtf16, Patch, PointUtf16,
-    TextBufferSnapshot, ToOffset, ToOffsetUtf16, ToPointUtf16, Toolchain, Transaction, Unclipped,
+    File as _, Language, LanguageAwareStyling, LanguageName, LanguageRegistry, LocalFile,
+    LspAdapter, LspAdapterDelegate, LspInstaller, ManifestDelegate, ManifestName, ModelineSettings,
+    OffsetUtf16, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToOffsetUtf16, ToPointUtf16,
+    Toolchain, Transaction, Unclipped,
     language_settings::{
         AllLanguageSettings, FormatOnSave, Formatter, LanguageSettings, all_language_settings,
     },
@@ -13527,7 +13528,13 @@ fn resolve_word_completion(snapshot: &BufferSnapshot, completion: &mut Completio
     }
 
     let mut offset = 0;
-    for chunk in snapshot.chunks(word_range.clone(), true) {
+    for chunk in snapshot.chunks(
+        word_range.clone(),
+        LanguageAwareStyling {
+            tree_sitter: true,
+            diagnostics: true,
+        },
+    ) {
         let end_offset = offset + chunk.text.len();
         if let Some(highlight_id) = chunk.syntax_highlight_id {
             completion

crates/project/src/prettier_store.rs 🔗

@@ -412,7 +412,7 @@ impl PrettierStore {
             prettier_store
                 .update(cx, |prettier_store, cx| {
                     let name = if is_default {
-                        LanguageServerName("prettier (default)".to_string().into())
+                        LanguageServerName("prettier (default)".into())
                     } else {
                         let worktree_path = worktree_id
                             .and_then(|id| {

crates/project/src/project.rs 🔗

@@ -6224,6 +6224,76 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
     }
 }
 
+impl<'a> fuzzy_nucleo::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
+    type Candidates = PathMatchCandidateSetNucleoIter<'a>;
+    fn id(&self) -> usize {
+        self.snapshot.id().to_usize()
+    }
+    fn len(&self) -> usize {
+        match self.candidates {
+            Candidates::Files => {
+                if self.include_ignored {
+                    self.snapshot.file_count()
+                } else {
+                    self.snapshot.visible_file_count()
+                }
+            }
+            Candidates::Directories => {
+                if self.include_ignored {
+                    self.snapshot.dir_count()
+                } else {
+                    self.snapshot.visible_dir_count()
+                }
+            }
+            Candidates::Entries => {
+                if self.include_ignored {
+                    self.snapshot.entry_count()
+                } else {
+                    self.snapshot.visible_entry_count()
+                }
+            }
+        }
+    }
+    fn prefix(&self) -> Arc<RelPath> {
+        if self.snapshot.root_entry().is_some_and(|e| e.is_file()) || self.include_root_name {
+            self.snapshot.root_name().into()
+        } else {
+            RelPath::empty().into()
+        }
+    }
+    fn root_is_file(&self) -> bool {
+        self.snapshot.root_entry().is_some_and(|f| f.is_file())
+    }
+    fn path_style(&self) -> PathStyle {
+        self.snapshot.path_style()
+    }
+    fn candidates(&'a self, start: usize) -> Self::Candidates {
+        PathMatchCandidateSetNucleoIter {
+            traversal: match self.candidates {
+                Candidates::Directories => self.snapshot.directories(self.include_ignored, start),
+                Candidates::Files => self.snapshot.files(self.include_ignored, start),
+                Candidates::Entries => self.snapshot.entries(self.include_ignored, start),
+            },
+        }
+    }
+}
+
+pub struct PathMatchCandidateSetNucleoIter<'a> {
+    traversal: Traversal<'a>,
+}
+
+impl<'a> Iterator for PathMatchCandidateSetNucleoIter<'a> {
+    type Item = fuzzy_nucleo::PathMatchCandidate<'a>;
+    fn next(&mut self) -> Option<Self::Item> {
+        self.traversal
+            .next()
+            .map(|entry| fuzzy_nucleo::PathMatchCandidate {
+                is_dir: entry.kind.is_dir(),
+                path: &entry.path,
+            })
+    }
+}
+
 impl EventEmitter<Event> for Project {}
 
 impl<'a> From<&'a ProjectPath> for SettingsLocation<'a> {

crates/project/tests/integration/git_store.rs 🔗

@@ -1267,9 +1267,11 @@ mod git_worktrees {
         cx.update(|cx| {
             repository.update(cx, |repository, _| {
                 repository.create_worktree(
-                    "feature-branch".to_string(),
+                    git::repository::CreateWorktreeTarget::NewBranch {
+                        branch_name: "feature-branch".to_string(),
+                        base_sha: Some("abc123".to_string()),
+                    },
                     worktree_1_directory.clone(),
-                    Some("abc123".to_string()),
                 )
             })
         })
@@ -1297,9 +1299,11 @@ mod git_worktrees {
         cx.update(|cx| {
             repository.update(cx, |repository, _| {
                 repository.create_worktree(
-                    "bugfix-branch".to_string(),
+                    git::repository::CreateWorktreeTarget::NewBranch {
+                        branch_name: "bugfix-branch".to_string(),
+                        base_sha: None,
+                    },
                     worktree_2_directory.clone(),
-                    None,
                 )
             })
         })

crates/project/tests/integration/project_tests.rs 🔗

@@ -41,9 +41,10 @@ use gpui::{
 use itertools::Itertools;
 use language::{
     Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
-    DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
-    LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
-    ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
+    DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
+    LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
+    ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
+    ToolchainMetadata,
     language_settings::{LanguageSettings, LanguageSettingsContent},
     markdown_lang, rust_lang, tree_sitter_typescript,
 };
@@ -4382,7 +4383,13 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
     range: Range<T>,
 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
     let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
-    for chunk in buffer.snapshot().chunks(range, true) {
+    for chunk in buffer.snapshot().chunks(
+        range,
+        LanguageAwareStyling {
+            tree_sitter: true,
+            diagnostics: true,
+        },
+    ) {
         if chunks
             .last()
             .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)

crates/project_panel/src/project_panel_tests.rs 🔗

@@ -11,7 +11,7 @@ use std::path::{Path, PathBuf};
 use util::{path, paths::PathStyle, rel_path::rel_path};
 use workspace::{
     AppState, ItemHandle, MultiWorkspace, Pane, Workspace,
-    item::{Item, ProjectItem},
+    item::{Item, ProjectItem, test::TestItem},
     register_project_item,
 };
 
@@ -6015,6 +6015,150 @@ async fn test_explicit_reveal(cx: &mut gpui::TestAppContext) {
     );
 }
 
+#[gpui::test]
+async fn test_reveal_in_project_panel_notifications(cx: &mut gpui::TestAppContext) {
+    init_test_with_editor(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/workspace",
+        json!({
+            "README.md": ""
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/workspace".as_ref()], cx).await;
+    let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let workspace = window
+        .read_with(cx, |mw, _| mw.workspace().clone())
+        .unwrap();
+    let cx = &mut VisualTestContext::from_window(window.into(), cx);
+    let panel = workspace.update_in(cx, ProjectPanel::new);
+    cx.run_until_parked();
+
+    // Ensure that, attempting to run `pane: reveal in project panel` without
+    // any active item does nothing, i.e., does not focus the project panel but
+    // it also does not show a notification.
+    cx.dispatch_action(workspace::RevealInProjectPanel::default());
+    cx.run_until_parked();
+
+    panel.update_in(cx, |panel, window, cx| {
+        assert!(
+            !panel.focus_handle(cx).is_focused(window),
+            "Project panel should not be focused after attempting to reveal an invisible worktree entry"
+        );
+
+        panel.workspace.update(cx, |workspace, cx| {
+            assert!(
+                workspace.active_item(cx).is_none(),
+                "Workspace should not have an active item"
+            );
+            assert_eq!(
+                workspace.notification_ids(),
+                vec![],
+                "No notification should be shown when there's no active item"
+            );
+        }).unwrap();
+    });
+
+    // Create a file in a different folder than the one in the project so we can
+    // later open it and ensure that, attempting to reveal it in the project
+    // panel shows a notification and does not focus the project panel.
+    fs.insert_tree(
+        "/external",
+        json!({
+            "file.txt": "External File",
+        }),
+    )
+    .await;
+
+    let (worktree, _) = project
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree("/external/file.txt", false, cx)
+        })
+        .await
+        .unwrap();
+
+    workspace
+        .update_in(cx, |workspace, window, cx| {
+            let worktree_id = worktree.read(cx).id();
+            let path = rel_path("").into();
+            let project_path = ProjectPath { worktree_id, path };
+
+            workspace.open_path(project_path, None, true, window, cx)
+        })
+        .await
+        .unwrap();
+    cx.run_until_parked();
+
+    cx.dispatch_action(workspace::RevealInProjectPanel::default());
+    cx.run_until_parked();
+
+    panel.update_in(cx, |panel, window, cx| {
+        assert!(
+            !panel.focus_handle(cx).is_focused(window),
+            "Project panel should not be focused after attempting to reveal an invisible worktree entry"
+        );
+
+        panel.workspace.update(cx, |workspace, cx| {
+            assert!(
+                workspace.active_item(cx).is_some(),
+                "Workspace should have an active item"
+            );
+
+            let notification_ids = workspace.notification_ids();
+            assert_eq!(
+                notification_ids.len(),
+                1,
+                "A notification should be shown when trying to reveal an invisible worktree entry"
+            );
+
+            workspace.dismiss_notification(&notification_ids[0], cx);
+            assert_eq!(
+                workspace.notification_ids().len(),
+                0,
+                "No notifications should be left after dismissing"
+            );
+        }).unwrap();
+    });
+
+    // Create an empty buffer so we can ensure that, attempting to reveal it in
+    // the project panel shows a notification and does not focus the project
+    // panel.
+    let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
+    pane.update_in(cx, |pane, window, cx| {
+        let item = cx.new(|cx| TestItem::new(cx).with_label("Unsaved buffer"));
+        pane.add_item(Box::new(item), false, false, None, window, cx);
+    });
+
+    cx.dispatch_action(workspace::RevealInProjectPanel::default());
+    cx.run_until_parked();
+
+    panel.update_in(cx, |panel, window, cx| {
+        assert!(
+            !panel.focus_handle(cx).is_focused(window),
+            "Project panel should not be focused after attempting to reveal an unsaved buffer"
+        );
+
+        panel
+            .workspace
+            .update(cx, |workspace, cx| {
+                assert!(
+                    workspace.active_item(cx).is_some(),
+                    "Workspace should have an active item"
+                );
+
+                let notification_ids = workspace.notification_ids();
+                assert_eq!(
+                    notification_ids.len(),
+                    1,
+                    "A notification should be shown when trying to reveal an unsaved buffer"
+                );
+            })
+            .unwrap();
+    });
+}
+
 #[gpui::test]
 async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) {
     init_test(cx);

crates/proto/proto/git.proto 🔗

@@ -594,6 +594,7 @@ message GitCreateWorktree {
   string name = 3;
   string directory = 4;
   optional string commit = 5;
+  bool use_existing_branch = 6;
 }
 
 message GitCreateCheckpoint {

crates/recent_projects/src/recent_projects.rs 🔗

@@ -720,6 +720,9 @@ impl RecentProjects {
                         picker.delegate.workspaces.get(hit.candidate_id)
                     {
                         let workspace_id = *workspace_id;
+                        if picker.delegate.is_current_workspace(workspace_id, cx) {
+                            return;
+                        }
                         picker
                             .delegate
                             .remove_sibling_workspace(workspace_id, window, cx);
@@ -939,7 +942,7 @@ impl PickerDelegate for RecentProjectsDelegate {
             .workspaces
             .iter()
             .enumerate()
-            .filter(|(_, (id, _, _, _))| self.is_sibling_workspace(*id, cx))
+            .filter(|(_, (id, _, _, _))| self.sibling_workspace_ids.contains(id))
             .map(|(id, (_, _, paths, _))| {
                 let combined_string = paths
                     .ordered_paths()
@@ -1028,7 +1031,7 @@ impl PickerDelegate for RecentProjectsDelegate {
 
             if is_empty_query {
                 for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() {
-                    if self.is_sibling_workspace(*workspace_id, cx) {
+                    if self.sibling_workspace_ids.contains(workspace_id) {
                         entries.push(ProjectPickerEntry::OpenProject(StringMatch {
                             candidate_id: id,
                             score: 0.0,
@@ -1106,6 +1109,11 @@ impl PickerDelegate for RecentProjectsDelegate {
                 };
                 let workspace_id = *workspace_id;
 
+                if self.is_current_workspace(workspace_id, cx) {
+                    cx.emit(DismissEvent);
+                    return;
+                }
+
                 if let Some(handle) = window.window_handle().downcast::<MultiWorkspace>() {
                     cx.defer(move |cx| {
                         handle
@@ -1349,6 +1357,7 @@ impl PickerDelegate for RecentProjectsDelegate {
             ProjectPickerEntry::OpenProject(hit) => {
                 let (workspace_id, location, paths, _) = self.workspaces.get(hit.candidate_id)?;
                 let workspace_id = *workspace_id;
+                let is_current = self.is_current_workspace(workspace_id, cx);
                 let ordered_paths: Vec<_> = paths
                     .ordered_paths()
                     .map(|p| p.compact().to_string_lossy().to_string())
@@ -1388,6 +1397,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                     prefix,
                     match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
                     paths,
+                    active: is_current,
                 };
 
                 let icon = icon_for_remote_connection(match location {
@@ -1397,20 +1407,24 @@ impl PickerDelegate for RecentProjectsDelegate {
 
                 let secondary_actions = h_flex()
                     .gap_1()
-                    .child(
-                        IconButton::new("remove_open_project", IconName::Close)
-                            .icon_size(IconSize::Small)
-                            .tooltip(Tooltip::text("Remove Project from Window"))
-                            .on_click(cx.listener(move |picker, _, window, cx| {
-                                cx.stop_propagation();
-                                window.prevent_default();
-                                picker
-                                    .delegate
-                                    .remove_sibling_workspace(workspace_id, window, cx);
-                                let query = picker.query(cx);
-                                picker.update_matches(query, window, cx);
-                            })),
-                    )
+                    .when(!is_current, |this| {
+                        this.child(
+                            IconButton::new("remove_open_project", IconName::Close)
+                                .icon_size(IconSize::Small)
+                                .tooltip(Tooltip::text("Remove Project from Window"))
+                                .on_click(cx.listener(move |picker, _, window, cx| {
+                                    cx.stop_propagation();
+                                    window.prevent_default();
+                                    picker.delegate.remove_sibling_workspace(
+                                        workspace_id,
+                                        window,
+                                        cx,
+                                    );
+                                    let query = picker.query(cx);
+                                    picker.update_matches(query, window, cx);
+                                })),
+                        )
+                    })
                     .into_any_element();
 
                 Some(
@@ -1483,6 +1497,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                     prefix,
                     match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
                     paths,
+                    active: false,
                 };
 
                 let focus_handle = self.focus_handle.clone();
@@ -1491,9 +1506,16 @@ impl PickerDelegate for RecentProjectsDelegate {
                     .gap_px()
                     .when(is_local, |this| {
                         this.child(
-                            IconButton::new("add_to_workspace", IconName::FolderPlus)
+                            IconButton::new("add_to_workspace", IconName::FolderOpenAdd)
                                 .icon_size(IconSize::Small)
-                                .tooltip(Tooltip::text("Add Project to this Workspace"))
+                                .tooltip(move |_, cx| {
+                                    Tooltip::with_meta(
+                                        "Add Project to this Workspace",
+                                        None,
+                                        "As a multi-root folder project",
+                                        cx,
+                                    )
+                                })
                                 .on_click({
                                     let paths_to_add = paths_to_add.clone();
                                     cx.listener(move |picker, _event, window, cx| {
@@ -1509,8 +1531,8 @@ impl PickerDelegate for RecentProjectsDelegate {
                         )
                     })
                     .child(
-                        IconButton::new("open_new_window", IconName::ArrowUpRight)
-                            .icon_size(IconSize::XSmall)
+                        IconButton::new("open_new_window", IconName::OpenNewWindow)
+                            .icon_size(IconSize::Small)
                             .tooltip({
                                 move |_, cx| {
                                     Tooltip::for_action_in(
@@ -1565,7 +1587,14 @@ impl PickerDelegate for RecentProjectsDelegate {
                                     }
                                     highlighted.render(window, cx)
                                 })
-                                .tooltip(Tooltip::text(tooltip_path)),
+                                .tooltip(move |_, cx| {
+                                    Tooltip::with_meta(
+                                        "Open Project in This Window",
+                                        None,
+                                        tooltip_path.clone(),
+                                        cx,
+                                    )
+                                }),
                         )
                         .end_slot(secondary_actions)
                         .show_end_slot_on_hover()
@@ -1625,27 +1654,41 @@ impl PickerDelegate for RecentProjectsDelegate {
 
         let selected_entry = self.filtered_entries.get(self.selected_index);
 
+        let is_current_workspace_entry =
+            if let Some(ProjectPickerEntry::OpenProject(hit)) = selected_entry {
+                self.workspaces
+                    .get(hit.candidate_id)
+                    .map(|(id, ..)| self.is_current_workspace(*id, cx))
+                    .unwrap_or(false)
+            } else {
+                false
+            };
+
         let secondary_footer_actions: Option<AnyElement> = match selected_entry {
-            Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::OpenProject(_)) => {
-                let label = if matches!(selected_entry, Some(ProjectPickerEntry::OpenFolder { .. }))
-                {
-                    "Remove Folder"
-                } else {
-                    "Remove from Window"
-                };
-                Some(
-                    Button::new("remove_selected", label)
-                        .key_binding(KeyBinding::for_action_in(
-                            &RemoveSelected,
-                            &focus_handle,
-                            cx,
-                        ))
-                        .on_click(|_, window, cx| {
-                            window.dispatch_action(RemoveSelected.boxed_clone(), cx)
-                        })
-                        .into_any_element(),
-                )
-            }
+            Some(ProjectPickerEntry::OpenFolder { .. }) => Some(
+                Button::new("remove_selected", "Remove Folder")
+                    .key_binding(KeyBinding::for_action_in(
+                        &RemoveSelected,
+                        &focus_handle,
+                        cx,
+                    ))
+                    .on_click(|_, window, cx| {
+                        window.dispatch_action(RemoveSelected.boxed_clone(), cx)
+                    })
+                    .into_any_element(),
+            ),
+            Some(ProjectPickerEntry::OpenProject(_)) if !is_current_workspace_entry => Some(
+                Button::new("remove_selected", "Remove from Window")
+                    .key_binding(KeyBinding::for_action_in(
+                        &RemoveSelected,
+                        &focus_handle,
+                        cx,
+                    ))
+                    .on_click(|_, window, cx| {
+                        window.dispatch_action(RemoveSelected.boxed_clone(), cx)
+                    })
+                    .into_any_element(),
+            ),
             Some(ProjectPickerEntry::RecentProject(_)) => Some(
                 Button::new("delete_recent", "Delete")
                     .key_binding(KeyBinding::for_action_in(
@@ -1748,7 +1791,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                                         menu.context(focus_handle)
                                             .when(show_add_to_workspace, |menu| {
                                                 menu.action(
-                                                    "Add to Workspace",
+                                                    "Add to this Workspace",
                                                     AddToWorkspace.boxed_clone(),
                                                 )
                                                 .separator()

crates/recent_projects/src/sidebar_recent_projects.rs 🔗

@@ -374,6 +374,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
             prefix,
             match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
             paths: Vec::new(),
+            active: false,
         };
 
         let icon = icon_for_remote_connection(match location {
@@ -395,7 +396,14 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
                         })
                         .child(highlighted_match.render(window, cx)),
                 )
-                .tooltip(Tooltip::text(tooltip_path))
+                .tooltip(move |_, cx| {
+                    Tooltip::with_meta(
+                        "Open Project in This Window",
+                        None,
+                        tooltip_path.clone(),
+                        cx,
+                    )
+                })
                 .into_any_element(),
         )
     }

crates/repl/src/kernels/ssh_kernel.rs 🔗

@@ -215,7 +215,7 @@ impl SshRunningKernel {
                 &session_id,
             )
             .await
-            .context("failed to create iopub connection")?;
+            .context("Failed to create iopub connection. Is `ipykernel` installed in the remote environment? Try running `pip install ipykernel` on the remote host.")?;
 
             let peer_identity = runtimelib::peer_identity_for_session(&session_id)?;
             let shell_socket = runtimelib::create_client_shell_connection_with_identity(

crates/repl/src/kernels/wsl_kernel.rs 🔗

@@ -354,7 +354,8 @@ impl WslRunningKernel {
                 "",
                 &session_id,
             )
-            .await?;
+            .await
+            .context("Failed to create iopub connection. Is `ipykernel` installed in the WSL environment? Try running `pip install ipykernel` inside your WSL distribution.")?;
 
             let peer_identity = runtimelib::peer_identity_for_session(&session_id)?;
             let shell_socket = runtimelib::create_client_shell_connection_with_identity(

crates/repl/src/notebook/notebook_ui.rs 🔗

@@ -10,8 +10,8 @@ use feature_flags::{FeatureFlagAppExt as _, NotebookFeatureFlag};
 use futures::FutureExt;
 use futures::future::Shared;
 use gpui::{
-    AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, ListScrollEvent, ListState,
-    Point, Task, actions, list, prelude::*,
+    AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, ListScrollEvent,
+    ListState, Point, Task, actions, list, prelude::*,
 };
 use jupyter_protocol::JupyterKernelspec;
 use language::{Language, LanguageRegistry};
@@ -41,33 +41,18 @@ use picker::Picker;
 use runtimelib::{ExecuteRequest, JupyterMessage, JupyterMessageContent};
 use ui::PopoverMenuHandle;
 use zed_actions::editor::{MoveDown, MoveUp};
-use zed_actions::notebook::{NotebookMoveDown, NotebookMoveUp};
-
-actions!(
-    notebook,
-    [
-        /// Opens a Jupyter notebook file.
-        OpenNotebook,
-        /// Runs all cells in the notebook.
-        RunAll,
-        /// Runs the current cell.
-        Run,
-        /// Clears all cell outputs.
-        ClearOutputs,
-        /// Moves the current cell up.
-        MoveCellUp,
-        /// Moves the current cell down.
-        MoveCellDown,
-        /// Adds a new markdown cell.
-        AddMarkdownBlock,
-        /// Adds a new code cell.
-        AddCodeBlock,
-        /// Restarts the kernel.
-        RestartKernel,
-        /// Interrupts the current execution.
-        InterruptKernel,
-    ]
-);
+use zed_actions::notebook::{
+    AddCodeBlock, AddMarkdownBlock, ClearOutputs, EnterCommandMode, EnterEditMode, InterruptKernel,
+    MoveCellDown, MoveCellUp, NotebookMoveDown, NotebookMoveUp, OpenNotebook, RestartKernel, Run,
+    RunAll, RunAndAdvance,
+};
+
+/// Whether the notebook is in command mode (navigating cells) or edit mode (editing a cell).
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub(crate) enum NotebookMode {
+    Command,
+    Edit,
+}
 
 pub(crate) const MAX_TEXT_BLOCK_WIDTH: f32 = 9999.0;
 pub(crate) const SMALL_SPACING_SIZE: f32 = 8.0;
@@ -107,6 +92,7 @@ pub struct NotebookEditor {
     remote_id: Option<ViewId>,
     cell_list: ListState,
 
+    notebook_mode: NotebookMode,
     selected_cell_index: usize,
     cell_order: Vec<CellId>,
     original_cell_order: Vec<CellId>,
@@ -148,18 +134,9 @@ impl NotebookEditor {
             match &cell_entity {
                 Cell::Code(code_cell) => {
                     let cell_id_for_focus = cell_id.clone();
-                    cx.subscribe(code_cell, move |this, cell, event, cx| match event {
+                    cx.subscribe(code_cell, move |this, _cell, event, cx| match event {
                         CellEvent::Run(cell_id) => this.execute_cell(cell_id.clone(), cx),
-                        CellEvent::FocusedIn(_) => {
-                            if let Some(index) = this
-                                .cell_order
-                                .iter()
-                                .position(|id| id == &cell_id_for_focus)
-                            {
-                                this.selected_cell_index = index;
-                                cx.notify();
-                            }
-                        }
+                        CellEvent::FocusedIn(_) => this.select_cell_by_id(&cell_id_for_focus, cx),
                     })
                     .detach();
 
@@ -167,20 +144,12 @@ impl NotebookEditor {
                     let editor = code_cell.read(cx).editor().clone();
                     cx.subscribe(&editor, move |this, _editor, event, cx| {
                         if let editor::EditorEvent::Focused = event {
-                            if let Some(index) = this
-                                .cell_order
-                                .iter()
-                                .position(|id| id == &cell_id_for_editor)
-                            {
-                                this.selected_cell_index = index;
-                                cx.notify();
-                            }
+                            this.select_cell_by_id(&cell_id_for_editor, cx);
                         }
                     })
                     .detach();
                 }
                 Cell::Markdown(markdown_cell) => {
-                    let cell_id_for_focus = cell_id.clone();
                     cx.subscribe(
                         markdown_cell,
                         move |_this, cell, event: &MarkdownCellEvent, cx| {
@@ -206,14 +175,7 @@ impl NotebookEditor {
                     let editor = markdown_cell.read(cx).editor().clone();
                     cx.subscribe(&editor, move |this, _editor, event, cx| {
                         if let editor::EditorEvent::Focused = event {
-                            if let Some(index) = this
-                                .cell_order
-                                .iter()
-                                .position(|id| id == &cell_id_for_editor)
-                            {
-                                this.selected_cell_index = index;
-                                cx.notify();
-                            }
+                            this.select_cell_by_id(&cell_id_for_editor, cx);
                         }
                     })
                     .detach();
@@ -239,6 +201,7 @@ impl NotebookEditor {
             notebook_language,
             remote_id: None,
             cell_list,
+            notebook_mode: NotebookMode::Command,
             selected_cell_index: 0,
             cell_order: cell_order.clone(),
             original_cell_order: cell_order.clone(),
@@ -385,8 +348,7 @@ impl NotebookEditor {
         let working_directory = self
             .project
             .read(cx)
-            .worktrees(cx)
-            .next()
+            .worktree_for_id(self.worktree_id, cx)
             .map(|worktree| worktree.read(cx).abs_path().to_path_buf())
             .unwrap_or_else(std::env::temp_dir);
         let fs = self.project.read(cx).fs().clone();
@@ -590,6 +552,31 @@ impl NotebookEditor {
     }
 
     fn run_current_cell(&mut self, _: &Run, window: &mut Window, cx: &mut Context<Self>) {
+        let Some(cell_id) = self.cell_order.get(self.selected_cell_index).cloned() else {
+            return;
+        };
+        let Some(cell) = self.cell_map.get(&cell_id) else {
+            return;
+        };
+        match cell {
+            Cell::Code(_) => {
+                self.execute_cell(cell_id, cx);
+            }
+            Cell::Markdown(markdown_cell) => {
+                // for markdown, finish editing and move to next cell
+                let is_editing = markdown_cell.read(cx).is_editing();
+                if is_editing {
+                    markdown_cell.update(cx, |cell, cx| {
+                        cell.run(cx);
+                    });
+                    self.enter_command_mode(window, cx);
+                }
+            }
+            Cell::Raw(_) => {}
+        }
+    }
+
+    fn run_and_advance(&mut self, _: &RunAndAdvance, window: &mut Window, cx: &mut Context<Self>) {
         if let Some(cell_id) = self.cell_order.get(self.selected_cell_index).cloned() {
             if let Some(cell) = self.cell_map.get(&cell_id) {
                 match cell {
@@ -597,25 +584,83 @@ impl NotebookEditor {
                         self.execute_cell(cell_id, cx);
                     }
                     Cell::Markdown(markdown_cell) => {
-                        // for markdown, finish editing and move to next cell
-                        let is_editing = markdown_cell.read(cx).is_editing();
-                        if is_editing {
+                        if markdown_cell.read(cx).is_editing() {
                             markdown_cell.update(cx, |cell, cx| {
                                 cell.run(cx);
                             });
-                            // move to the next cell
-                            // Discussion can be done on this default implementation
-                            self.move_to_next_cell(window, cx);
                         }
                     }
                     Cell::Raw(_) => {}
                 }
             }
         }
+
+        let is_last_cell = self.selected_cell_index == self.cell_count().saturating_sub(1);
+        if is_last_cell {
+            self.add_code_block(window, cx);
+            self.enter_command_mode(window, cx);
+        } else {
+            self.advance_in_command_mode(window, cx);
+        }
+    }
+
+    fn enter_edit_mode(&mut self, _: &EnterEditMode, window: &mut Window, cx: &mut Context<Self>) {
+        self.notebook_mode = NotebookMode::Edit;
+        if let Some(cell_id) = self.cell_order.get(self.selected_cell_index) {
+            if let Some(cell) = self.cell_map.get(cell_id) {
+                match cell {
+                    Cell::Code(code_cell) => {
+                        let editor = code_cell.read(cx).editor().clone();
+                        window.focus(&editor.focus_handle(cx), cx);
+                    }
+                    Cell::Markdown(markdown_cell) => {
+                        markdown_cell.update(cx, |cell, cx| {
+                            cell.set_editing(true);
+                            cx.notify();
+                        });
+                        let editor = markdown_cell.read(cx).editor().clone();
+                        window.focus(&editor.focus_handle(cx), cx);
+                    }
+                    Cell::Raw(_) => {}
+                }
+            }
+        }
+        cx.notify();
+    }
+
+    fn enter_command_mode(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        self.notebook_mode = NotebookMode::Command;
+        self.focus_handle.focus(window, cx);
+        cx.notify();
+    }
+
+    fn handle_enter_command_mode(
+        &mut self,
+        _: &EnterCommandMode,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.enter_command_mode(window, cx);
+    }
+
+    /// Advances to the next cell while staying in command mode (used by RunAndAdvance and shift-enter).
+    fn advance_in_command_mode(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        let count = self.cell_count();
+        if count == 0 {
+            return;
+        }
+        if self.selected_cell_index < count - 1 {
+            self.selected_cell_index += 1;
+            self.cell_list
+                .scroll_to_reveal_item(self.selected_cell_index);
+        }
+        self.notebook_mode = NotebookMode::Command;
+        self.focus_handle.focus(window, cx);
+        cx.notify();
     }
 
     // Discussion can be done on this default implementation
-    /// Moves focus to the next cell, or creates a new code cell if at the end
+    /// Moves focus to the next cell editor (used when already in edit mode).
     fn move_to_next_cell(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         if !self.cell_order.is_empty() && self.selected_cell_index < self.cell_order.len() - 1 {
             self.selected_cell_index += 1;
@@ -666,6 +711,19 @@ impl NotebookEditor {
         }
     }
 
+    fn insert_cell_at_current_position(&mut self, cell_id: CellId, cell: Cell) {
+        let insert_index = if self.cell_order.is_empty() {
+            0
+        } else {
+            self.selected_cell_index + 1
+        };
+        self.cell_order.insert(insert_index, cell_id.clone());
+        self.cell_map.insert(cell_id, cell);
+        self.selected_cell_index = insert_index;
+        self.cell_list.splice(insert_index..insert_index, 1);
+        self.cell_list.scroll_to_reveal_item(insert_index);
+    }
+
     fn add_markdown_block(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         let new_cell_id: CellId = Uuid::new_v4().into();
         let languages = self.languages.clone();
@@ -683,16 +741,6 @@ impl NotebookEditor {
             )
         });
 
-        let insert_index = if self.cell_order.is_empty() {
-            0
-        } else {
-            self.selected_cell_index + 1
-        };
-        self.cell_order.insert(insert_index, new_cell_id.clone());
-        self.cell_map
-            .insert(new_cell_id.clone(), Cell::Markdown(markdown_cell.clone()));
-        self.selected_cell_index = insert_index;
-
         cx.subscribe(
             &markdown_cell,
             move |_this, cell, event: &MarkdownCellEvent, cx| match event {
@@ -709,19 +757,19 @@ impl NotebookEditor {
         let editor = markdown_cell.read(cx).editor().clone();
         cx.subscribe(&editor, move |this, _editor, event, cx| {
             if let editor::EditorEvent::Focused = event {
-                if let Some(index) = this
-                    .cell_order
-                    .iter()
-                    .position(|id| id == &cell_id_for_editor)
-                {
-                    this.selected_cell_index = index;
-                    cx.notify();
-                }
+                this.select_cell_by_id(&cell_id_for_editor, cx);
             }
         })
         .detach();
 
-        self.cell_list.reset(self.cell_order.len());
+        self.insert_cell_at_current_position(new_cell_id, Cell::Markdown(markdown_cell.clone()));
+        markdown_cell.update(cx, |cell, cx| {
+            cell.set_editing(true);
+            cx.notify();
+        });
+        let editor = markdown_cell.read(cx).editor().clone();
+        window.focus(&editor.focus_handle(cx), cx);
+        self.notebook_mode = NotebookMode::Edit;
         cx.notify();
     }
 
@@ -742,25 +790,10 @@ impl NotebookEditor {
             )
         });
 
-        let insert_index = if self.cell_order.is_empty() {
-            0
-        } else {
-            self.selected_cell_index + 1
-        };
-        self.cell_order.insert(insert_index, new_cell_id.clone());
-        self.cell_map
-            .insert(new_cell_id.clone(), Cell::Code(code_cell.clone()));
-        self.selected_cell_index = insert_index;
-
         let cell_id_for_run = new_cell_id.clone();
         cx.subscribe(&code_cell, move |this, _cell, event, cx| match event {
             CellEvent::Run(cell_id) => this.execute_cell(cell_id.clone(), cx),
-            CellEvent::FocusedIn(_) => {
-                if let Some(index) = this.cell_order.iter().position(|id| id == &cell_id_for_run) {
-                    this.selected_cell_index = index;
-                    cx.notify();
-                }
-            }
+            CellEvent::FocusedIn(_) => this.select_cell_by_id(&cell_id_for_run, cx),
         })
         .detach();
 
@@ -768,19 +801,15 @@ impl NotebookEditor {
         let editor = code_cell.read(cx).editor().clone();
         cx.subscribe(&editor, move |this, _editor, event, cx| {
             if let editor::EditorEvent::Focused = event {
-                if let Some(index) = this
-                    .cell_order
-                    .iter()
-                    .position(|id| id == &cell_id_for_editor)
-                {
-                    this.selected_cell_index = index;
-                    cx.notify();
-                }
+                this.select_cell_by_id(&cell_id_for_editor, cx);
             }
         })
         .detach();
 
-        self.cell_list.reset(self.cell_order.len());
+        self.insert_cell_at_current_position(new_cell_id, Cell::Code(code_cell.clone()));
+        let editor = code_cell.read(cx).editor().clone();
+        window.focus(&editor.focus_handle(cx), cx);
+        self.notebook_mode = NotebookMode::Edit;
         cx.notify();
     }
 
@@ -792,6 +821,14 @@ impl NotebookEditor {
         self.selected_cell_index
     }
 
+    fn select_cell_by_id(&mut self, cell_id: &CellId, cx: &mut Context<Self>) {
+        if let Some(index) = self.cell_order.iter().position(|id| id == cell_id) {
+            self.selected_cell_index = index;
+            self.notebook_mode = NotebookMode::Edit;
+            cx.notify();
+        }
+    }
+
     pub fn set_selected_index(
         &mut self,
         index: usize,
@@ -1216,9 +1253,19 @@ impl NotebookEditor {
 
 impl Render for NotebookEditor {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        let mut key_context = KeyContext::new_with_defaults();
+        key_context.add("NotebookEditor");
+        key_context.set(
+            "notebook_mode",
+            match self.notebook_mode {
+                NotebookMode::Command => "command",
+                NotebookMode::Edit => "edit",
+            },
+        );
+
         v_flex()
             .size_full()
-            .key_context("NotebookEditor")
+            .key_context(key_context)
             .track_focus(&self.focus_handle)
             .on_action(cx.listener(|this, _: &OpenNotebook, window, cx| {
                 this.open_notebook(&OpenNotebook, window, cx)
@@ -1229,6 +1276,9 @@ impl Render for NotebookEditor {
             .on_action(
                 cx.listener(|this, _: &Run, window, cx| this.run_current_cell(&Run, window, cx)),
             )
+            .on_action(
+                cx.listener(|this, action, window, cx| this.run_and_advance(action, window, cx)),
+            )
             .on_action(cx.listener(|this, _: &RunAll, window, cx| this.run_cells(window, cx)))
             .on_action(
                 cx.listener(|this, _: &MoveCellUp, window, cx| this.move_cell_up(window, cx)),
@@ -1242,6 +1292,20 @@ impl Render for NotebookEditor {
             .on_action(
                 cx.listener(|this, _: &AddCodeBlock, window, cx| this.add_code_block(window, cx)),
             )
+            .on_action(
+                cx.listener(|this, action, window, cx| this.enter_edit_mode(action, window, cx)),
+            )
+            .on_action(cx.listener(|this, action, window, cx| {
+                this.handle_enter_command_mode(action, window, cx)
+            }))
+            .on_action(cx.listener(|this, action, window, cx| this.select_next(action, window, cx)))
+            .on_action(
+                cx.listener(|this, action, window, cx| this.select_previous(action, window, cx)),
+            )
+            .on_action(
+                cx.listener(|this, action, window, cx| this.select_first(action, window, cx)),
+            )
+            .on_action(cx.listener(|this, action, window, cx| this.select_last(action, window, cx)))
             .on_action(cx.listener(|this, _: &MoveUp, window, cx| {
                 this.select_previous(&menu::SelectPrevious, window, cx);
                 if let Some(cell_id) = this.cell_order.get(this.selected_cell_index) {

crates/search/src/buffer_search.rs 🔗

@@ -849,6 +849,7 @@ impl BufferSearchBar {
         let query_editor = cx.new(|cx| {
             let mut editor = Editor::auto_height(1, 4, window, cx);
             editor.set_use_autoclose(false);
+            editor.set_use_selection_highlight(false);
             editor
         });
         cx.subscribe_in(&query_editor, window, Self::on_query_editor_event)

crates/search/src/project_search.rs 🔗

@@ -769,6 +769,17 @@ impl ProjectSearchView {
         }
     }
 
+    fn set_search_option_enabled(
+        &mut self,
+        option: SearchOptions,
+        enabled: bool,
+        cx: &mut Context<Self>,
+    ) {
+        if self.search_options.contains(option) != enabled {
+            self.toggle_search_option(option, cx);
+        }
+    }
+
     fn toggle_search_option(&mut self, option: SearchOptions, cx: &mut Context<Self>) {
         self.search_options.toggle(option);
         ActiveSettings::update_global(cx, |settings, cx| {
@@ -928,6 +939,7 @@ impl ProjectSearchView {
             let mut editor = Editor::auto_height(1, 4, window, cx);
             editor.set_placeholder_text("Search all files…", window, cx);
             editor.set_use_autoclose(false);
+            editor.set_use_selection_highlight(false);
             editor.set_text(query_text, window, cx);
             editor
         });
@@ -1153,7 +1165,7 @@ impl ProjectSearchView {
         window: &mut Window,
         cx: &mut Context<Workspace>,
     ) {
-        Self::existing_or_new_search(workspace, None, &DeploySearch::find(), window, cx)
+        Self::existing_or_new_search(workspace, None, &DeploySearch::default(), window, cx)
     }
 
     fn existing_or_new_search(
@@ -1203,8 +1215,29 @@ impl ProjectSearchView {
 
         search.update(cx, |search, cx| {
             search.replace_enabled |= action.replace_enabled;
+            if let Some(regex) = action.regex {
+                search.set_search_option_enabled(SearchOptions::REGEX, regex, cx);
+            }
+            if let Some(case_sensitive) = action.case_sensitive {
+                search.set_search_option_enabled(SearchOptions::CASE_SENSITIVE, case_sensitive, cx);
+            }
+            if let Some(whole_word) = action.whole_word {
+                search.set_search_option_enabled(SearchOptions::WHOLE_WORD, whole_word, cx);
+            }
+            if let Some(include_ignored) = action.include_ignored {
+                search.set_search_option_enabled(
+                    SearchOptions::INCLUDE_IGNORED,
+                    include_ignored,
+                    cx,
+                );
+            }
+            let query = action
+                .query
+                .as_deref()
+                .filter(|q| !q.is_empty())
+                .or(query.as_deref());
             if let Some(query) = query {
-                search.set_query(&query, window, cx);
+                search.set_query(query, window, cx);
             }
             if let Some(included_files) = action.included_files.as_deref() {
                 search
@@ -3101,7 +3134,7 @@ pub mod tests {
 
             ProjectSearchView::deploy_search(
                 workspace,
-                &workspace::DeploySearch::find(),
+                &workspace::DeploySearch::default(),
                 window,
                 cx,
             )
@@ -3252,7 +3285,7 @@ pub mod tests {
         workspace.update_in(cx, |workspace, window, cx| {
             ProjectSearchView::deploy_search(
                 workspace,
-                &workspace::DeploySearch::find(),
+                &workspace::DeploySearch::default(),
                 window,
                 cx,
             )
@@ -3325,7 +3358,7 @@ pub mod tests {
 
             ProjectSearchView::deploy_search(
                 workspace,
-                &workspace::DeploySearch::find(),
+                &workspace::DeploySearch::default(),
                 window,
                 cx,
             )
@@ -4560,7 +4593,7 @@ pub mod tests {
         });
 
         // Deploy a new search
-        cx.dispatch_action(DeploySearch::find());
+        cx.dispatch_action(DeploySearch::default());
 
         // Both panes should now have a project search in them
         workspace.update_in(cx, |workspace, window, cx| {
@@ -4585,7 +4618,7 @@ pub mod tests {
             .unwrap();
 
         // Deploy a new search
-        cx.dispatch_action(DeploySearch::find());
+        cx.dispatch_action(DeploySearch::default());
 
         // The project search view should now be focused in the second pane
         // And the number of items should be unchanged.
@@ -4823,7 +4856,7 @@ pub mod tests {
             assert!(workspace.has_active_modal(window, cx));
         });
 
-        cx.dispatch_action(DeploySearch::find());
+        cx.dispatch_action(DeploySearch::default());
 
         workspace.update_in(cx, |workspace, window, cx| {
             assert!(!workspace.has_active_modal(window, cx));
@@ -5136,6 +5169,271 @@ pub mod tests {
             .unwrap();
     }
 
+    #[gpui::test]
+    async fn test_deploy_search_applies_and_resets_options(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/dir"),
+            json!({
+                "one.rs": "const ONE: usize = 1;",
+            }),
+        )
+        .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = window
+            .read_with(cx, |mw, _| mw.workspace().clone())
+            .unwrap();
+        let cx = &mut VisualTestContext::from_window(window.into(), cx);
+        let search_bar = window.build_entity(cx, |_, _| ProjectSearchBar::new());
+
+        workspace.update_in(cx, |workspace, window, cx| {
+            workspace.panes()[0].update(cx, |pane, cx| {
+                pane.toolbar()
+                    .update(cx, |toolbar, cx| toolbar.add_item(search_bar, window, cx))
+            });
+
+            ProjectSearchView::deploy_search(
+                workspace,
+                &workspace::DeploySearch {
+                    regex: Some(true),
+                    case_sensitive: Some(true),
+                    whole_word: Some(true),
+                    include_ignored: Some(true),
+                    query: Some("Test_Query".into()),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+        });
+
+        let search_view = cx
+            .read(|cx| {
+                workspace
+                    .read(cx)
+                    .active_pane()
+                    .read(cx)
+                    .active_item()
+                    .and_then(|item| item.downcast::<ProjectSearchView>())
+            })
+            .expect("Search view should be active after deploy");
+
+        search_view.update_in(cx, |search_view, _window, cx| {
+            assert!(
+                search_view.search_options.contains(SearchOptions::REGEX),
+                "Regex option should be enabled"
+            );
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::CASE_SENSITIVE),
+                "Case sensitive option should be enabled"
+            );
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::WHOLE_WORD),
+                "Whole word option should be enabled"
+            );
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::INCLUDE_IGNORED),
+                "Include ignored option should be enabled"
+            );
+            let query_text = search_view.query_editor.read(cx).text(cx);
+            assert_eq!(
+                query_text, "Test_Query",
+                "Query should be set from the action"
+            );
+        });
+
+        // Redeploy with only regex - unspecified options should be preserved.
+        cx.dispatch_action(menu::Cancel);
+        workspace.update_in(cx, |workspace, window, cx| {
+            ProjectSearchView::deploy_search(
+                workspace,
+                &workspace::DeploySearch {
+                    regex: Some(true),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+        });
+
+        search_view.update_in(cx, |search_view, _window, _cx| {
+            assert!(
+                search_view.search_options.contains(SearchOptions::REGEX),
+                "Regex should still be enabled"
+            );
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::CASE_SENSITIVE),
+                "Case sensitive should be preserved from previous deploy"
+            );
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::WHOLE_WORD),
+                "Whole word should be preserved from previous deploy"
+            );
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::INCLUDE_IGNORED),
+                "Include ignored should be preserved from previous deploy"
+            );
+        });
+
+        // Redeploy explicitly turning off options.
+        cx.dispatch_action(menu::Cancel);
+        workspace.update_in(cx, |workspace, window, cx| {
+            ProjectSearchView::deploy_search(
+                workspace,
+                &workspace::DeploySearch {
+                    regex: Some(true),
+                    case_sensitive: Some(false),
+                    whole_word: Some(false),
+                    include_ignored: Some(false),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+        });
+
+        search_view.update_in(cx, |search_view, _window, _cx| {
+            assert_eq!(
+                search_view.search_options,
+                SearchOptions::REGEX,
+                "Explicit Some(false) should turn off options"
+            );
+        });
+
+        // Redeploy with an empty query - should not overwrite the existing query.
+        cx.dispatch_action(menu::Cancel);
+        workspace.update_in(cx, |workspace, window, cx| {
+            ProjectSearchView::deploy_search(
+                workspace,
+                &workspace::DeploySearch {
+                    query: Some("".into()),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+        });
+
+        search_view.update_in(cx, |search_view, _window, cx| {
+            let query_text = search_view.query_editor.read(cx).text(cx);
+            assert_eq!(
+                query_text, "Test_Query",
+                "Empty query string should not overwrite the existing query"
+            );
+        });
+    }
+
+    #[gpui::test]
+    async fn test_smartcase_overrides_explicit_case_sensitive(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _>(|store, cx| {
+                store.update_default_settings(cx, |settings| {
+                    settings.editor.use_smartcase_search = Some(true);
+                });
+            });
+        });
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/dir"),
+            json!({
+                "one.rs": "const ONE: usize = 1;",
+            }),
+        )
+        .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = window
+            .read_with(cx, |mw, _| mw.workspace().clone())
+            .unwrap();
+        let cx = &mut VisualTestContext::from_window(window.into(), cx);
+        let search_bar = window.build_entity(cx, |_, _| ProjectSearchBar::new());
+
+        workspace.update_in(cx, |workspace, window, cx| {
+            workspace.panes()[0].update(cx, |pane, cx| {
+                pane.toolbar()
+                    .update(cx, |toolbar, cx| toolbar.add_item(search_bar, window, cx))
+            });
+
+            ProjectSearchView::deploy_search(
+                workspace,
+                &workspace::DeploySearch {
+                    case_sensitive: Some(true),
+                    query: Some("lowercase_query".into()),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+        });
+
+        let search_view = cx
+            .read(|cx| {
+                workspace
+                    .read(cx)
+                    .active_pane()
+                    .read(cx)
+                    .active_item()
+                    .and_then(|item| item.downcast::<ProjectSearchView>())
+            })
+            .expect("Search view should be active after deploy");
+
+        // Smartcase should override the explicit case_sensitive flag
+        // because the query is all lowercase.
+        search_view.update_in(cx, |search_view, _window, cx| {
+            assert!(
+                !search_view
+                    .search_options
+                    .contains(SearchOptions::CASE_SENSITIVE),
+                "Smartcase should disable case sensitivity for a lowercase query, \
+                 even when case_sensitive was explicitly set in the action"
+            );
+            let query_text = search_view.query_editor.read(cx).text(cx);
+            assert_eq!(query_text, "lowercase_query");
+        });
+
+        // Now deploy with an uppercase query - smartcase should enable case sensitivity.
+        workspace.update_in(cx, |workspace, window, cx| {
+            ProjectSearchView::deploy_search(
+                workspace,
+                &workspace::DeploySearch {
+                    query: Some("Uppercase_Query".into()),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+        });
+
+        search_view.update_in(cx, |search_view, _window, cx| {
+            assert!(
+                search_view
+                    .search_options
+                    .contains(SearchOptions::CASE_SENSITIVE),
+                "Smartcase should enable case sensitivity for a query containing uppercase"
+            );
+            let query_text = search_view.query_editor.read(cx).text(cx);
+            assert_eq!(query_text, "Uppercase_Query");
+        });
+    }
+
     fn init_test(cx: &mut TestAppContext) {
         cx.update(|cx| {
             let settings = SettingsStore::test(cx);

crates/settings/src/vscode_import.rs 🔗

@@ -198,7 +198,7 @@ impl VsCodeSettings {
             log: None,
             message_editor: None,
             node: self.node_binary_settings(),
-            notification_panel: None,
+
             outline_panel: self.outline_panel_settings_content(),
             preview_tabs: self.preview_tabs_settings_content(),
             project: self.project_settings_content(),

crates/settings_content/Cargo.toml 🔗

@@ -19,6 +19,7 @@ anyhow.workspace = true
 collections.workspace = true
 derive_more.workspace = true
 gpui.workspace = true
+language_model_core.workspace = true
 log.workspace = true
 schemars.workspace = true
 serde.workspace = true

crates/settings_content/src/agent.rs 🔗

@@ -128,6 +128,12 @@ pub struct AgentSettingsContent {
     /// Default: 320
     #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
     pub default_height: Option<f32>,
+    /// Maximum content width in pixels for the agent panel. Content will be
+    /// centered when the panel is wider than this value.
+    ///
+    /// Default: 850
+    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
+    pub max_content_width: Option<f32>,
     /// The default model to use when creating new chats and for other features when a specific model is not specified.
     pub default_model: Option<LanguageModelSelection>,
     /// Favorite models to show at the top of the model selector.

crates/settings_content/src/language_model.rs 🔗

@@ -1,8 +1,8 @@
+use crate::merge_from::MergeFrom;
 use collections::HashMap;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings_macros::{MergeFrom, with_fallible_options};
-use strum::EnumString;
 
 use std::sync::Arc;
 
@@ -237,15 +237,12 @@ pub struct OpenAiAvailableModel {
     pub capabilities: OpenAiModelCapabilities,
 }
 
-#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, EnumString, JsonSchema, MergeFrom)]
-#[serde(rename_all = "lowercase")]
-#[strum(serialize_all = "lowercase")]
-pub enum OpenAiReasoningEffort {
-    Minimal,
-    Low,
-    Medium,
-    High,
-    XHigh,
+pub use language_model_core::ReasoningEffort as OpenAiReasoningEffort;
+
+impl MergeFrom for OpenAiReasoningEffort {
+    fn merge_from(&mut self, other: &Self) {
+        *self = *other;
+    }
 }
 
 #[with_fallible_options]
@@ -479,15 +476,10 @@ pub struct LanguageModelCacheConfiguration {
     pub min_total_token: u64,
 }
 
-#[derive(
-    Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom,
-)]
-#[serde(tag = "type", rename_all = "lowercase")]
-pub enum ModelMode {
-    #[default]
-    Default,
-    Thinking {
-        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
-        budget_tokens: Option<u32>,
-    },
+pub use language_model_core::ModelMode;
+
+impl MergeFrom for ModelMode {
+    fn merge_from(&mut self, other: &Self) {
+        *self = *other;
+    }
 }

crates/settings_content/src/settings_content.rs 🔗

@@ -174,9 +174,6 @@ pub struct SettingsContent {
     /// Configuration for Node-related features
     pub node: Option<NodeBinarySettings>,
 
-    /// Configuration for the Notification Panel
-    pub notification_panel: Option<NotificationPanelSettingsContent>,
-
     pub proxy: Option<String>,
 
     /// The URL of the Zed server to connect to.
@@ -631,28 +628,6 @@ pub struct ScrollbarSettings {
     pub show: Option<ShowScrollbar>,
 }
 
-#[with_fallible_options]
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug, PartialEq)]
-pub struct NotificationPanelSettingsContent {
-    /// Whether to show the panel button in the status bar.
-    ///
-    /// Default: true
-    pub button: Option<bool>,
-    /// Where to dock the panel.
-    ///
-    /// Default: right
-    pub dock: Option<DockPosition>,
-    /// Default width of the panel in pixels.
-    ///
-    /// Default: 300
-    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
-    pub default_width: Option<f32>,
-    /// Whether to show a badge on the notification panel icon with the count of unread notifications.
-    ///
-    /// Default: false
-    pub show_count_badge: Option<bool>,
-}
-
 #[with_fallible_options]
 #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug, PartialEq)]
 pub struct PanelSettingsContent {
@@ -763,6 +738,7 @@ pub struct VimSettingsContent {
     pub toggle_relative_line_numbers: Option<bool>,
     pub use_system_clipboard: Option<UseSystemClipboard>,
     pub use_smartcase_find: Option<bool>,
+    pub use_regex_search: Option<bool>,
     /// When enabled, the `:substitute` command replaces all matches in a line
     /// by default. The 'g' flag then toggles this behavior.
     pub gdefault: Option<bool>,

crates/settings_ui/Cargo.toml 🔗

@@ -28,7 +28,6 @@ cpal.workspace = true
 edit_prediction.workspace = true
 edit_prediction_ui.workspace = true
 editor.workspace = true
-feature_flags.workspace = true
 fs.workspace = true
 futures.workspace = true
 fuzzy.workspace = true

crates/settings_ui/src/components/input_field.rs 🔗

@@ -109,16 +109,37 @@ impl RenderOnce for SettingsInputField {
             ..Default::default()
         };
 
+        let first_render_initial_text = window.use_state(cx, |_, _| self.initial_text.clone());
+
         let editor = if let Some(id) = self.id {
             window.use_keyed_state(id, cx, {
                 let initial_text = self.initial_text.clone();
                 let placeholder = self.placeholder;
+                let mut confirm = self.confirm.clone();
+
                 move |window, cx| {
                     let mut editor = Editor::single_line(window, cx);
+                    let editor_focus_handle = editor.focus_handle(cx);
                     if let Some(text) = initial_text {
                         editor.set_text(text, window, cx);
                     }
 
+                    if let Some(confirm) = confirm.take()
+                        && !self.display_confirm_button
+                        && !self.display_clear_button
+                        && !self.clear_on_confirm
+                    {
+                        cx.on_focus_out(
+                            &editor_focus_handle,
+                            window,
+                            move |editor, _, window, cx| {
+                                let text = Some(editor.text(cx));
+                                confirm(text, window, cx);
+                            },
+                        )
+                        .detach();
+                    }
+
                     if let Some(placeholder) = placeholder {
                         editor.set_placeholder_text(placeholder, window, cx);
                     }
@@ -130,12 +151,31 @@ impl RenderOnce for SettingsInputField {
             window.use_state(cx, {
                 let initial_text = self.initial_text.clone();
                 let placeholder = self.placeholder;
+                let mut confirm = self.confirm.clone();
+
                 move |window, cx| {
                     let mut editor = Editor::single_line(window, cx);
+                    let editor_focus_handle = editor.focus_handle(cx);
                     if let Some(text) = initial_text {
                         editor.set_text(text, window, cx);
                     }
 
+                    if let Some(confirm) = confirm.take()
+                        && !self.display_confirm_button
+                        && !self.display_clear_button
+                        && !self.clear_on_confirm
+                    {
+                        cx.on_focus_out(
+                            &editor_focus_handle,
+                            window,
+                            move |editor, _, window, cx| {
+                                let text = Some(editor.text(cx));
+                                confirm(text, window, cx);
+                            },
+                        )
+                        .detach();
+                    }
+
                     if let Some(placeholder) = placeholder {
                         editor.set_placeholder_text(placeholder, window, cx);
                     }
@@ -149,11 +189,20 @@ impl RenderOnce for SettingsInputField {
         // re-renders but use_keyed_state returns the cached editor with stale text.
         // Reconcile with the expected initial_text when the editor is not focused,
         // so we don't clobber what the user is actively typing.
-        if let Some(initial_text) = &self.initial_text {
-            let current_text = editor.read(cx).text(cx);
-            if current_text != *initial_text && !editor.read(cx).is_focused(window) {
-                editor.update(cx, |editor, cx| {
-                    editor.set_text(initial_text.clone(), window, cx);
+        if let Some(initial_text) = &self.initial_text
+            && let Some(first_initial) = first_render_initial_text.read(cx)
+        {
+            if initial_text != first_initial && !editor.read(cx).is_focused(window) {
+                *first_render_initial_text.as_mut(cx) = self.initial_text.clone();
+                let weak_editor = editor.downgrade();
+                let initial_text = initial_text.clone();
+
+                window.defer(cx, move |window, cx| {
+                    weak_editor
+                        .update(cx, |editor, cx| {
+                            editor.set_text(initial_text, window, cx);
+                        })
+                        .ok();
                 });
             }
         }

crates/settings_ui/src/page_data.rs 🔗

@@ -1,4 +1,3 @@
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _};
 use gpui::{Action as _, App};
 use itertools::Itertools as _;
 use settings::{
@@ -2447,7 +2446,7 @@ fn editor_page() -> SettingsPage {
         ]
     }
 
-    fn vim_settings_section() -> [SettingsPageItem; 12] {
+    fn vim_settings_section() -> [SettingsPageItem; 13] {
         [
             SettingsPageItem::SectionHeader("Vim"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -2556,6 +2555,24 @@ fn editor_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Regex Search",
+                description: "Use regex search by default in Vim search.",
+                field: Box::new(SettingField {
+                    json_path: Some("vim.use_regex_search"),
+                    pick: |settings_content| {
+                        settings_content.vim.as_ref()?.use_regex_search.as_ref()
+                    },
+                    write: |settings_content, value| {
+                        settings_content
+                            .vim
+                            .get_or_insert_default()
+                            .use_regex_search = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Cursor Shape - Normal Mode",
                 description: "Cursor shape for normal mode.",
@@ -5561,96 +5578,6 @@ fn panels_page() -> SettingsPage {
         ]
     }
 
-    fn notification_panel_section() -> [SettingsPageItem; 5] {
-        [
-            SettingsPageItem::SectionHeader("Notification Panel"),
-            SettingsPageItem::SettingItem(SettingItem {
-                title: "Notification Panel Button",
-                description: "Show the notification panel button in the status bar.",
-                field: Box::new(SettingField {
-                    json_path: Some("notification_panel.button"),
-                    pick: |settings_content| {
-                        settings_content
-                            .notification_panel
-                            .as_ref()?
-                            .button
-                            .as_ref()
-                    },
-                    write: |settings_content, value| {
-                        settings_content
-                            .notification_panel
-                            .get_or_insert_default()
-                            .button = value;
-                    },
-                }),
-                metadata: None,
-                files: USER,
-            }),
-            SettingsPageItem::SettingItem(SettingItem {
-                title: "Notification Panel Dock",
-                description: "Where to dock the notification panel.",
-                field: Box::new(SettingField {
-                    json_path: Some("notification_panel.dock"),
-                    pick: |settings_content| {
-                        settings_content.notification_panel.as_ref()?.dock.as_ref()
-                    },
-                    write: |settings_content, value| {
-                        settings_content
-                            .notification_panel
-                            .get_or_insert_default()
-                            .dock = value;
-                    },
-                }),
-                metadata: None,
-                files: USER,
-            }),
-            SettingsPageItem::SettingItem(SettingItem {
-                title: "Notification Panel Default Width",
-                description: "Default width of the notification panel in pixels.",
-                field: Box::new(SettingField {
-                    json_path: Some("notification_panel.default_width"),
-                    pick: |settings_content| {
-                        settings_content
-                            .notification_panel
-                            .as_ref()?
-                            .default_width
-                            .as_ref()
-                    },
-                    write: |settings_content, value| {
-                        settings_content
-                            .notification_panel
-                            .get_or_insert_default()
-                            .default_width = value;
-                    },
-                }),
-                metadata: None,
-                files: USER,
-            }),
-            SettingsPageItem::SettingItem(SettingItem {
-                title: "Show Count Badge",
-                description: "Show a badge on the notification panel icon with the count of unread notifications.",
-                field: Box::new(SettingField {
-                    json_path: Some("notification_panel.show_count_badge"),
-                    pick: |settings_content| {
-                        settings_content
-                            .notification_panel
-                            .as_ref()?
-                            .show_count_badge
-                            .as_ref()
-                    },
-                    write: |settings_content, value| {
-                        settings_content
-                            .notification_panel
-                            .get_or_insert_default()
-                            .show_count_badge = value;
-                    },
-                }),
-                metadata: None,
-                files: USER,
-            }),
-        ]
-    }
-
     fn collaboration_panel_section() -> [SettingsPageItem; 4] {
         [
             SettingsPageItem::SectionHeader("Collaboration Panel"),
@@ -5719,7 +5646,7 @@ fn panels_page() -> SettingsPage {
         ]
     }
 
-    fn agent_panel_section() -> [SettingsPageItem; 6] {
+    fn agent_panel_section() -> [SettingsPageItem; 7] {
         [
             SettingsPageItem::SectionHeader("Agent Panel"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -5794,6 +5721,24 @@ fn panels_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Agent Panel Max Content Width",
+                description: "Maximum content width in pixels. Content will be centered when the panel is wider than this value.",
+                field: Box::new(SettingField {
+                    json_path: Some("agent.max_content_width"),
+                    pick: |settings_content| {
+                        settings_content.agent.as_ref()?.max_content_width.as_ref()
+                    },
+                    write: |settings_content, value| {
+                        settings_content
+                            .agent
+                            .get_or_insert_default()
+                            .max_content_width = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
         ]
     }
 
@@ -5805,7 +5750,6 @@ fn panels_page() -> SettingsPage {
             outline_panel_section(),
             git_panel_section(),
             debugger_panel_section(),
-            notification_panel_section(),
             collaboration_panel_section(),
             agent_panel_section(),
         ],
@@ -7243,7 +7187,7 @@ fn ai_page(cx: &App) -> SettingsPage {
         ]
     }
 
-    fn agent_configuration_section(cx: &App) -> Box<[SettingsPageItem]> {
+    fn agent_configuration_section(_cx: &App) -> Box<[SettingsPageItem]> {
         let mut items = vec![
             SettingsPageItem::SectionHeader("Agent Configuration"),
             SettingsPageItem::SubPageLink(SubPageLink {
@@ -7257,30 +7201,28 @@ fn ai_page(cx: &App) -> SettingsPage {
             }),
         ];
 
-        if cx.has_flag::<AgentV2FeatureFlag>() {
-            items.push(SettingsPageItem::SettingItem(SettingItem {
-                title: "New Thread Location",
-                description: "Whether to start a new thread in the current local project or in a new Git worktree.",
-                field: Box::new(SettingField {
-                    json_path: Some("agent.new_thread_location"),
-                    pick: |settings_content| {
-                        settings_content
-                            .agent
-                            .as_ref()?
-                            .new_thread_location
-                            .as_ref()
-                    },
-                    write: |settings_content, value| {
-                        settings_content
-                            .agent
-                            .get_or_insert_default()
-                            .new_thread_location = value;
-                    },
-                }),
-                metadata: None,
-                files: USER,
-            }));
-        }
+        items.push(SettingsPageItem::SettingItem(SettingItem {
+            title: "New Thread Location",
+            description: "Whether to start a new thread in the current local project or in a new Git worktree.",
+            field: Box::new(SettingField {
+                json_path: Some("agent.new_thread_location"),
+                pick: |settings_content| {
+                    settings_content
+                        .agent
+                        .as_ref()?
+                        .new_thread_location
+                        .as_ref()
+                },
+                write: |settings_content, value| {
+                    settings_content
+                        .agent
+                        .get_or_insert_default()
+                        .new_thread_location = value;
+                },
+            }),
+            metadata: None,
+            files: USER,
+        }));
 
         items.extend([
             SettingsPageItem::SettingItem(SettingItem {

crates/sidebar/Cargo.toml 🔗

@@ -24,7 +24,6 @@ agent_ui = { workspace = true, features = ["audio"] }
 anyhow.workspace = true
 chrono.workspace = true
 editor.workspace = true
-feature_flags.workspace = true
 fs.workspace = true
 futures.workspace = true
 git.workspace = true
@@ -57,7 +56,6 @@ pretty_assertions.workspace = true
 prompt_store.workspace = true
 recent_projects = { workspace = true, features = ["test-support"] }
 serde_json.workspace = true
-feature_flags.workspace = true
 fs = { workspace = true, features = ["test-support"] }
 git.workspace = true
 gpui = { workspace = true, features = ["test-support"] }

crates/sidebar/src/sidebar.rs 🔗

@@ -15,7 +15,6 @@ use agent_ui::{
 };
 use chrono::{DateTime, Utc};
 use editor::Editor;
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _};
 use gpui::{
     Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, KeyContext, ListState,
     Pixels, Render, SharedString, WeakEntity, Window, WindowHandle, linear_color_stop,
@@ -46,9 +45,11 @@ use ui::{
 use util::ResultExt as _;
 use util::path_list::{PathList, SerializedPathList};
 use workspace::{
-    AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent,
-    Open, Sidebar as WorkspaceSidebar, SidebarSide, Toast, ToggleWorkspaceSidebar, Workspace,
-    WorkspaceId, notifications::NotificationId, sidebar_side_context_menu,
+    AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MoveWorkspaceToNewWindow,
+    MultiWorkspace, MultiWorkspaceEvent, NextProjectGroup, NextThread, Open, PreviousProjectGroup,
+    PreviousThread, ShowFewerThreads, ShowMoreThreads, Sidebar as WorkspaceSidebar, SidebarSide,
+    Toast, ToggleWorkspaceSidebar, Workspace, WorkspaceId, notifications::NotificationId,
+    sidebar_side_context_menu,
 };
 
 use zed_actions::OpenRecent;
@@ -170,6 +171,7 @@ struct WorktreeInfo {
     name: SharedString,
     full_path: SharedString,
     highlight_positions: Vec<usize>,
+    kind: ui::WorktreeKind,
 }
 
 #[derive(Clone)]
@@ -314,23 +316,25 @@ fn workspace_path_list(workspace: &Entity<Workspace>, cx: &App) -> PathList {
 
 /// Derives worktree display info from a thread's stored path list.
 ///
-/// For each path in the thread's `folder_paths` that is not one of the
-/// group's main paths (i.e. it's a git linked worktree), produces a
-/// [`WorktreeInfo`] with the short worktree name and full path.
+/// For each path in the thread's `folder_paths`, produces a
+/// [`WorktreeInfo`] with a short display name, full path, and whether
+/// the worktree is the main checkout or a linked git worktree.
 fn worktree_info_from_thread_paths(
     folder_paths: &PathList,
     group_key: &project::ProjectGroupKey,
-) -> Vec<WorktreeInfo> {
+) -> impl Iterator<Item = WorktreeInfo> {
     let main_paths = group_key.path_list().paths();
-    folder_paths
-        .paths()
-        .iter()
-        .filter_map(|path| {
-            if main_paths.iter().any(|mp| mp.as_path() == path.as_path()) {
-                return None;
-            }
-            // Find the main path whose file name matches this linked
-            // worktree's file name, falling back to the first main path.
+    folder_paths.paths().iter().filter_map(|path| {
+        let is_main = main_paths.iter().any(|mp| mp.as_path() == path.as_path());
+        if is_main {
+            let name = path.file_name()?.to_string_lossy().to_string();
+            Some(WorktreeInfo {
+                name: SharedString::from(name),
+                full_path: SharedString::from(path.display().to_string()),
+                highlight_positions: Vec::new(),
+                kind: ui::WorktreeKind::Main,
+            })
+        } else {
             let main_path = main_paths
                 .iter()
                 .find(|mp| mp.file_name() == path.file_name())
@@ -339,9 +343,10 @@ fn worktree_info_from_thread_paths(
                 name: linked_worktree_short_name(main_path, path).unwrap_or_default(),
                 full_path: SharedString::from(path.display().to_string()),
                 highlight_positions: Vec::new(),
+                kind: ui::WorktreeKind::Linked,
             })
-        })
-        .collect()
+        }
+    })
 }
 
 /// The sidebar re-derives its entire entry list from scratch on every
@@ -436,11 +441,6 @@ impl Sidebar {
         })
         .detach();
 
-        cx.observe_flag::<AgentV2FeatureFlag, _>(window, |_is_enabled, this, _window, cx| {
-            this.update_entries(cx);
-        })
-        .detach();
-
         let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect();
         cx.defer_in(window, move |this, window, cx| {
             for workspace in &workspaces {
@@ -858,7 +858,8 @@ impl Sidebar {
                                          workspace: ThreadEntryWorkspace|
                  -> ThreadEntry {
                     let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
-                    let worktrees = worktree_info_from_thread_paths(&row.folder_paths, &group_key);
+                    let worktrees: Vec<WorktreeInfo> =
+                        worktree_info_from_thread_paths(&row.folder_paths, &group_key).collect();
                     ThreadEntry {
                         metadata: row,
                         icon,
@@ -1066,7 +1067,9 @@ impl Sidebar {
                     if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry {
                         let ws_path_list = workspace_path_list(draft_ws, cx);
                         let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key);
-                        entries.push(ListEntry::DraftThread { worktrees });
+                        entries.push(ListEntry::DraftThread {
+                            worktrees: worktrees.collect(),
+                        });
                     }
                 }
 
@@ -1080,7 +1083,8 @@ impl Sidebar {
                     && active_workspace.as_ref().is_some_and(|active_ws| {
                         let ws_path_list = workspace_path_list(active_ws, cx);
                         let has_linked_worktrees =
-                            !worktree_info_from_thread_paths(&ws_path_list, &group_key).is_empty();
+                            worktree_info_from_thread_paths(&ws_path_list, &group_key)
+                                .any(|wt| wt.kind == ui::WorktreeKind::Linked);
                         if !has_linked_worktrees {
                             return false;
                         }
@@ -1109,6 +1113,7 @@ impl Sidebar {
                                     &workspace_path_list(ws, cx),
                                     &group_key,
                                 )
+                                .collect()
                             })
                             .unwrap_or_default()
                     } else {
@@ -1609,9 +1614,7 @@ impl Sidebar {
                         let multi_workspace = multi_workspace.clone();
                         menu.entry(
                             "Move to New Window",
-                            Some(Box::new(
-                                zed_actions::agents_sidebar::MoveWorkspaceToNewWindow,
-                            )),
+                            Some(Box::new(MoveWorkspaceToNewWindow)),
                             move |window, cx| {
                                 multi_workspace
                                     .update(cx, |multi_workspace, cx| {
@@ -1946,7 +1949,7 @@ impl Sidebar {
                 match &thread.workspace {
                     ThreadEntryWorkspace::Open(workspace) => {
                         let workspace = workspace.clone();
-                        self.activate_thread(metadata, &workspace, window, cx);
+                        self.activate_thread(metadata, &workspace, false, window, cx);
                     }
                     ThreadEntryWorkspace::Closed(path_list) => {
                         self.open_workspace_and_activate_thread(
@@ -1965,13 +1968,10 @@ impl Sidebar {
             } => {
                 let path_list = key.path_list().clone();
                 if *is_fully_expanded {
-                    self.expanded_groups.remove(&path_list);
+                    self.reset_thread_group_expansion(&path_list, cx);
                 } else {
-                    let current = self.expanded_groups.get(&path_list).copied().unwrap_or(0);
-                    self.expanded_groups.insert(path_list, current + 1);
+                    self.expand_thread_group(&path_list, cx);
                 }
-                self.serialize(cx);
-                self.update_entries(cx);
             }
             ListEntry::DraftThread { .. } => {
                 // Already active — nothing to do.
@@ -2050,6 +2050,7 @@ impl Sidebar {
         &mut self,
         metadata: &ThreadMetadata,
         workspace: &Entity<Workspace>,
+        retain: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -2068,6 +2069,9 @@ impl Sidebar {
 
         multi_workspace.update(cx, |multi_workspace, cx| {
             multi_workspace.activate(workspace.clone(), window, cx);
+            if retain {
+                multi_workspace.retain_active_workspace(cx);
+            }
         });
 
         Self::load_agent_thread_in_workspace(workspace, metadata, true, window, cx);
@@ -2119,6 +2123,7 @@ impl Sidebar {
         &mut self,
         metadata: ThreadMetadata,
         workspace: &Entity<Workspace>,
+        retain: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -2126,7 +2131,7 @@ impl Sidebar {
             .find_workspace_in_current_window(cx, |candidate, _| candidate == workspace)
             .is_some()
         {
-            self.activate_thread_locally(&metadata, &workspace, window, cx);
+            self.activate_thread_locally(&metadata, &workspace, retain, window, cx);
             return;
         }
 
@@ -2157,7 +2162,7 @@ impl Sidebar {
         cx.spawn_in(window, async move |this, cx| {
             let workspace = open_task.await?;
             this.update_in(cx, |this, window, cx| {
-                this.activate_thread(metadata, &workspace, window, cx);
+                this.activate_thread(metadata, &workspace, false, window, cx);
             })?;
             anyhow::Ok(())
         })
@@ -2201,7 +2206,7 @@ impl Sidebar {
                 .map(|w| w.read(cx).workspace().clone());
 
             if let Some(workspace) = active_workspace {
-                self.activate_thread_locally(&metadata, &workspace, window, cx);
+                self.activate_thread_locally(&metadata, &workspace, false, window, cx);
             }
             return;
         }
@@ -2223,7 +2228,7 @@ impl Sidebar {
                     if let Some(workspace) =
                         this.find_current_workspace_for_path_list(&path_list, cx)
                     {
-                        this.activate_thread_locally(&metadata, &workspace, window, cx);
+                        this.activate_thread_locally(&metadata, &workspace, false, window, cx);
                     } else if let Some((target_window, workspace)) =
                         this.find_open_workspace_for_path_list(&path_list, cx)
                     {
@@ -2839,6 +2844,7 @@ impl Sidebar {
                                 name: wt.name.clone(),
                                 full_path: wt.full_path.clone(),
                                 highlight_positions: Vec::new(),
+                                kind: wt.kind,
                             })
                             .collect(),
                         diff_stats: thread.diff_stats,
@@ -2967,6 +2973,7 @@ impl Sidebar {
                     if let Some(mw) = weak_multi_workspace.upgrade() {
                         mw.update(cx, |mw, cx| {
                             mw.activate(workspace.clone(), window, cx);
+                            mw.retain_active_workspace(cx);
                         });
                     }
                     this.record_thread_access(&metadata.session_id);
@@ -3111,6 +3118,7 @@ impl Sidebar {
                         name: wt.name.clone(),
                         full_path: wt.full_path.clone(),
                         highlight_positions: wt.highlight_positions.clone(),
+                        kind: wt.kind,
                     })
                     .collect(),
             )
@@ -3179,7 +3187,7 @@ impl Sidebar {
                     this.selection = None;
                     match &thread_workspace {
                         ThreadEntryWorkspace::Open(workspace) => {
-                            this.activate_thread(metadata.clone(), workspace, window, cx);
+                            this.activate_thread(metadata.clone(), workspace, false, window, cx);
                         }
                         ThreadEntryWorkspace::Closed(path_list) => {
                             this.open_workspace_and_activate_thread(
@@ -3290,13 +3298,10 @@ impl Sidebar {
             .on_click(cx.listener(move |this, _, _window, cx| {
                 this.selection = None;
                 if is_fully_expanded {
-                    this.expanded_groups.remove(&path_list);
+                    this.reset_thread_group_expansion(&path_list, cx);
                 } else {
-                    let current = this.expanded_groups.get(&path_list).copied().unwrap_or(0);
-                    this.expanded_groups.insert(path_list.clone(), current + 1);
+                    this.expand_thread_group(&path_list, cx);
                 }
-                this.serialize(cx);
-                this.update_entries(cx);
             }))
             .into_any_element()
     }
@@ -3362,6 +3367,242 @@ impl Sidebar {
         });
     }
 
+    fn active_project_group_key(&self, cx: &App) -> Option<ProjectGroupKey> {
+        let multi_workspace = self.multi_workspace.upgrade()?;
+        let mw = multi_workspace.read(cx);
+        Some(mw.workspace().read(cx).project_group_key(cx))
+    }
+
+    fn active_project_header_position(&self, cx: &App) -> Option<usize> {
+        let active_key = self.active_project_group_key(cx)?;
+        self.contents
+            .project_header_indices
+            .iter()
+            .position(|&entry_ix| {
+                matches!(
+                    &self.contents.entries[entry_ix],
+                    ListEntry::ProjectHeader { key, .. } if *key == active_key
+                )
+            })
+    }
+
+    fn cycle_project_group_impl(
+        &mut self,
+        forward: bool,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+            return;
+        };
+
+        let header_count = self.contents.project_header_indices.len();
+        if header_count == 0 {
+            return;
+        }
+
+        let current_pos = self.active_project_header_position(cx);
+
+        let next_pos = match current_pos {
+            Some(pos) => {
+                if forward {
+                    (pos + 1) % header_count
+                } else {
+                    (pos + header_count - 1) % header_count
+                }
+            }
+            None => 0,
+        };
+
+        let header_entry_ix = self.contents.project_header_indices[next_pos];
+        let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(header_entry_ix)
+        else {
+            return;
+        };
+        let path_list = key.path_list().clone();
+
+        // Uncollapse the target group so that threads become visible.
+        self.collapsed_groups.remove(&path_list);
+
+        if let Some(workspace) = self.workspace_for_group(&path_list, cx) {
+            multi_workspace.update(cx, |multi_workspace, cx| {
+                multi_workspace.activate(workspace, window, cx);
+                multi_workspace.retain_active_workspace(cx);
+            });
+        } else {
+            self.open_workspace_for_group(&path_list, window, cx);
+        }
+    }
+
+    fn on_next_project_group(
+        &mut self,
+        _: &NextProjectGroup,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.cycle_project_group_impl(true, window, cx);
+    }
+
+    fn on_previous_project_group(
+        &mut self,
+        _: &PreviousProjectGroup,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.cycle_project_group_impl(false, window, cx);
+    }
+
+    fn cycle_thread_impl(&mut self, forward: bool, window: &mut Window, cx: &mut Context<Self>) {
+        let thread_indices: Vec<usize> = self
+            .contents
+            .entries
+            .iter()
+            .enumerate()
+            .filter_map(|(ix, entry)| match entry {
+                ListEntry::Thread(_) => Some(ix),
+                _ => None,
+            })
+            .collect();
+
+        if thread_indices.is_empty() {
+            return;
+        }
+
+        let current_thread_pos = self.active_entry.as_ref().and_then(|active| {
+            thread_indices
+                .iter()
+                .position(|&ix| active.matches_entry(&self.contents.entries[ix]))
+        });
+
+        let next_pos = match current_thread_pos {
+            Some(pos) => {
+                let count = thread_indices.len();
+                if forward {
+                    (pos + 1) % count
+                } else {
+                    (pos + count - 1) % count
+                }
+            }
+            None => 0,
+        };
+
+        let entry_ix = thread_indices[next_pos];
+        let ListEntry::Thread(thread) = &self.contents.entries[entry_ix] else {
+            return;
+        };
+
+        let metadata = thread.metadata.clone();
+        match &thread.workspace {
+            ThreadEntryWorkspace::Open(workspace) => {
+                let workspace = workspace.clone();
+                self.activate_thread(metadata, &workspace, true, window, cx);
+            }
+            ThreadEntryWorkspace::Closed(path_list) => {
+                self.open_workspace_and_activate_thread(metadata, path_list.clone(), window, cx);
+            }
+        }
+    }
+
+    fn on_next_thread(&mut self, _: &NextThread, window: &mut Window, cx: &mut Context<Self>) {
+        self.cycle_thread_impl(true, window, cx);
+    }
+
+    fn on_previous_thread(
+        &mut self,
+        _: &PreviousThread,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.cycle_thread_impl(false, window, cx);
+    }
+
+    fn expand_thread_group(&mut self, path_list: &PathList, cx: &mut Context<Self>) {
+        let current = self.expanded_groups.get(path_list).copied().unwrap_or(0);
+        self.expanded_groups.insert(path_list.clone(), current + 1);
+        self.serialize(cx);
+        self.update_entries(cx);
+    }
+
+    fn reset_thread_group_expansion(&mut self, path_list: &PathList, cx: &mut Context<Self>) {
+        self.expanded_groups.remove(path_list);
+        self.serialize(cx);
+        self.update_entries(cx);
+    }
+
+    fn collapse_thread_group(&mut self, path_list: &PathList, cx: &mut Context<Self>) {
+        match self.expanded_groups.get(path_list).copied() {
+            Some(batches) if batches > 1 => {
+                self.expanded_groups.insert(path_list.clone(), batches - 1);
+            }
+            Some(_) => {
+                self.expanded_groups.remove(path_list);
+            }
+            None => return,
+        }
+        self.serialize(cx);
+        self.update_entries(cx);
+    }
+
+    fn on_show_more_threads(
+        &mut self,
+        _: &ShowMoreThreads,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(active_key) = self.active_project_group_key(cx) else {
+            return;
+        };
+        self.expand_thread_group(active_key.path_list(), cx);
+    }
+
+    fn on_show_fewer_threads(
+        &mut self,
+        _: &ShowFewerThreads,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(active_key) = self.active_project_group_key(cx) else {
+            return;
+        };
+        self.collapse_thread_group(active_key.path_list(), cx);
+    }
+
+    fn on_new_thread(
+        &mut self,
+        _: &workspace::NewThread,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(workspace) = self.active_workspace(cx) else {
+            return;
+        };
+        self.create_new_thread(&workspace, window, cx);
+    }
+
+    fn on_move_workspace_to_new_window(
+        &mut self,
+        _: &MoveWorkspaceToNewWindow,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+            return;
+        };
+
+        let group_count = multi_workspace.read(cx).project_group_keys().count();
+        if group_count <= 1 {
+            return;
+        }
+
+        let Some(active_key) = self.active_project_group_key(cx) else {
+            return;
+        };
+
+        multi_workspace.update(cx, |multi_workspace, cx| {
+            multi_workspace.move_project_group_to_new_window(&active_key, window, cx);
+        });
+    }
+
     fn render_draft_thread(
         &self,
         ix: usize,
@@ -3389,6 +3630,7 @@ impl Sidebar {
                         name: wt.name.clone(),
                         full_path: wt.full_path.clone(),
                         highlight_positions: wt.highlight_positions.clone(),
+                        kind: wt.kind,
                     })
                     .collect(),
             )
@@ -3426,6 +3668,7 @@ impl Sidebar {
                         name: wt.name.clone(),
                         full_path: wt.full_path.clone(),
                         highlight_positions: wt.highlight_positions.clone(),
+                        kind: wt.kind,
                     })
                     .collect(),
             )
@@ -3899,6 +4142,18 @@ impl WorkspaceSidebar for Sidebar {
         self.toggle_thread_switcher_impl(select_last, window, cx);
     }
 
+    fn cycle_project_group(&mut self, forward: bool, window: &mut Window, cx: &mut Context<Self>) {
+        self.cycle_project_group_impl(forward, window, cx);
+    }
+
+    fn cycle_thread(&mut self, forward: bool, window: &mut Window, cx: &mut Context<Self>) {
+        self.cycle_thread_impl(forward, window, cx);
+    }
+
+    fn move_workspace_to_new_window(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        self.on_move_workspace_to_new_window(&MoveWorkspaceToNewWindow, window, cx);
+    }
+
     fn serialized_state(&self, _cx: &App) -> Option<String> {
         let serialized = SerializedSidebar {
             width: Some(f32::from(self.width)),
@@ -3994,6 +4249,14 @@ impl Render for Sidebar {
             .on_action(cx.listener(Self::toggle_archive))
             .on_action(cx.listener(Self::focus_sidebar_filter))
             .on_action(cx.listener(Self::on_toggle_thread_switcher))
+            .on_action(cx.listener(Self::on_next_project_group))
+            .on_action(cx.listener(Self::on_previous_project_group))
+            .on_action(cx.listener(Self::on_next_thread))
+            .on_action(cx.listener(Self::on_previous_thread))
+            .on_action(cx.listener(Self::on_show_more_threads))
+            .on_action(cx.listener(Self::on_show_fewer_threads))
+            .on_action(cx.listener(Self::on_new_thread))
+            .on_action(cx.listener(Self::on_move_workspace_to_new_window))
             .on_action(cx.listener(|this, _: &OpenRecent, window, cx| {
                 this.recent_projects_popover_handle.toggle(window, cx);
             }))

crates/sidebar/src/sidebar_tests.rs 🔗

@@ -6,7 +6,6 @@ use agent_ui::{
     thread_metadata_store::ThreadMetadata,
 };
 use chrono::DateTime;
-use feature_flags::FeatureFlagAppExt as _;
 use fs::FakeFs;
 use gpui::TestAppContext;
 use pretty_assertions::assert_eq;
@@ -24,7 +23,6 @@ fn init_test(cx: &mut TestAppContext) {
         cx.set_global(settings_store);
         theme_settings::init(theme::LoadThemes::JustBase, cx);
         editor::init(cx);
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -191,6 +189,25 @@ fn focus_sidebar(sidebar: &Entity<Sidebar>, cx: &mut gpui::VisualTestContext) {
     cx.run_until_parked();
 }
 
+fn format_linked_worktree_chips(worktrees: &[WorktreeInfo]) -> String {
+    let mut seen = Vec::new();
+    let mut chips = Vec::new();
+    for wt in worktrees {
+        if wt.kind == ui::WorktreeKind::Main {
+            continue;
+        }
+        if !seen.contains(&wt.name) {
+            seen.push(wt.name.clone());
+            chips.push(format!("{{{}}}", wt.name));
+        }
+    }
+    if chips.is_empty() {
+        String::new()
+    } else {
+        format!(" {}", chips.join(", "))
+    }
+}
+
 fn visible_entries_as_strings(
     sidebar: &Entity<Sidebar>,
     cx: &mut gpui::VisualTestContext,
@@ -238,23 +255,8 @@ fn visible_entries_as_strings(
                         } else {
                             ""
                         };
-                        let worktree = if thread.worktrees.is_empty() {
-                            String::new()
-                        } else {
-                            let mut seen = Vec::new();
-                            let mut chips = Vec::new();
-                            for wt in &thread.worktrees {
-                                if !seen.contains(&wt.name) {
-                                    seen.push(wt.name.clone());
-                                    chips.push(format!("{{{}}}", wt.name));
-                                }
-                            }
-                            format!(" {}", chips.join(", "))
-                        };
-                        format!(
-                            "  {}{}{}{}{}{}",
-                            title, worktree, active, status_str, notified, selected
-                        )
+                        let worktree = format_linked_worktree_chips(&thread.worktrees);
+                        format!("  {title}{worktree}{active}{status_str}{notified}{selected}")
                     }
                     ListEntry::ViewMore {
                         is_fully_expanded, ..
@@ -266,35 +268,11 @@ fn visible_entries_as_strings(
                         }
                     }
                     ListEntry::DraftThread { worktrees, .. } => {
-                        let worktree = if worktrees.is_empty() {
-                            String::new()
-                        } else {
-                            let mut seen = Vec::new();
-                            let mut chips = Vec::new();
-                            for wt in worktrees {
-                                if !seen.contains(&wt.name) {
-                                    seen.push(wt.name.clone());
-                                    chips.push(format!("{{{}}}", wt.name));
-                                }
-                            }
-                            format!(" {}", chips.join(", "))
-                        };
+                        let worktree = format_linked_worktree_chips(worktrees);
                         format!("  [~ Draft{}]{}", worktree, selected)
                     }
                     ListEntry::NewThread { worktrees, .. } => {
-                        let worktree = if worktrees.is_empty() {
-                            String::new()
-                        } else {
-                            let mut seen = Vec::new();
-                            let mut chips = Vec::new();
-                            for wt in worktrees {
-                                if !seen.contains(&wt.name) {
-                                    seen.push(wt.name.clone());
-                                    chips.push(format!("{{{}}}", wt.name));
-                                }
-                            }
-                            format!(" {}", chips.join(", "))
-                        };
+                        let worktree = format_linked_worktree_chips(worktrees);
                         format!("  [+ New Thread{}]{}", worktree, selected)
                     }
                 }
@@ -1240,7 +1218,6 @@ async fn init_test_project_with_agent_panel(
 ) -> Entity<project::Project> {
     agent_ui::test_support::init_test(cx);
     cx.update(|cx| {
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -2074,6 +2051,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
                 archived: false,
             },
             &workspace_a,
+            false,
             window,
             cx,
         );
@@ -2097,7 +2075,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
             workspace.panel::<AgentPanel>(cx).is_some(),
             "Agent panel should exist"
         );
-        let dock = workspace.right_dock().read(cx);
+        let dock = workspace.left_dock().read(cx);
         assert!(
             dock.is_open(),
             "Clicking a thread should open the agent panel dock"
@@ -2129,6 +2107,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
                 archived: false,
             },
             &workspace_b,
+            false,
             window,
             cx,
         );
@@ -2443,7 +2422,6 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp
     // header and highlight it as active.
     agent_ui::test_support::init_test(cx);
     cx.update(|cx| {
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -2992,7 +2970,6 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp
     // live status (spinner + "(running)") in the sidebar.
     agent_ui::test_support::init_test(cx);
     cx.update(|cx| {
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -3095,7 +3072,6 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp
 async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAppContext) {
     agent_ui::test_support::init_test(cx);
     cx.update(|cx| {
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -4008,7 +3984,6 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon
     // falling back to group_workspace only for Closed workspaces.
     agent_ui::test_support::init_test(cx);
     cx.update(|cx| {
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -4790,7 +4765,6 @@ async fn init_multi_project_test(
 ) -> (Arc<FakeFs>, Entity<project::Project>) {
     agent_ui::test_support::init_test(cx);
     cx.update(|cx| {
-        cx.update_flags(false, vec!["agent-v2".into()]);
         ThreadStore::init_global(cx);
         ThreadMetadataStore::init_global(cx);
         language_model::LanguageModelRegistry::test(cx);
@@ -5064,6 +5038,7 @@ async fn test_legacy_thread_with_canonical_path_opens_main_repo_workspace(cx: &m
 
 mod property_test {
     use super::*;
+    use gpui::proptest::prelude::*;
 
     struct UnopenedWorktree {
         path: String,
@@ -5658,7 +5633,10 @@ mod property_test {
         Ok(())
     }
 
-    #[gpui::property_test]
+    #[gpui::property_test(config = ProptestConfig {
+        cases: 10,
+        ..Default::default()
+    })]
     async fn test_sidebar_invariants(
         #[strategy = gpui::proptest::collection::vec(0u32..DISTRIBUTION_SLOTS * 10, 1..5)]
         raw_operations: Vec<u32>,
@@ -5666,7 +5644,6 @@ mod property_test {
     ) {
         agent_ui::test_support::init_test(cx);
         cx.update(|cx| {
-            cx.update_flags(false, vec!["agent-v2".into()]);
             ThreadStore::init_global(cx);
             ThreadMetadataStore::init_global(cx);
             language_model::LanguageModelRegistry::test(cx);

crates/sidebar/src/thread_switcher.rs 🔗

@@ -126,6 +126,10 @@ impl ThreadSwitcher {
     }
 
     fn confirm(&mut self, _: &menu::Confirm, _window: &mut gpui::Window, cx: &mut Context<Self>) {
+        self.confirm_selected(cx);
+    }
+
+    fn confirm_selected(&mut self, cx: &mut Context<Self>) {
         if let Some(entry) = self.entries.get(self.selected_index) {
             cx.emit(ThreadSwitcherEvent::Confirmed {
                 metadata: entry.metadata.clone(),
@@ -135,6 +139,13 @@ impl ThreadSwitcher {
         cx.emit(DismissEvent);
     }
 
+    fn select_and_confirm(&mut self, index: usize, cx: &mut Context<Self>) {
+        if index < self.entries.len() {
+            self.selected_index = index;
+            self.confirm_selected(cx);
+        }
+    }
+
     fn cancel(&mut self, _: &menu::Cancel, _window: &mut gpui::Window, cx: &mut Context<Self>) {
         cx.emit(ThreadSwitcherEvent::Dismissed);
         cx.emit(DismissEvent);
@@ -202,28 +213,37 @@ impl Render for ThreadSwitcher {
             .children(self.entries.iter().enumerate().map(|(ix, entry)| {
                 let id = SharedString::from(format!("thread-switcher-{}", entry.session_id));
 
-                ThreadItem::new(id, entry.title.clone())
-                    .rounded(true)
-                    .icon(entry.icon)
-                    .status(entry.status)
-                    .when_some(entry.icon_from_external_svg.clone(), |this, svg| {
-                        this.custom_icon_from_external_svg(svg)
-                    })
-                    .when_some(entry.project_name.clone(), |this, name| {
-                        this.project_name(name)
-                    })
-                    .worktrees(entry.worktrees.clone())
-                    .timestamp(entry.timestamp.clone())
-                    .title_generating(entry.is_title_generating)
-                    .notified(entry.notified)
-                    .when(entry.diff_stats.lines_added > 0, |this| {
-                        this.added(entry.diff_stats.lines_added as usize)
-                    })
-                    .when(entry.diff_stats.lines_removed > 0, |this| {
-                        this.removed(entry.diff_stats.lines_removed as usize)
-                    })
-                    .selected(ix == selected_index)
-                    .base_bg(cx.theme().colors().surface_background)
+                div()
+                    .id(id.clone())
+                    .on_click(
+                        cx.listener(move |this, _event: &gpui::ClickEvent, _window, cx| {
+                            this.select_and_confirm(ix, cx);
+                        }),
+                    )
+                    .child(
+                        ThreadItem::new(id, entry.title.clone())
+                            .rounded(true)
+                            .icon(entry.icon)
+                            .status(entry.status)
+                            .when_some(entry.icon_from_external_svg.clone(), |this, svg| {
+                                this.custom_icon_from_external_svg(svg)
+                            })
+                            .when_some(entry.project_name.clone(), |this, name| {
+                                this.project_name(name)
+                            })
+                            .worktrees(entry.worktrees.clone())
+                            .timestamp(entry.timestamp.clone())
+                            .title_generating(entry.is_title_generating)
+                            .notified(entry.notified)
+                            .when(entry.diff_stats.lines_added > 0, |this| {
+                                this.added(entry.diff_stats.lines_added as usize)
+                            })
+                            .when(entry.diff_stats.lines_removed > 0, |this| {
+                                this.removed(entry.diff_stats.lines_removed as usize)
+                            })
+                            .selected(ix == selected_index)
+                            .base_bg(cx.theme().colors().elevated_surface_background),
+                    )
                     .into_any_element()
             }))
     }

crates/tasks_ui/src/modal.rs 🔗

@@ -566,9 +566,7 @@ impl PickerDelegate for TasksModalDelegate {
                                         .checked_sub(1);
                                     picker.refresh(window, cx);
                                 }))
-                                .tooltip(|_, cx| {
-                                    Tooltip::simple("Delete Previously Scheduled Task", cx)
-                                }),
+                                .tooltip(|_, cx| Tooltip::simple("Delete from Recent Tasks", cx)),
                         );
                         item.end_slot_on_hover(delete_button)
                     } else {

crates/terminal_view/src/terminal_view.rs 🔗

@@ -850,6 +850,7 @@ impl TerminalView {
 
     fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context<Self>) {
         self.clear_bell(cx);
+        self.blink_manager.update(cx, BlinkManager::pause_blinking);
         self.terminal.update(cx, |term, _| {
             term.input(text.0.to_string().into_bytes());
         });
@@ -858,6 +859,7 @@ impl TerminalView {
     fn send_keystroke(&mut self, text: &SendKeystroke, _: &mut Window, cx: &mut Context<Self>) {
         if let Some(keystroke) = Keystroke::parse(&text.0).log_err() {
             self.clear_bell(cx);
+            self.blink_manager.update(cx, BlinkManager::pause_blinking);
             self.process_keystroke(&keystroke, cx);
         }
     }

crates/theme/src/registry.rs 🔗

@@ -126,6 +126,23 @@ impl ThemeRegistry {
         }
     }
 
+    /// Registers theme families for use in tests.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn register_test_themes(&self, families: impl IntoIterator<Item = ThemeFamily>) {
+        self.insert_theme_families(families);
+    }
+
+    /// Registers icon themes for use in tests.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn register_test_icon_themes(&self, icon_themes: impl IntoIterator<Item = IconTheme>) {
+        let mut state = self.state.write();
+        for icon_theme in icon_themes {
+            state
+                .icon_themes
+                .insert(icon_theme.name.clone(), Arc::new(icon_theme));
+        }
+    }
+
     /// Inserts the given themes into the registry.
     pub fn insert_themes(&self, themes: impl IntoIterator<Item = Theme>) {
         let mut state = self.state.write();

crates/theme_selector/Cargo.toml 🔗

@@ -29,3 +29,7 @@ workspace.workspace = true
 zed_actions.workspace = true
 
 [dev-dependencies]
+editor = { workspace = true, features = ["test-support"] }
+project.workspace = true
+serde_json.workspace = true
+theme = { workspace = true, features = ["test-support"] }

crates/theme_selector/src/icon_theme_selector.rs 🔗

@@ -267,7 +267,10 @@ impl PickerDelegate for IconThemeSelectorDelegate {
                 } else {
                     this.delegate.selected_index = 0;
                 }
-                this.delegate.selected_theme = this.delegate.show_selected_theme(cx);
+                // Preserve the previously selected theme when the filter yields no results.
+                if let Some(theme) = this.delegate.show_selected_theme(cx) {
+                    this.delegate.selected_theme = Some(theme);
+                }
             })
             .log_err();
         })
@@ -335,3 +338,158 @@ impl PickerDelegate for IconThemeSelectorDelegate {
         )
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::collections::HashMap;
+
+    use gpui::{TestAppContext, VisualTestContext};
+    use project::Project;
+    use serde_json::json;
+    use theme::{ChevronIcons, DirectoryIcons, IconTheme, ThemeRegistry};
+    use util::path;
+    use workspace::MultiWorkspace;
+
+    fn init_test(cx: &mut TestAppContext) -> Arc<workspace::AppState> {
+        cx.update(|cx| {
+            let app_state = workspace::AppState::test(cx);
+            settings::init(cx);
+            theme::init(theme::LoadThemes::JustBase, cx);
+            editor::init(cx);
+            crate::init(cx);
+            app_state
+        })
+    }
+
+    fn register_test_icon_themes(cx: &mut TestAppContext) {
+        cx.update(|cx| {
+            let registry = ThemeRegistry::global(cx);
+            let make_icon_theme = |name: &str, appearance: Appearance| IconTheme {
+                id: name.to_lowercase().replace(' ', "-"),
+                name: SharedString::from(name.to_string()),
+                appearance,
+                directory_icons: DirectoryIcons {
+                    collapsed: None,
+                    expanded: None,
+                },
+                named_directory_icons: HashMap::default(),
+                chevron_icons: ChevronIcons {
+                    collapsed: None,
+                    expanded: None,
+                },
+                file_icons: HashMap::default(),
+                file_stems: HashMap::default(),
+                file_suffixes: HashMap::default(),
+            };
+            registry.register_test_icon_themes([
+                make_icon_theme("Test Icons A", Appearance::Dark),
+                make_icon_theme("Test Icons B", Appearance::Dark),
+            ]);
+        });
+    }
+
+    async fn setup_test(cx: &mut TestAppContext) -> Arc<workspace::AppState> {
+        let app_state = init_test(cx);
+        register_test_icon_themes(cx);
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(path!("/test"), json!({}))
+            .await;
+        app_state
+    }
+
+    fn open_icon_theme_selector(
+        workspace: &Entity<workspace::Workspace>,
+        cx: &mut VisualTestContext,
+    ) -> Entity<Picker<IconThemeSelectorDelegate>> {
+        cx.dispatch_action(zed_actions::icon_theme_selector::Toggle {
+            themes_filter: None,
+        });
+        cx.run_until_parked();
+        workspace.update(cx, |workspace, cx| {
+            workspace
+                .active_modal::<IconThemeSelector>(cx)
+                .expect("icon theme selector should be open")
+                .read(cx)
+                .picker
+                .clone()
+        })
+    }
+
+    fn selected_theme_name(
+        picker: &Entity<Picker<IconThemeSelectorDelegate>>,
+        cx: &mut VisualTestContext,
+    ) -> String {
+        picker.read_with(cx, |picker, _| {
+            picker
+                .delegate
+                .matches
+                .get(picker.delegate.selected_index)
+                .expect("selected index should point to a match")
+                .string
+                .clone()
+        })
+    }
+
+    fn previewed_theme_name(
+        _picker: &Entity<Picker<IconThemeSelectorDelegate>>,
+        cx: &mut VisualTestContext,
+    ) -> String {
+        cx.read(|cx| {
+            ThemeSettings::get_global(cx)
+                .icon_theme
+                .name(SystemAppearance::global(cx).0)
+                .0
+                .to_string()
+        })
+    }
+
+    #[gpui::test]
+    async fn test_icon_theme_selector_preserves_selection_on_empty_filter(cx: &mut TestAppContext) {
+        let app_state = setup_test(cx).await;
+        let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace =
+            multi_workspace.read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone());
+        let picker = open_icon_theme_selector(&workspace, cx);
+
+        let target_index = picker.read_with(cx, |picker, _| {
+            picker
+                .delegate
+                .matches
+                .iter()
+                .position(|m| m.string == "Test Icons A")
+                .unwrap()
+        });
+        picker.update_in(cx, |picker, window, cx| {
+            picker.set_selected_index(target_index, None, true, window, cx);
+        });
+        cx.run_until_parked();
+
+        assert_eq!(previewed_theme_name(&picker, cx), "Test Icons A");
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.update_matches("zzz".to_string(), window, cx);
+        });
+        cx.run_until_parked();
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.update_matches("".to_string(), window, cx);
+        });
+        cx.run_until_parked();
+
+        assert_eq!(
+            selected_theme_name(&picker, cx),
+            "Test Icons A",
+            "selected icon theme should be preserved after clearing a filter with no matches"
+        );
+        assert_eq!(
+            previewed_theme_name(&picker, cx),
+            "Test Icons A",
+            "previewed icon theme should be preserved after clearing a filter with no matches"
+        );
+    }
+}

crates/theme_selector/src/theme_selector.rs 🔗

@@ -455,7 +455,10 @@ impl PickerDelegate for ThemeSelectorDelegate {
                 } else {
                     this.delegate.selected_index = 0;
                 }
-                this.delegate.selected_theme = this.delegate.show_selected_theme(cx);
+                // Preserve the previously selected theme when the filter yields no results.
+                if let Some(theme) = this.delegate.show_selected_theme(cx) {
+                    this.delegate.selected_theme = Some(theme);
+                }
             })
             .log_err();
         })
@@ -523,3 +526,151 @@ impl PickerDelegate for ThemeSelectorDelegate {
         )
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use gpui::{TestAppContext, VisualTestContext};
+    use project::Project;
+    use serde_json::json;
+    use theme::{Appearance, ThemeFamily, ThemeRegistry, default_color_scales};
+    use util::path;
+    use workspace::MultiWorkspace;
+
+    fn init_test(cx: &mut TestAppContext) -> Arc<workspace::AppState> {
+        cx.update(|cx| {
+            let app_state = workspace::AppState::test(cx);
+            settings::init(cx);
+            theme::init(theme::LoadThemes::JustBase, cx);
+            editor::init(cx);
+            super::init(cx);
+            app_state
+        })
+    }
+
+    fn register_test_themes(cx: &mut TestAppContext) {
+        cx.update(|cx| {
+            let registry = ThemeRegistry::global(cx);
+            let base_theme = registry.get("One Dark").unwrap();
+
+            let mut test_light = (*base_theme).clone();
+            test_light.id = "test-light".to_string();
+            test_light.name = "Test Light".into();
+            test_light.appearance = Appearance::Light;
+
+            let mut test_dark_a = (*base_theme).clone();
+            test_dark_a.id = "test-dark-a".to_string();
+            test_dark_a.name = "Test Dark A".into();
+
+            let mut test_dark_b = (*base_theme).clone();
+            test_dark_b.id = "test-dark-b".to_string();
+            test_dark_b.name = "Test Dark B".into();
+
+            registry.register_test_themes([ThemeFamily {
+                id: "test-family".to_string(),
+                name: "Test Family".into(),
+                author: "test".into(),
+                themes: vec![test_light, test_dark_a, test_dark_b],
+                scales: default_color_scales(),
+            }]);
+        });
+    }
+
+    async fn setup_test(cx: &mut TestAppContext) -> Arc<workspace::AppState> {
+        let app_state = init_test(cx);
+        register_test_themes(cx);
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(path!("/test"), json!({}))
+            .await;
+        app_state
+    }
+
+    fn open_theme_selector(
+        workspace: &Entity<workspace::Workspace>,
+        cx: &mut VisualTestContext,
+    ) -> Entity<Picker<ThemeSelectorDelegate>> {
+        cx.dispatch_action(zed_actions::theme_selector::Toggle {
+            themes_filter: None,
+        });
+        cx.run_until_parked();
+        workspace.update(cx, |workspace, cx| {
+            workspace
+                .active_modal::<ThemeSelector>(cx)
+                .expect("theme selector should be open")
+                .read(cx)
+                .picker
+                .clone()
+        })
+    }
+
+    fn selected_theme_name(
+        picker: &Entity<Picker<ThemeSelectorDelegate>>,
+        cx: &mut VisualTestContext,
+    ) -> String {
+        picker.read_with(cx, |picker, _| {
+            picker
+                .delegate
+                .matches
+                .get(picker.delegate.selected_index)
+                .expect("selected index should point to a match")
+                .string
+                .clone()
+        })
+    }
+
+    fn previewed_theme_name(
+        picker: &Entity<Picker<ThemeSelectorDelegate>>,
+        cx: &mut VisualTestContext,
+    ) -> String {
+        picker.read_with(cx, |picker, _| picker.delegate.new_theme.name.to_string())
+    }
+
+    #[gpui::test]
+    async fn test_theme_selector_preserves_selection_on_empty_filter(cx: &mut TestAppContext) {
+        let app_state = setup_test(cx).await;
+        let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace =
+            multi_workspace.read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone());
+        let picker = open_theme_selector(&workspace, cx);
+
+        let target_index = picker.read_with(cx, |picker, _| {
+            picker
+                .delegate
+                .matches
+                .iter()
+                .position(|m| m.string == "Test Light")
+                .unwrap()
+        });
+        picker.update_in(cx, |picker, window, cx| {
+            picker.set_selected_index(target_index, None, true, window, cx);
+        });
+        cx.run_until_parked();
+
+        assert_eq!(previewed_theme_name(&picker, cx), "Test Light");
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.update_matches("zzz".to_string(), window, cx);
+        });
+        cx.run_until_parked();
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.update_matches("".to_string(), window, cx);
+        });
+        cx.run_until_parked();
+
+        assert_eq!(
+            selected_theme_name(&picker, cx),
+            "Test Light",
+            "selected theme should be preserved after clearing a filter with no matches"
+        );
+        assert_eq!(
+            previewed_theme_name(&picker, cx),
+            "Test Light",
+            "previewed theme should be preserved after clearing a filter with no matches"
+        );
+    }
+}

crates/ui/src/components/ai.rs 🔗

@@ -1,7 +1,11 @@
+mod agent_setup_button;
 mod ai_setting_item;
 mod configured_api_card;
+mod parallel_agents_illustration;
 mod thread_item;
 
+pub use agent_setup_button::*;
 pub use ai_setting_item::*;
 pub use configured_api_card::*;
+pub use parallel_agents_illustration::*;
 pub use thread_item::*;

crates/ui/src/components/ai/agent_setup_button.rs 🔗

@@ -0,0 +1,110 @@
+use crate::prelude::*;
+use gpui::{ClickEvent, SharedString};
+
+#[derive(IntoElement, RegisterComponent)]
+pub struct AgentSetupButton {
+    id: ElementId,
+    icon: Option<Icon>,
+    name: Option<SharedString>,
+    state: Option<AnyElement>,
+    disabled: bool,
+    on_click: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
+}
+
+impl AgentSetupButton {
+    pub fn new(id: impl Into<ElementId>) -> Self {
+        Self {
+            id: id.into(),
+            icon: None,
+            name: None,
+            state: None,
+            disabled: false,
+            on_click: None,
+        }
+    }
+
+    pub fn icon(mut self, icon: Icon) -> Self {
+        self.icon = Some(icon);
+        self
+    }
+
+    pub fn name(mut self, name: impl Into<SharedString>) -> Self {
+        self.name = Some(name.into());
+        self
+    }
+
+    pub fn state(mut self, element: impl IntoElement) -> Self {
+        self.state = Some(element.into_any_element());
+        self
+    }
+
+    pub fn disabled(mut self, disabled: bool) -> Self {
+        self.disabled = disabled;
+        self
+    }
+
+    pub fn on_click(
+        mut self,
+        handler: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static,
+    ) -> Self {
+        self.on_click = Some(Box::new(handler));
+        self
+    }
+}
+
+impl Component for AgentSetupButton {
+    fn scope() -> ComponentScope {
+        ComponentScope::Agent
+    }
+
+    fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
+        None
+    }
+}
+
+impl RenderOnce for AgentSetupButton {
+    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
+        let is_clickable = !self.disabled && self.on_click.is_some();
+
+        let has_top_section = self.icon.is_some() || self.name.is_some();
+        let top_section = has_top_section.then(|| {
+            h_flex()
+                .p_1p5()
+                .gap_1()
+                .justify_center()
+                .when_some(self.icon, |this, icon| this.child(icon))
+                .when_some(self.name, |this, name| {
+                    this.child(Label::new(name).size(LabelSize::Small))
+                })
+        });
+
+        let bottom_section = self.state.map(|state_element| {
+            h_flex()
+                .p_0p5()
+                .h_full()
+                .justify_center()
+                .border_t_1()
+                .border_color(cx.theme().colors().border_variant)
+                .bg(cx.theme().colors().element_background.opacity(0.5))
+                .child(state_element)
+        });
+
+        v_flex()
+            .id(self.id)
+            .border_1()
+            .border_color(cx.theme().colors().border_variant)
+            .rounded_sm()
+            .when(is_clickable, |this| {
+                this.cursor_pointer().hover(|style| {
+                    style
+                        .bg(cx.theme().colors().element_hover)
+                        .border_color(cx.theme().colors().border)
+                })
+            })
+            .when_some(top_section, |this, section| this.child(section))
+            .when_some(bottom_section, |this, section| this.child(section))
+            .when_some(self.on_click.filter(|_| is_clickable), |this, on_click| {
+                this.on_click(on_click)
+            })
+    }
+}

crates/ui/src/components/ai/parallel_agents_illustration.rs 🔗

@@ -0,0 +1,149 @@
+use crate::{DiffStat, Divider, prelude::*};
+use gpui::{Animation, AnimationExt, pulsating_between};
+use std::time::Duration;
+
+#[derive(IntoElement)]
+pub struct ParallelAgentsIllustration;
+
+impl ParallelAgentsIllustration {
+    pub fn new() -> Self {
+        Self
+    }
+}
+
+impl RenderOnce for ParallelAgentsIllustration {
+    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
+        let icon_container = || h_flex().size_4().flex_shrink_0().justify_center();
+
+        let title_bar = |id: &'static str, width: DefiniteLength, duration_ms: u64| {
+            div()
+                .h_2()
+                .w(width)
+                .rounded_full()
+                .debug_bg_blue()
+                .bg(cx.theme().colors().element_selected)
+                .with_animation(
+                    id,
+                    Animation::new(Duration::from_millis(duration_ms))
+                        .repeat()
+                        .with_easing(pulsating_between(0.4, 0.8)),
+                    |label, delta| label.opacity(delta),
+                )
+        };
+
+        let time =
+            |time: SharedString| Label::new(time).size(LabelSize::XSmall).color(Color::Muted);
+
+        let worktree = |worktree: SharedString| {
+            h_flex()
+                .gap_1()
+                .child(
+                    Icon::new(IconName::GitWorktree)
+                        .color(Color::Muted)
+                        .size(IconSize::XSmall),
+                )
+                .child(
+                    Label::new(worktree)
+                        .size(LabelSize::XSmall)
+                        .color(Color::Muted),
+                )
+        };
+
+        let dot_separator = || {
+            Label::new("•")
+                .size(LabelSize::Small)
+                .color(Color::Muted)
+                .alpha(0.5)
+        };
+
+        let agent = |id: &'static str,
+                     icon: IconName,
+                     width: DefiniteLength,
+                     duration_ms: u64,
+                     data: Vec<AnyElement>| {
+            v_flex()
+                .p_2()
+                .child(
+                    h_flex()
+                        .w_full()
+                        .gap_2()
+                        .child(
+                            icon_container()
+                                .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)),
+                        )
+                        .child(title_bar(id, width, duration_ms)),
+                )
+                .child(
+                    h_flex()
+                        .opacity(0.8)
+                        .w_full()
+                        .gap_2()
+                        .child(icon_container())
+                        .children(data),
+                )
+        };
+
+        let agents = v_flex()
+            .absolute()
+            .w(rems_from_px(380.))
+            .top_8()
+            .rounded_t_sm()
+            .border_1()
+            .border_color(cx.theme().colors().border.opacity(0.5))
+            .bg(cx.theme().colors().elevated_surface_background)
+            .shadow_md()
+            .child(agent(
+                "zed-agent-bar",
+                IconName::ZedAgent,
+                relative(0.7),
+                1800,
+                vec![
+                    worktree("happy-tree".into()).into_any_element(),
+                    dot_separator().into_any_element(),
+                    DiffStat::new("ds", 23, 13)
+                        .label_size(LabelSize::XSmall)
+                        .into_any_element(),
+                    dot_separator().into_any_element(),
+                    time("2m".into()).into_any_element(),
+                ],
+            ))
+            .child(Divider::horizontal())
+            .child(agent(
+                "claude-bar",
+                IconName::AiClaude,
+                relative(0.85),
+                2400,
+                vec![
+                    DiffStat::new("ds-claude", 120, 84)
+                        .label_size(LabelSize::XSmall)
+                        .into_any_element(),
+                    dot_separator().into_any_element(),
+                    time("16m".into()).into_any_element(),
+                ],
+            ))
+            .child(Divider::horizontal())
+            .child(agent(
+                "openai-bar",
+                IconName::AiOpenAi,
+                relative(0.4),
+                3100,
+                vec![
+                    worktree("silent-forest".into()).into_any_element(),
+                    dot_separator().into_any_element(),
+                    time("37m".into()).into_any_element(),
+                ],
+            ))
+            .child(Divider::horizontal());
+
+        h_flex()
+            .relative()
+            .h(rems_from_px(180.))
+            // (redundant `.bg(editor_background)` removed; the later `.bg()` call below overrides it)
+            .justify_center()
+            .items_end()
+            .rounded_t_md()
+            .overflow_hidden()
+            .bg(gpui::black().opacity(0.2))
+            .child(agents)
+    }
+}

crates/ui/src/components/ai/thread_item.rs 🔗

@@ -16,11 +16,19 @@ pub enum AgentThreadStatus {
     Error,
 }
 
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
+pub enum WorktreeKind {
+    #[default]
+    Main,
+    Linked,
+}
+
 #[derive(Clone)]
 pub struct ThreadItemWorktreeInfo {
     pub name: SharedString,
     pub full_path: SharedString,
     pub highlight_positions: Vec<usize>,
+    pub kind: WorktreeKind,
 }
 
 #[derive(IntoElement, RegisterComponent)]
@@ -359,7 +367,10 @@ impl RenderOnce for ThreadItem {
 
         let has_project_name = self.project_name.is_some();
         let has_project_paths = project_paths.is_some();
-        let has_worktree = !self.worktrees.is_empty();
+        let has_worktree = self
+            .worktrees
+            .iter()
+            .any(|wt| wt.kind == WorktreeKind::Linked);
         let has_timestamp = !self.timestamp.is_empty();
         let timestamp = self.timestamp;
 
@@ -449,6 +460,10 @@ impl RenderOnce for ThreadItem {
                             continue;
                         }
 
+                        if wt.kind == WorktreeKind::Main {
+                            continue;
+                        }
+
                         let chip_index = seen_names.len();
                         seen_names.push(wt.name.clone());
 
@@ -624,6 +639,7 @@ impl Component for ThreadItem {
                                 name: "link-agent-panel".into(),
                                 full_path: "link-agent-panel".into(),
                                 highlight_positions: Vec::new(),
+                                kind: WorktreeKind::Linked,
                             }]),
                     )
                     .into_any_element(),
@@ -650,6 +666,7 @@ impl Component for ThreadItem {
                                 name: "my-project".into(),
                                 full_path: "my-project".into(),
                                 highlight_positions: Vec::new(),
+                                kind: WorktreeKind::Linked,
                             }])
                             .added(42)
                             .removed(17)
@@ -729,6 +746,7 @@ impl Component for ThreadItem {
                                 name: "my-project-name".into(),
                                 full_path: "my-project-name".into(),
                                 highlight_positions: vec![3, 4, 5, 6, 7, 8, 9, 10, 11],
+                                kind: WorktreeKind::Linked,
                             }]),
                     )
                     .into_any_element(),

crates/ui/src/components/collab/collab_notification.rs 🔗

@@ -67,7 +67,7 @@ impl Component for CollabNotification {
         let avatar = "https://avatars.githubusercontent.com/u/67129314?v=4";
         let container = || div().h(px(72.)).w(px(400.)); // Size of the actual notification window
 
-        let examples = vec![
+        let call_examples = vec![
             single_example(
                 "Incoming Call",
                 container()
@@ -129,6 +129,58 @@ impl Component for CollabNotification {
             ),
         ];
 
-        Some(example_group(examples).vertical().into_any_element())
+        let toast_examples = vec![
+            single_example(
+                "Contact Request",
+                container()
+                    .child(
+                        CollabNotification::new(
+                            avatar,
+                            Button::new("accept", "Accept"),
+                            Button::new("decline", "Decline"),
+                        )
+                        .child(Label::new("maxbrunsfeld wants to add you as a contact")),
+                    )
+                    .into_any_element(),
+            ),
+            single_example(
+                "Contact Request Accepted",
+                container()
+                    .child(
+                        CollabNotification::new(
+                            avatar,
+                            Button::new("dismiss", "Dismiss"),
+                            Button::new("close", "Close"),
+                        )
+                        .child(Label::new("maxbrunsfeld accepted your contact request")),
+                    )
+                    .into_any_element(),
+            ),
+            single_example(
+                "Channel Invitation",
+                container()
+                    .child(
+                        CollabNotification::new(
+                            avatar,
+                            Button::new("accept", "Accept"),
+                            Button::new("decline", "Decline"),
+                        )
+                        .child(Label::new(
+                            "maxbrunsfeld invited you to join the #zed channel",
+                        )),
+                    )
+                    .into_any_element(),
+            ),
+        ];
+
+        Some(
+            v_flex()
+                .gap_6()
+                .child(example_group_with_title("Calls & Projects", call_examples).vertical())
+                .child(
+                    example_group_with_title("Contact & Channel Toasts", toast_examples).vertical(),
+                )
+                .into_any_element(),
+        )
     }
 }

crates/ui/src/components/image.rs 🔗

@@ -16,10 +16,11 @@ pub enum VectorName {
     AcpGrid,
     AcpLogo,
     AcpLogoSerif,
-    AiGrid,
+    BusinessStamp,
     Grid,
     ProTrialStamp,
     ProUserStamp,
+    StudentStamp,
     ZedLogo,
     ZedXCopilot,
 }

crates/ui/src/components/list/list_item.rs 🔗

@@ -52,7 +52,7 @@ pub struct ListItem {
     overflow_x: bool,
     focused: Option<bool>,
     docked_right: bool,
-    height: Option<Pixels>,
+    height: Option<DefiniteLength>,
 }
 
 impl ListItem {
@@ -207,8 +207,8 @@ impl ListItem {
         self
     }
 
-    pub fn height(mut self, height: Pixels) -> Self {
-        self.height = Some(height);
+    pub fn height(mut self, height: impl Into<DefiniteLength>) -> Self {
+        self.height = Some(height.into());
         self
     }
 }

crates/ui/src/components/notification/announcement_toast.rs 🔗

@@ -26,9 +26,9 @@ impl AnnouncementToast {
             heading: None,
             description: None,
             bullet_items: SmallVec::new(),
-            primary_action_label: "Learn More".into(),
+            primary_action_label: "Try Now".into(),
             primary_on_click: Box::new(|_, _, _| {}),
-            secondary_action_label: "View Release Notes".into(),
+            secondary_action_label: "Learn More".into(),
             secondary_on_click: Box::new(|_, _, _| {}),
             dismiss_on_click: Box::new(|_, _, _| {}),
         }
@@ -134,12 +134,13 @@ impl RenderOnce for AnnouncementToast {
                             .gap_1()
                             .child(
                                 Button::new("try-now", self.primary_action_label)
-                                    .style(ButtonStyle::Outlined)
+                                    .style(ButtonStyle::Tinted(crate::TintColor::Accent))
                                     .full_width()
                                     .on_click(self.primary_on_click),
                             )
                             .child(
                                 Button::new("release-notes", self.secondary_action_label)
+                                    .style(ButtonStyle::OutlinedGhost)
                                     .full_width()
                                     .on_click(self.secondary_on_click),
                             ),
@@ -208,19 +209,26 @@ impl Component for AnnouncementToast {
 
         let examples = vec![single_example(
             "Basic",
-            div().w_80().child(
-                AnnouncementToast::new()
-                    .illustration(illustration)
-                    .heading("What's new in Zed")
-                    .description(
-                        "This version comes in with some changes to the workspace for a better experience.",
-                    )
-                    .bullet_item(ListBulletItem::new("Improved agent performance"))
-                    .bullet_item(ListBulletItem::new("New agentic features"))
-                    .bullet_item(ListBulletItem::new("Better agent capabilities"))
-
-            )
-            .into_any_element(),
+            div()
+                .w_80()
+                .child(
+                    AnnouncementToast::new()
+                        .illustration(illustration)
+                        .heading("Introducing Parallel Agents")
+                        .description("Run multiple agent threads simultaneously across projects.")
+                        .bullet_item(ListBulletItem::new(
+                            "Mix and match Zed's agent with any ACP-compatible agent",
+                        ))
+                        .bullet_item(ListBulletItem::new(
+                            "Optional worktree isolation keeps agents from conflicting",
+                        ))
+                        .bullet_item(ListBulletItem::new(
+                            "Updated workspace layout designed for agentic workflows",
+                        ))
+                        .primary_action_label("Try Now")
+                        .secondary_action_label("Learn More"),
+                )
+                .into_any_element(),
         )];
 
         Some(

crates/vim/src/command.rs 🔗

@@ -1782,7 +1782,6 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
         VimCommand::str(("te", "rm"), "terminal_panel::Toggle"),
         VimCommand::str(("T", "erm"), "terminal_panel::Toggle"),
         VimCommand::str(("C", "ollab"), "collab_panel::ToggleFocus"),
-        VimCommand::str(("No", "tifications"), "notification_panel::ToggleFocus"),
         VimCommand::str(("A", "I"), "agent::ToggleFocus"),
         VimCommand::str(("G", "it"), "git_panel::ToggleFocus"),
         VimCommand::str(("D", "ebug"), "debug_panel::ToggleFocus"),

crates/vim/src/motion.rs 🔗

@@ -7,7 +7,7 @@ use editor::{
     },
 };
 use gpui::{Action, Context, Window, actions, px};
-use language::{CharKind, Point, Selection, SelectionGoal};
+use language::{CharKind, Point, Selection, SelectionGoal, TextObject, TreeSitterOptions};
 use multi_buffer::MultiBufferRow;
 use schemars::JsonSchema;
 use serde::Deserialize;
@@ -2451,6 +2451,10 @@ fn find_matching_bracket_text_based(
         .take_while(|(_, char_offset)| *char_offset < line_range.end)
         .find_map(|(ch, char_offset)| get_bracket_pair(ch).map(|info| (info, char_offset)));
 
+    if bracket_info.is_none() {
+        return find_matching_c_preprocessor_directive(map, line_range);
+    }
+
     let (open, close, is_opening) = bracket_info?.0;
     let bracket_offset = bracket_info?.1;
 
@@ -2482,6 +2486,122 @@ fn find_matching_bracket_text_based(
     None
 }
 
/// Fallback for the `%` motion when the current line contains no bracket:
/// if the line begins with a C preprocessor conditional directive
/// (`#if`/`#ifdef`/`#ifndef`, `#elif`, `#else`, or `#endif`), returns the
/// buffer offset of the matching directive, mirroring Vim's `%` cycling
/// (#if -> #elif/#else -> #endif -> back to #if).
///
/// Returns `None` when the line is not a conditional directive or no
/// matching directive is found.
fn find_matching_c_preprocessor_directive(
    map: &DisplaySnapshot,
    line_range: Range<MultiBufferOffset>,
) -> Option<MultiBufferOffset> {
    // First non-whitespace characters of the current line. Six characters are
    // enough to distinguish "#if", "#else", "#elif", and "#endif"; the "#if"
    // prefix also covers "#ifdef" and "#ifndef".
    let line_start = map
        .buffer_chars_at(line_range.start)
        .skip_while(|(c, _)| *c == ' ' || *c == '\t')
        .map(|(c, _)| c)
        .take(6)
        .collect::<String>();

    if line_start.starts_with("#if")
        || line_start.starts_with("#else")
        || line_start.starts_with("#elif")
    {
        // Scan forward line by line, tracking the nesting depth of inner
        // #if/#endif pairs so nested conditionals are skipped over.
        let mut depth = 0i32;
        for (ch, char_offset) in map.buffer_chars_at(line_range.end) {
            if ch != '\n' {
                continue;
            }
            // A '\n' at `char_offset` means the next line starts right after it.
            let mut line_offset = char_offset + '\n'.len_utf8();

            // Skip leading whitespace
            map.buffer_chars_at(line_offset)
                .take_while(|(c, _)| *c == ' ' || *c == '\t')
                .for_each(|(_, _)| line_offset += 1);

            // Check what directive starts the next line
            let next_line_start = map
                .buffer_chars_at(line_offset)
                .map(|(c, _)| c)
                .take(6)
                .collect::<String>();

            if next_line_start.starts_with("#if") {
                depth += 1;
            } else if next_line_start.starts_with("#endif") {
                if depth > 0 {
                    depth -= 1;
                } else {
                    return Some(line_offset);
                }
            } else if next_line_start.starts_with("#else") || next_line_start.starts_with("#elif") {
                // Only stop on an #else/#elif that belongs to the directive we
                // started from (depth 0); nested ones fall through.
                if depth == 0 {
                    return Some(line_offset);
                }
            }
        }
    } else if line_start.starts_with("#endif") {
        // Starting from #endif, scan backward for the matching #if, again
        // tracking depth to skip nested #if/#endif pairs.
        let mut depth = 0i32;
        for (ch, char_offset) in
            map.reverse_buffer_chars_at(line_range.start.saturating_sub_usize(1))
        {
            // A '\n' at `char_offset` marks the start of the line that follows
            // it; offset 0 is the start of the very first line of the buffer.
            let mut line_offset = if char_offset == MultiBufferOffset(0) {
                MultiBufferOffset(0)
            } else if ch != '\n' {
                continue;
            } else {
                char_offset + '\n'.len_utf8()
            };

            // Skip leading whitespace
            map.buffer_chars_at(line_offset)
                .take_while(|(c, _)| *c == ' ' || *c == '\t')
                .for_each(|(_, _)| line_offset += 1);

            // Check what directive starts this line
            let line_start = map
                .buffer_chars_at(line_offset)
                .skip_while(|(c, _)| *c == ' ' || *c == '\t')
                .map(|(c, _)| c)
                .take(6)
                .collect::<String>();

            if line_start.starts_with("\n\n") {
                // empty line
                // NOTE(review): whitespace was already skipped above, so a
                // single empty line yields a string starting with one '\n';
                // "\n\n" only matches two consecutive blank lines. This is
                // harmless (a non-directive line falls through and continues
                // anyway), but it looks like `starts_with('\n')` was
                // intended — confirm.
                continue;
            } else if line_start.starts_with("#endif") {
                depth += 1;
            } else if line_start.starts_with("#if") {
                if depth > 0 {
                    depth -= 1;
                } else {
                    return Some(line_offset);
                }
            }
        }
    }
    None
}
+
+fn comment_delimiter_pair(
+    map: &DisplaySnapshot,
+    offset: MultiBufferOffset,
+) -> Option<(Range<MultiBufferOffset>, Range<MultiBufferOffset>)> {
+    let snapshot = map.buffer_snapshot();
+    snapshot
+        .text_object_ranges(offset..offset, TreeSitterOptions::default())
+        .find_map(|(range, obj)| {
+            if !matches!(obj, TextObject::InsideComment | TextObject::AroundComment)
+                || !range.contains(&offset)
+            {
+                return None;
+            }
+
+            let mut chars = snapshot.chars_at(range.start);
+            if (Some('/'), Some('*')) != (chars.next(), chars.next()) {
+                return None;
+            }
+
+            let open_range = range.start..range.start + 2usize;
+            let close_range = range.end - 2..range.end;
+            Some((open_range, close_range))
+        })
+}
+
 fn matching(
     map: &DisplaySnapshot,
     display_point: DisplayPoint,
@@ -2609,6 +2729,32 @@ fn matching(
             continue;
         }
 
+        if let Some((open_range, close_range)) = comment_delimiter_pair(map, offset) {
+            if open_range.contains(&offset) {
+                return close_range.start.to_display_point(map);
+            }
+
+            if close_range.contains(&offset) {
+                return open_range.start.to_display_point(map);
+            }
+
+            let open_candidate = (open_range.start >= offset
+                && line_range.contains(&open_range.start))
+            .then_some((open_range.start.saturating_sub(offset), close_range.start));
+
+            let close_candidate = (close_range.start >= offset
+                && line_range.contains(&close_range.start))
+            .then_some((close_range.start.saturating_sub(offset), open_range.start));
+
+            if let Some((_, destination)) = [open_candidate, close_candidate]
+                .into_iter()
+                .flatten()
+                .min_by_key(|(distance, _)| *distance)
+            {
+                return destination.to_display_point(map);
+            }
+        }
+
         closest_pair_destination
             .map(|destination| destination.to_display_point(map))
             .unwrap_or_else(|| {
@@ -3497,6 +3643,119 @@ mod test {
         );
     }
 
    #[gpui::test]
    async fn test_matching_comments(cx: &mut gpui::TestAppContext) {
        // `%` on a block-comment delimiter should jump between `/*` and `*/`;
        // behavior is cross-checked against Neovim via the shared context.
        let mut cx = NeovimBackedTestContext::new(cx).await;

        cx.set_shared_state(indoc! {r"ˇ/*
          this is a comment
        */"})
            .await;
        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"/*
          this is a comment
        ˇ*/"});
        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"ˇ/*
          this is a comment
        */"});
        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"/*
          this is a comment
        ˇ*/"});

        // A `//` line comment has no delimiter pair, so `%` is a no-op.
        cx.set_shared_state("ˇ// comment").await;
        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq("ˇ// comment");
    }
+
    #[gpui::test]
    async fn test_matching_preprocessor_directives(cx: &mut gpui::TestAppContext) {
        // `%` on C preprocessor conditionals should cycle
        // #if -> #else -> #endif -> #if, matching Neovim's behavior.
        let mut cx = NeovimBackedTestContext::new(cx).await;

        // Flat #if/#else/#endif: repeated `%` cycles through all three.
        cx.set_shared_state(indoc! {r"#ˇif

            #else

            #endif
            "})
            .await;
        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"#if

          ˇ#else

          #endif
          "});

        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"#if

          #else

          ˇ#endif
          "});

        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"ˇ#if

          #else

          #endif
          "});

        // Nested conditionals: `%` must skip over the inner #if/#endif pair
        // and land on the outer #else.
        cx.set_shared_state(indoc! {r"
            #ˇif
              #if

              #else

              #endif

            #else
            #endif
            "})
            .await;

        cx.simulate_shared_keystrokes("%").await;
        cx.shared_state().await.assert_eq(indoc! {r"
            #if
              #if

              #else

              #endif

            ˇ#else
            #endif
            "});

        // Two more `%` presses wrap around to the outer #if.
        cx.simulate_shared_keystrokes("% %").await;
        cx.shared_state().await.assert_eq(indoc! {r"
            ˇ#if
              #if

              #else

              #endif

            #else
            #endif
            "});
        // From the inner #if, a full cycle of `%` returns to the inner #if.
        cx.simulate_shared_keystrokes("j % % %").await;
        cx.shared_state().await.assert_eq(indoc! {r"
            #if
              ˇ#if

              #else

              #endif

            #else
            #endif
            "});
    }
+
     #[gpui::test]
     async fn test_unmatched_forward(cx: &mut gpui::TestAppContext) {
         let mut cx = NeovimBackedTestContext::new(cx).await;

crates/vim/src/normal/search.rs 🔗

@@ -245,7 +245,7 @@ impl Vim {
 
             search_bar.set_replacement(None, cx);
             let mut options = SearchOptions::NONE;
-            if action.regex {
+            if action.regex && VimSettings::get_global(cx).use_regex_search {
                 options |= SearchOptions::REGEX;
             }
             if action.backwards {
@@ -1446,4 +1446,66 @@ mod test {
         // The cursor should be at the match location on line 3 (row 2).
         cx.assert_state("hello world\nfoo bar\nhello ˇagain\n", Mode::Normal);
     }
+
    #[gpui::test]
    async fn test_vim_search_respects_search_settings(cx: &mut gpui::TestAppContext) {
        // `/` should open buffer search with the REGEX option off when the
        // `use_regex_search` vim setting is false, and on when it is true.
        let mut cx = VimTestContext::new(cx, true).await;

        // Disable regex search via user settings.
        cx.update_global(|store: &mut SettingsStore, cx| {
            store.update_user_settings(cx, |settings| {
                settings.vim.get_or_insert_default().use_regex_search = Some(false);
            });
        });

        cx.set_state("ˇcontent", Mode::Normal);
        cx.simulate_keystrokes("/");
        cx.run_until_parked();

        // Verify search options are set from settings
        let search_bar = cx.workspace(|workspace, _, cx| {
            workspace
                .active_pane()
                .read(cx)
                .toolbar()
                .read(cx)
                .item_of_type::<BufferSearchBar>()
                .expect("Buffer search bar should be active")
        });

        cx.update_entity(search_bar, |bar, _window, _cx| {
            assert!(
                !bar.has_search_option(search::SearchOptions::REGEX),
                "Vim search open without regex mode"
            );
        });

        // Dismiss the search bar before flipping the setting.
        cx.simulate_keystrokes("escape");
        cx.run_until_parked();

        // Re-enable regex search and open search again.
        cx.update_global(|store: &mut SettingsStore, cx| {
            store.update_user_settings(cx, |settings| {
                settings.vim.get_or_insert_default().use_regex_search = Some(true);
            });
        });

        cx.simulate_keystrokes("/");
        cx.run_until_parked();

        let search_bar = cx.workspace(|workspace, _, cx| {
            workspace
                .active_pane()
                .read(cx)
                .toolbar()
                .read(cx)
                .item_of_type::<BufferSearchBar>()
                .expect("Buffer search bar should be active")
        });

        cx.update_entity(search_bar, |bar, _window, _cx| {
            assert!(
                bar.has_search_option(search::SearchOptions::REGEX),
                "Vim search opens with regex mode"
            );
        });
    }
 }

crates/vim/src/state.rs 🔗

@@ -17,7 +17,7 @@ use gpui::{
     Action, App, AppContext, BorrowAppContext, ClipboardEntry, ClipboardItem, DismissEvent, Entity,
     EntityId, Global, HighlightStyle, StyledText, Subscription, Task, TextStyle, WeakEntity,
 };
-use language::{Buffer, BufferEvent, BufferId, Chunk, Point};
+use language::{Buffer, BufferEvent, BufferId, Chunk, LanguageAwareStyling, Point};
 
 use multi_buffer::MultiBufferRow;
 use picker::{Picker, PickerDelegate};
@@ -1504,7 +1504,10 @@ impl PickerDelegate for MarksViewDelegate {
                                     position.row,
                                     snapshot.line_len(MultiBufferRow(position.row)),
                                 ),
-                            true,
+                            LanguageAwareStyling {
+                                tree_sitter: true,
+                                diagnostics: true,
+                            },
                         );
                         matches.push(MarksMatch {
                             name: name.clone(),
@@ -1530,7 +1533,10 @@ impl PickerDelegate for MarksViewDelegate {
                             let chunks = snapshot.chunks(
                                 Point::new(position.row, 0)
                                     ..Point::new(position.row, snapshot.line_len(position.row)),
-                                true,
+                                LanguageAwareStyling {
+                                    tree_sitter: true,
+                                    diagnostics: true,
+                                },
                             );
 
                             matches.push(MarksMatch {

crates/vim/src/vim.rs 🔗

@@ -2141,6 +2141,7 @@ struct VimSettings {
     pub toggle_relative_line_numbers: bool,
     pub use_system_clipboard: settings::UseSystemClipboard,
     pub use_smartcase_find: bool,
+    pub use_regex_search: bool,
     pub gdefault: bool,
     pub custom_digraphs: HashMap<String, Arc<str>>,
     pub highlight_on_yank_duration: u64,
@@ -2227,6 +2228,7 @@ impl Settings for VimSettings {
             toggle_relative_line_numbers: vim.toggle_relative_line_numbers.unwrap(),
             use_system_clipboard: vim.use_system_clipboard.unwrap(),
             use_smartcase_find: vim.use_smartcase_find.unwrap(),
+            use_regex_search: vim.use_regex_search.unwrap(),
             gdefault: vim.gdefault.unwrap(),
             custom_digraphs: vim.custom_digraphs.unwrap(),
             highlight_on_yank_duration: vim.highlight_on_yank_duration.unwrap(),

crates/vim/test_data/test_matching_comments.json 🔗

@@ -0,0 +1,10 @@
+{"Put":{"state":"ˇ/*\n  this is a comment\n*/"}}
+{"Key":"%"}
+{"Get":{"state":"/*\n  this is a comment\nˇ*/","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"ˇ/*\n  this is a comment\n*/","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"/*\n  this is a comment\nˇ*/","mode":"Normal"}}
+{"Put":{"state":"ˇ// comment"}}
+{"Key":"%"}
+{"Get":{"state":"ˇ// comment","mode":"Normal"}}

crates/vim/test_data/test_matching_preprocessor_directives.json 🔗

@@ -0,0 +1,18 @@
+{"Put":{"state":"#ˇif\n\n#else\n\n#endif\n"}}
+{"Key":"%"}
+{"Get":{"state":"#if\n\nˇ#else\n\n#endif\n","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"#if\n\n#else\n\nˇ#endif\n","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"ˇ#if\n\n#else\n\n#endif\n","mode":"Normal"}}
+{"Put":{"state":"#ˇif\n  #if\n\n  #else\n\n  #endif\n\n#else\n#endif\n"}}
+{"Key":"%"}
+{"Get":{"state":"#if\n  #if\n\n  #else\n\n  #endif\n\nˇ#else\n#endif\n","mode":"Normal"}}
+{"Key":"%"}
+{"Key":"%"}
+{"Get":{"state":"ˇ#if\n  #if\n\n  #else\n\n  #endif\n\n#else\n#endif\n","mode":"Normal"}}
+{"Key":"j"}
+{"Key":"%"}
+{"Key":"%"}
+{"Key":"%"}
+{"Get":{"state":"#if\n  ˇ#if\n\n  #else\n\n  #endif\n\n#else\n#endif\n","mode":"Normal"}}

crates/web_search_providers/Cargo.toml 🔗

@@ -14,6 +14,7 @@ path = "src/web_search_providers.rs"
 [dependencies]
 anyhow.workspace = true
 client.workspace = true
+cloud_api_client.workspace = true
 cloud_api_types.workspace = true
 cloud_llm_client.workspace = true
 futures.workspace = true

crates/web_search_providers/src/cloud.rs 🔗

@@ -2,12 +2,12 @@ use std::sync::Arc;
 
 use anyhow::{Context as _, Result};
 use client::{Client, NeedsLlmTokenRefresh, UserStore, global_llm_token};
+use cloud_api_client::LlmApiToken;
 use cloud_api_types::OrganizationId;
 use cloud_llm_client::{WebSearchBody, WebSearchResponse};
 use futures::AsyncReadExt as _;
 use gpui::{App, AppContext, Context, Entity, Task};
 use http_client::{HttpClient, Method};
-use language_model::LlmApiToken;
 use web_search::{WebSearchProvider, WebSearchProviderId};
 
 pub struct CloudWebSearchProvider {

crates/workspace/src/multi_workspace.rs 🔗

@@ -1,5 +1,4 @@
 use anyhow::Result;
-use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
 use gpui::PathPromptOptions;
 use gpui::{
     AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
@@ -16,7 +15,7 @@ use std::sync::Arc;
 use ui::prelude::*;
 use util::ResultExt;
 use util::path_list::PathList;
-use zed_actions::agents_sidebar::{MoveWorkspaceToNewWindow, ToggleThreadSwitcher};
+use zed_actions::agents_sidebar::ToggleThreadSwitcher;
 
 use agent_settings::AgentSettings;
 use settings::SidebarDockPosition;
@@ -40,7 +39,22 @@ actions!(
         CloseWorkspaceSidebar,
         /// Moves focus to or from the workspace sidebar without closing it.
         FocusWorkspaceSidebar,
-        //TODO: Restore next/previous workspace
+        /// Activates the next project group in the sidebar.
+        NextProjectGroup,
+        /// Activates the previous project group in the sidebar.
+        PreviousProjectGroup,
+        /// Activates the next thread in sidebar order.
+        NextThread,
+        /// Activates the previous thread in sidebar order.
+        PreviousThread,
+        /// Expands the thread list for the current project to show more threads.
+        ShowMoreThreads,
+        /// Collapses the thread list for the current project to show fewer threads.
+        ShowFewerThreads,
+        /// Creates a new thread in the current workspace.
+        NewThread,
+        /// Moves the current workspace's project group to a new window.
+        MoveWorkspaceToNewWindow,
     ]
 );
 
@@ -114,6 +128,21 @@ pub trait Sidebar: Focusable + Render + EventEmitter<SidebarEvent> + Sized {
     ) {
     }
 
    /// Activates the next or previous project group.
    /// The default implementation is a no-op, for sidebars that have no
    /// notion of project groups.
    fn cycle_project_group(
        &mut self,
        _forward: bool,
        _window: &mut Window,
        _cx: &mut Context<Self>,
    ) {
    }

    /// Activates the next or previous thread in sidebar order.
    /// The default implementation is a no-op.
    fn cycle_thread(&mut self, _forward: bool, _window: &mut Window, _cx: &mut Context<Self>) {}

    /// Moves the active workspace's project group to a new window.
    /// The default implementation is a no-op.
    fn move_workspace_to_new_window(&mut self, _window: &mut Window, _cx: &mut Context<Self>) {}
+
     /// Return an opaque JSON blob of sidebar-specific state to persist.
     fn serialized_state(&self, _cx: &App) -> Option<String> {
         None
@@ -139,6 +168,9 @@ pub trait SidebarHandle: 'static + Send + Sync {
     fn to_any(&self) -> AnyView;
     fn entity_id(&self) -> EntityId;
     fn toggle_thread_switcher(&self, select_last: bool, window: &mut Window, cx: &mut App);
+    fn cycle_project_group(&self, forward: bool, window: &mut Window, cx: &mut App);
+    fn cycle_thread(&self, forward: bool, window: &mut Window, cx: &mut App);
+    fn move_workspace_to_new_window(&self, window: &mut Window, cx: &mut App);
 
     fn is_threads_list_view_active(&self, cx: &App) -> bool;
 
@@ -199,6 +231,33 @@ impl<T: Sidebar> SidebarHandle for Entity<T> {
         });
     }
 
    /// Forwards to [`Sidebar::cycle_project_group`] on the wrapped entity.
    fn cycle_project_group(&self, forward: bool, window: &mut Window, cx: &mut App) {
        let entity = self.clone();
        // Deferred so the entity update runs after the current window
        // update pass, the same pattern the other handle methods use.
        window.defer(cx, move |window, cx| {
            entity.update(cx, |this, cx| {
                this.cycle_project_group(forward, window, cx);
            });
        });
    }
+
    /// Forwards to [`Sidebar::cycle_thread`] on the wrapped entity.
    fn cycle_thread(&self, forward: bool, window: &mut Window, cx: &mut App) {
        let entity = self.clone();
        // Deferred so the entity update runs after the current window
        // update pass.
        window.defer(cx, move |window, cx| {
            entity.update(cx, |this, cx| {
                this.cycle_thread(forward, window, cx);
            });
        });
    }
+
    /// Forwards to [`Sidebar::move_workspace_to_new_window`] on the wrapped entity.
    fn move_workspace_to_new_window(&self, window: &mut Window, cx: &mut App) {
        let entity = self.clone();
        // Deferred so the entity update runs after the current window
        // update pass.
        window.defer(cx, move |window, cx| {
            entity.update(cx, |this, cx| {
                this.move_workspace_to_new_window(window, cx);
            });
        });
    }
+
     fn is_threads_list_view_active(&self, cx: &App) -> bool {
         self.read(cx).is_threads_list_view_active()
     }
@@ -276,6 +335,7 @@ pub struct MultiWorkspace {
     pending_removal_tasks: Vec<Task<()>>,
     _serialize_task: Option<Task<()>>,
     _subscriptions: Vec<Subscription>,
+    previous_focus_handle: Option<FocusHandle>,
 }
 
 impl EventEmitter<MultiWorkspaceEvent> for MultiWorkspace {}
@@ -333,6 +393,7 @@ impl MultiWorkspace {
                 quit_subscription,
                 settings_subscription,
             ],
+            previous_focus_handle: None,
         }
     }
 
@@ -376,7 +437,7 @@ impl MultiWorkspace {
     }
 
     pub fn multi_workspace_enabled(&self, cx: &App) -> bool {
-        cx.has_flag::<AgentV2FeatureFlag>() && !DisableAiSettings::get_global(cx).disable_ai
+        !DisableAiSettings::get_global(cx).disable_ai
     }
 
     pub fn toggle_sidebar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -387,6 +448,7 @@ impl MultiWorkspace {
         if self.sidebar_open() {
             self.close_sidebar(window, cx);
         } else {
+            self.previous_focus_handle = window.focused(cx);
             self.open_sidebar(cx);
             if let Some(sidebar) = &self.sidebar {
                 sidebar.prepare_for_focus(window, cx);
@@ -417,14 +479,16 @@ impl MultiWorkspace {
                 .is_some_and(|s| s.focus_handle(cx).contains_focused(window, cx));
 
             if sidebar_is_focused {
-                let pane = self.workspace().read(cx).active_pane().clone();
-                let pane_focus = pane.read(cx).focus_handle(cx);
-                window.focus(&pane_focus, cx);
-            } else if let Some(sidebar) = &self.sidebar {
-                sidebar.prepare_for_focus(window, cx);
-                sidebar.focus(window, cx);
+                self.restore_previous_focus(false, window, cx);
+            } else {
+                self.previous_focus_handle = window.focused(cx);
+                if let Some(sidebar) = &self.sidebar {
+                    sidebar.prepare_for_focus(window, cx);
+                    sidebar.focus(window, cx);
+                }
             }
         } else {
+            self.previous_focus_handle = window.focused(cx);
             self.open_sidebar(cx);
             if let Some(sidebar) = &self.sidebar {
                 sidebar.prepare_for_focus(window, cx);
@@ -457,13 +521,26 @@ impl MultiWorkspace {
                 workspace.set_sidebar_focus_handle(None);
             });
         }
-        let pane = self.workspace().read(cx).active_pane().clone();
-        let pane_focus = pane.read(cx).focus_handle(cx);
-        window.focus(&pane_focus, cx);
+        self.restore_previous_focus(true, window, cx);
         self.serialize(cx);
         cx.notify();
     }
 
+    fn restore_previous_focus(&mut self, clear: bool, window: &mut Window, cx: &mut Context<Self>) {
+        let focus_handle = if clear {
+            self.previous_focus_handle.take()
+        } else {
+            self.previous_focus_handle.clone()
+        };
+
+        if let Some(previous_focus) = focus_handle {
+            previous_focus.focus(window, cx);
+        } else {
+            let pane = self.workspace().read(cx).active_pane().clone();
+            window.focus(&pane.read(cx).focus_handle(cx), cx);
+        }
+    }
+
     pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context<Self>) {
         cx.spawn_in(window, async move |this, cx| {
             let workspaces = this.update(cx, |multi_workspace, _cx| {
@@ -808,6 +885,19 @@ impl MultiWorkspace {
         cx.notify();
     }
 
+    /// Promotes the currently active workspace to persistent if it is
+    /// transient, so it is retained across workspace switches even when
+    /// the sidebar is closed. No-op if the workspace is already persistent.
+    pub fn retain_active_workspace(&mut self, cx: &mut Context<Self>) {
+        if let ActiveWorkspace::Transient(workspace) = &self.active_workspace {
+            let workspace = workspace.clone();
+            let index = self.promote_transient(workspace, cx);
+            self.active_workspace = ActiveWorkspace::Persistent(index);
+            self.serialize(cx);
+            cx.notify();
+        }
+    }
+
     /// Promotes a former transient workspace into the persistent list.
     /// Returns the index of the newly inserted workspace.
     fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
@@ -1282,16 +1372,6 @@ impl MultiWorkspace {
         });
     }
 
-    fn move_active_workspace_to_new_window(
-        &mut self,
-        _: &MoveWorkspaceToNewWindow,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        let workspace = self.workspace().clone();
-        self.move_workspace_to_new_window(&workspace, window, cx);
-    }
-
     pub fn open_project(
         &mut self,
         paths: Vec<PathBuf>,
@@ -1425,7 +1505,6 @@ impl Render for MultiWorkspace {
                             this.focus_sidebar(window, cx);
                         },
                     ))
-                    .on_action(cx.listener(Self::move_active_workspace_to_new_window))
                     .on_action(cx.listener(
                         |this: &mut Self, action: &ToggleThreadSwitcher, window, cx| {
                             if let Some(sidebar) = &this.sidebar {
@@ -1433,6 +1512,39 @@ impl Render for MultiWorkspace {
                             }
                         },
                     ))
+                    .on_action(
+                        cx.listener(|this: &mut Self, _: &NextProjectGroup, window, cx| {
+                            if let Some(sidebar) = &this.sidebar {
+                                sidebar.cycle_project_group(true, window, cx);
+                            }
+                        }),
+                    )
+                    .on_action(cx.listener(
+                        |this: &mut Self, _: &PreviousProjectGroup, window, cx| {
+                            if let Some(sidebar) = &this.sidebar {
+                                sidebar.cycle_project_group(false, window, cx);
+                            }
+                        },
+                    ))
+                    .on_action(cx.listener(|this: &mut Self, _: &NextThread, window, cx| {
+                        if let Some(sidebar) = &this.sidebar {
+                            sidebar.cycle_thread(true, window, cx);
+                        }
+                    }))
+                    .on_action(
+                        cx.listener(|this: &mut Self, _: &PreviousThread, window, cx| {
+                            if let Some(sidebar) = &this.sidebar {
+                                sidebar.cycle_thread(false, window, cx);
+                            }
+                        }),
+                    )
+                    .on_action(cx.listener(
+                        |this: &mut Self, _: &MoveWorkspaceToNewWindow, window, cx| {
+                            if let Some(sidebar) = &this.sidebar {
+                                sidebar.move_workspace_to_new_window(window, cx);
+                            }
+                        },
+                    ))
                 })
                 .when(
                     self.sidebar_open() && self.multi_workspace_enabled(cx),

crates/workspace/src/multi_workspace_tests.rs 🔗

@@ -1,5 +1,4 @@
 use super::*;
-use feature_flags::FeatureFlagAppExt;
 use fs::FakeFs;
 use gpui::TestAppContext;
 use project::{DisableAiSettings, ProjectGroupKey};
@@ -12,7 +11,6 @@ fn init_test(cx: &mut TestAppContext) {
         cx.set_global(settings_store);
         theme_settings::init(theme::LoadThemes::JustBase, cx);
         DisableAiSettings::register(cx);
-        cx.update_flags(false, vec!["agent-v2".into()]);
     });
 }
 

crates/workspace/src/pane.rs 🔗

@@ -10,7 +10,10 @@ use crate::{
         TabContentParams, TabTooltipContent, WeakItemHandle,
     },
     move_item,
-    notifications::NotifyResultExt,
+    notifications::{
+        NotificationId, NotifyResultExt, show_app_notification,
+        simple_message_notification::MessageNotification,
+    },
     toolbar::Toolbar,
     workspace_settings::{AutosaveSetting, FocusFollowsMouse, TabBarSettings, WorkspaceSettings},
 };
@@ -195,6 +198,16 @@ pub struct DeploySearch {
     pub included_files: Option<String>,
     #[serde(default)]
     pub excluded_files: Option<String>,
+    #[serde(default)]
+    pub query: Option<String>,
+    #[serde(default)]
+    pub regex: Option<bool>,
+    #[serde(default)]
+    pub case_sensitive: Option<bool>,
+    #[serde(default)]
+    pub whole_word: Option<bool>,
+    #[serde(default)]
+    pub include_ignored: Option<bool>,
 }
 
 #[derive(Clone, Copy, PartialEq, Debug, Deserialize, JsonSchema, Default)]
@@ -306,16 +319,6 @@ actions!(
     ]
 );
 
-impl DeploySearch {
-    pub fn find() -> Self {
-        Self {
-            replace_enabled: false,
-            included_files: None,
-            excluded_files: None,
-        }
-    }
-}
-
 const MAX_NAVIGATION_HISTORY_LEN: usize = 1024;
 
 pub enum Event {
@@ -4185,15 +4188,7 @@ fn default_render_tab_bar_buttons(
                         menu.action("New File", NewFile.boxed_clone())
                             .action("Open File", ToggleFileFinder::default().boxed_clone())
                             .separator()
-                            .action(
-                                "Search Project",
-                                DeploySearch {
-                                    replace_enabled: false,
-                                    included_files: None,
-                                    excluded_files: None,
-                                }
-                                .boxed_clone(),
-                            )
+                            .action("Search Project", DeploySearch::default().boxed_clone())
                             .action("Search Symbols", ToggleProjectSymbols.boxed_clone())
                             .separator()
                             .action("New Terminal", NewTerminal::default().boxed_clone())
@@ -4400,17 +4395,64 @@ impl Render for Pane {
             ))
             .on_action(
                 cx.listener(|pane: &mut Self, action: &RevealInProjectPanel, _, cx| {
+                    let Some(active_item) = pane.active_item() else {
+                        return;
+                    };
+
                     let entry_id = action
                         .entry_id
                         .map(ProjectEntryId::from_proto)
-                        .or_else(|| pane.active_item()?.project_entry_ids(cx).first().copied());
-                    if let Some(entry_id) = entry_id {
-                        pane.project
-                            .update(cx, |_, cx| {
-                                cx.emit(project::Event::RevealInProjectPanel(entry_id))
-                            })
-                            .ok();
+                        .or_else(|| active_item.project_entry_ids(cx).first().copied());
+
+                    let show_reveal_error_toast = |display_name: &str, cx: &mut App| {
+                        let notification_id = NotificationId::unique::<RevealInProjectPanel>();
+                        let message = SharedString::from(format!(
+                            "\"{display_name}\" is not part of any open projects."
+                        ));
+
+                        show_app_notification(notification_id, cx, move |cx| {
+                            let message = message.clone();
+                            cx.new(|cx| MessageNotification::new(message, cx))
+                        });
+                    };
+
+                    let Some(entry_id) = entry_id else {
+                        // When working with an unsaved buffer, display a toast
+                        // informing the user that the buffer is not present in
+                        // any of the open projects and stop execution, as we
+                        // don't want to open the project panel.
+                        let display_name = active_item
+                            .tab_tooltip_text(cx)
+                            .unwrap_or_else(|| active_item.tab_content_text(0, cx));
+
+                        return show_reveal_error_toast(&display_name, cx);
+                    };
+
+                    // Next, check whether the entry belongs to a visible
+                    // worktree. If it does not, the user is interacting with
+                    // a file that is not part of any of the open projects,
+                    // so we show a toast informing them of this and stop
+                    // execution.
+                    let display_name = pane
+                        .project
+                        .read_with(cx, |project, cx| {
+                            project
+                                .worktree_for_entry(entry_id, cx)
+                                .filter(|worktree| !worktree.read(cx).is_visible())
+                                .map(|worktree| worktree.read(cx).root_name_str().to_string())
+                        })
+                        .ok()
+                        .flatten();
+
+                    if let Some(display_name) = display_name {
+                        return show_reveal_error_toast(&display_name, cx);
                     }
+
+                    pane.project
+                        .update(cx, |_, cx| {
+                            cx.emit(project::Event::RevealInProjectPanel(entry_id))
+                        })
+                        .log_err();
                 }),
             )
             .on_action(cx.listener(|_, _: &menu::Cancel, window, cx| {

crates/workspace/src/persistence.rs 🔗

@@ -2525,7 +2525,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4082,7 +4081,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4127,7 +4125,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4184,7 +4181,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4275,7 +4271,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4381,7 +4376,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4485,7 +4479,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4542,7 +4535,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());
@@ -4702,7 +4694,6 @@ mod tests {
 
         cx.update(|cx| {
             cx.set_staff(true);
-            cx.update_flags(true, vec!["agent-v2".to_string()]);
         });
 
         let fs = fs::FakeFs::new(cx.executor());

crates/workspace/src/welcome.rs 🔗

@@ -1,23 +1,27 @@
 use crate::{
-    NewFile, Open, OpenMode, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceId,
+    NewFile, Open, OpenMode, PathList, SerializedWorkspaceLocation, ToggleWorkspaceSidebar,
+    Workspace, WorkspaceId,
     item::{Item, ItemEvent},
     persistence::WorkspaceDb,
 };
+use agent_settings::AgentSettings;
 use chrono::{DateTime, Utc};
 use git::Clone as GitClone;
-use gpui::WeakEntity;
 use gpui::{
     Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement,
     ParentElement, Render, Styled, Task, Window, actions,
 };
+use gpui::{WeakEntity, linear_color_stop, linear_gradient};
 use menu::{SelectNext, SelectPrevious};
-use project::DisableAiSettings;
+
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::Settings;
 use ui::{ButtonLike, Divider, DividerColor, KeyBinding, Vector, VectorName, prelude::*};
 use util::ResultExt;
-use zed_actions::{Extensions, OpenOnboarding, OpenSettings, agent, command_palette};
+use zed_actions::{
+    Extensions, OpenKeymap, OpenOnboarding, OpenSettings, assistant::ToggleFocus, command_palette,
+};
 
 #[derive(PartialEq, Clone, Debug, Deserialize, Serialize, JsonSchema, Action)]
 #[action(namespace = welcome)]
@@ -126,14 +130,12 @@ impl RenderOnce for SectionButton {
 
 enum SectionVisibility {
     Always,
-    Conditional(fn(&App) -> bool),
 }
 
 impl SectionVisibility {
-    fn is_visible(&self, cx: &App) -> bool {
+    fn is_visible(&self) -> bool {
         match self {
             SectionVisibility::Always => true,
-            SectionVisibility::Conditional(f) => f(cx),
         }
     }
 }
@@ -146,13 +148,8 @@ struct SectionEntry {
 }
 
 impl SectionEntry {
-    fn render(
-        &self,
-        button_index: usize,
-        focus: &FocusHandle,
-        cx: &App,
-    ) -> Option<impl IntoElement> {
-        self.visibility_guard.is_visible(cx).then(|| {
+    fn render(&self, button_index: usize, focus: &FocusHandle) -> Option<impl IntoElement> {
+        self.visibility_guard.is_visible().then(|| {
             SectionButton::new(
                 self.title,
                 self.icon,
@@ -204,12 +201,10 @@ const CONTENT: (Section<4>, Section<3>) = (
                 visibility_guard: SectionVisibility::Always,
             },
             SectionEntry {
-                icon: IconName::ZedAssistant,
-                title: "View AI Settings",
-                action: &agent::OpenSettings,
-                visibility_guard: SectionVisibility::Conditional(|cx| {
-                    !DisableAiSettings::get_global(cx).disable_ai
-                }),
+                icon: IconName::Keyboard,
+                title: "Customize Keymaps",
+                action: &OpenKeymap,
+                visibility_guard: SectionVisibility::Always,
             },
             SectionEntry {
                 icon: IconName::Blocks,
@@ -230,7 +225,7 @@ struct Section<const COLS: usize> {
 }
 
 impl<const COLS: usize> Section<COLS> {
-    fn render(self, index_offset: usize, focus: &FocusHandle, cx: &App) -> impl IntoElement {
+    fn render(self, index_offset: usize, focus: &FocusHandle) -> impl IntoElement {
         v_flex()
             .min_w_full()
             .child(SectionHeader::new(self.title))
@@ -238,7 +233,7 @@ impl<const COLS: usize> Section<COLS> {
                 self.entries
                     .iter()
                     .enumerate()
-                    .filter_map(|(index, entry)| entry.render(index_offset + index, focus, cx)),
+                    .filter_map(|(index, entry)| entry.render(index_offset + index, focus)),
             )
     }
 }
@@ -338,6 +333,55 @@ impl WelcomePage {
         }
     }
 
+    fn render_agent_card(&self, tab_index: usize, cx: &mut Context<Self>) -> impl IntoElement {
+        let focus = self.focus_handle.clone();
+        let color = cx.theme().colors();
+
+        let description = "Run multiple threads at once, mix and match any ACP-compatible agent, and keep work conflict-free with worktrees.";
+
+        v_flex()
+            .w_full()
+            .p_2()
+            .rounded_md()
+            .border_1()
+            .border_color(color.border_variant)
+            .bg(linear_gradient(
+                360.,
+                linear_color_stop(color.panel_background, 1.0),
+                linear_color_stop(color.editor_background, 0.45),
+            ))
+            .child(
+                h_flex()
+                    .gap_1p5()
+                    .child(
+                        Icon::new(IconName::ZedAssistant)
+                            .color(Color::Muted)
+                            .size(IconSize::Small),
+                    )
+                    .child(Label::new("Collaborate with Agents")),
+            )
+            .child(
+                Label::new(description)
+                    .size(LabelSize::Small)
+                    .color(Color::Muted)
+                    .mb_2(),
+            )
+            .child(
+                Button::new("open-agent", "Open Agent Panel")
+                    .full_width()
+                    .tab_index(tab_index as isize)
+                    .style(ButtonStyle::Outlined)
+                    .key_binding(
+                        KeyBinding::for_action_in(&ToggleFocus, &self.focus_handle, cx)
+                            .size(rems_from_px(12.)),
+                    )
+                    .on_click(move |_, window, cx| {
+                        focus.dispatch_action(&ToggleWorkspaceSidebar, window, cx);
+                        focus.dispatch_action(&ToggleFocus, window, cx);
+                    }),
+            )
+    }
+
     fn render_recent_project_section(
         &self,
         recent_projects: Vec<impl IntoElement>,
@@ -385,7 +429,9 @@ impl Render for WelcomePage {
     fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let (first_section, second_section) = CONTENT;
         let first_section_entries = first_section.entries.len();
-        let last_index = first_section_entries + second_section.entries.len();
+        let mut next_tab_index = first_section_entries + second_section.entries.len();
+
+        let ai_enabled = AgentSettings::get_global(cx).enabled(cx);
 
         let recent_projects = self
             .recent_workspaces
@@ -404,7 +450,7 @@ impl Render for WelcomePage {
                 .into_any_element()
         } else {
             second_section
-                .render(first_section_entries, &self.focus_handle, cx)
+                .render(first_section_entries, &self.focus_handle)
                 .into_any_element()
         };
 
@@ -421,58 +467,53 @@ impl Render for WelcomePage {
             .on_action(cx.listener(Self::select_next))
             .on_action(cx.listener(Self::open_recent_project))
             .size_full()
-            .justify_center()
-            .overflow_hidden()
             .bg(cx.theme().colors().editor_background)
+            .justify_center()
             .child(
-                h_flex()
-                    .relative()
+                v_flex()
+                    .id("welcome-content")
+                    .p_8()
+                    .max_w_128()
                     .size_full()
-                    .px_12()
-                    .max_w(px(1100.))
+                    .gap_6()
+                    .justify_center()
+                    .overflow_y_scroll()
                     .child(
-                        v_flex()
-                            .flex_1()
+                        h_flex()
+                            .w_full()
                             .justify_center()
-                            .max_w_128()
-                            .mx_auto()
-                            .gap_6()
-                            .overflow_x_hidden()
+                            .mb_4()
+                            .gap_4()
+                            .child(Vector::square(VectorName::ZedLogo, rems_from_px(45.)))
                             .child(
-                                h_flex()
-                                    .w_full()
-                                    .justify_center()
-                                    .mb_4()
-                                    .gap_4()
-                                    .child(Vector::square(VectorName::ZedLogo, rems_from_px(45.)))
-                                    .child(
-                                        v_flex().child(Headline::new(welcome_label)).child(
-                                            Label::new("The editor for what's next")
-                                                .size(LabelSize::Small)
-                                                .color(Color::Muted)
-                                                .italic(),
-                                        ),
-                                    ),
-                            )
-                            .child(first_section.render(Default::default(), &self.focus_handle, cx))
-                            .child(second_section)
-                            .when(!self.fallback_to_recent_projects, |this| {
-                                this.child(
-                                    v_flex().gap_1().child(Divider::horizontal()).child(
-                                        Button::new("welcome-exit", "Return to Onboarding")
-                                            .tab_index(last_index as isize)
-                                            .full_width()
-                                            .label_size(LabelSize::XSmall)
-                                            .on_click(|_, window, cx| {
-                                                window.dispatch_action(
-                                                    OpenOnboarding.boxed_clone(),
-                                                    cx,
-                                                );
-                                            }),
-                                    ),
-                                )
-                            }),
-                    ),
+                                v_flex().child(Headline::new(welcome_label)).child(
+                                    Label::new("The editor for what's next")
+                                        .size(LabelSize::Small)
+                                        .color(Color::Muted)
+                                        .italic(),
+                                ),
+                            ),
+                    )
+                    .child(first_section.render(Default::default(), &self.focus_handle))
+                    .child(second_section)
+                    .when(ai_enabled, |this| {
+                        let agent_tab_index = next_tab_index;
+                        next_tab_index += 1;
+                        this.child(self.render_agent_card(agent_tab_index, cx))
+                    })
+                    .when(!self.fallback_to_recent_projects, |this| {
+                        this.child(
+                            v_flex().gap_4().child(Divider::horizontal()).child(
+                                Button::new("welcome-exit", "Return to Onboarding")
+                                    .tab_index(next_tab_index as isize)
+                                    .full_width()
+                                    .label_size(LabelSize::XSmall)
+                                    .on_click(|_, window, cx| {
+                                        window.dispatch_action(OpenOnboarding.boxed_clone(), cx);
+                                    }),
+                            ),
+                        )
+                    }),
             )
     }
 }

crates/workspace/src/workspace.rs 🔗

@@ -31,9 +31,11 @@ mod workspace_settings;
 pub use crate::notifications::NotificationFrame;
 pub use dock::Panel;
 pub use multi_workspace::{
-    CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace,
-    MultiWorkspaceEvent, Sidebar, SidebarEvent, SidebarHandle, SidebarRenderState, SidebarSide,
-    ToggleWorkspaceSidebar, sidebar_side_context_menu,
+    CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MoveWorkspaceToNewWindow,
+    MultiWorkspace, MultiWorkspaceEvent, NewThread, NextProjectGroup, NextThread,
+    PreviousProjectGroup, PreviousThread, ShowFewerThreads, ShowMoreThreads, Sidebar, SidebarEvent,
+    SidebarHandle, SidebarRenderState, SidebarSide, ToggleWorkspaceSidebar,
+    sidebar_side_context_menu,
 };
 pub use path_list::{PathList, SerializedPathList};
 pub use toast_layer::{ToastAction, ToastLayer, ToastView};
@@ -4860,12 +4862,31 @@ impl Workspace {
             .as_ref()
             .map(|h| Target::Sidebar(h.clone()));
 
+        let sidebar_on_right = self
+            .multi_workspace
+            .as_ref()
+            .and_then(|mw| mw.upgrade())
+            .map_or(false, |mw| {
+                mw.read(cx).sidebar_side(cx) == SidebarSide::Right
+            });
+
+        let away_from_sidebar = if sidebar_on_right {
+            SplitDirection::Left
+        } else {
+            SplitDirection::Right
+        };
+
+        let (near_dock, far_dock) = if sidebar_on_right {
+            (&self.right_dock, &self.left_dock)
+        } else {
+            (&self.left_dock, &self.right_dock)
+        };
+
         let target = match (origin, direction) {
-            // From the sidebar, only Right navigates into the workspace.
-            (Origin::Sidebar, SplitDirection::Right) => try_dock(&self.left_dock)
+            (Origin::Sidebar, dir) if dir == away_from_sidebar => try_dock(near_dock)
                 .or_else(|| get_last_active_pane().map(Target::Pane))
                 .or_else(|| try_dock(&self.bottom_dock))
-                .or_else(|| try_dock(&self.right_dock)),
+                .or_else(|| try_dock(far_dock)),
 
             (Origin::Sidebar, _) => None,
 
@@ -4878,8 +4899,22 @@ impl Workspace {
                     match direction {
                         SplitDirection::Up => None,
                         SplitDirection::Down => try_dock(&self.bottom_dock),
-                        SplitDirection::Left => try_dock(&self.left_dock).or(sidebar_target),
-                        SplitDirection::Right => try_dock(&self.right_dock),
+                        SplitDirection::Left => {
+                            let dock_target = try_dock(&self.left_dock);
+                            if sidebar_on_right {
+                                dock_target
+                            } else {
+                                dock_target.or(sidebar_target)
+                            }
+                        }
+                        SplitDirection::Right => {
+                            let dock_target = try_dock(&self.right_dock);
+                            if sidebar_on_right {
+                                dock_target.or(sidebar_target)
+                            } else {
+                                dock_target
+                            }
+                        }
                     }
                 }
             }
@@ -4892,24 +4927,48 @@ impl Workspace {
                 }
             }
 
-            (Origin::LeftDock, SplitDirection::Left) => sidebar_target,
+            (Origin::LeftDock, SplitDirection::Left) => {
+                if sidebar_on_right {
+                    None
+                } else {
+                    sidebar_target
+                }
+            }
 
             (Origin::LeftDock, SplitDirection::Down)
             | (Origin::RightDock, SplitDirection::Down) => try_dock(&self.bottom_dock),
 
             (Origin::BottomDock, SplitDirection::Up) => get_last_active_pane().map(Target::Pane),
             (Origin::BottomDock, SplitDirection::Left) => {
-                try_dock(&self.left_dock).or(sidebar_target)
+                let dock_target = try_dock(&self.left_dock);
+                if sidebar_on_right {
+                    dock_target
+                } else {
+                    dock_target.or(sidebar_target)
+                }
+            }
+            (Origin::BottomDock, SplitDirection::Right) => {
+                let dock_target = try_dock(&self.right_dock);
+                if sidebar_on_right {
+                    dock_target.or(sidebar_target)
+                } else {
+                    dock_target
+                }
             }
-            (Origin::BottomDock, SplitDirection::Right) => try_dock(&self.right_dock),
 
             (Origin::RightDock, SplitDirection::Left) => {
                 if let Some(last_active_pane) = get_last_active_pane() {
                     Some(Target::Pane(last_active_pane))
                 } else {
-                    try_dock(&self.bottom_dock)
-                        .or_else(|| try_dock(&self.left_dock))
-                        .or(sidebar_target)
+                    try_dock(&self.bottom_dock).or_else(|| try_dock(&self.left_dock))
+                }
+            }
+
+            (Origin::RightDock, SplitDirection::Right) => {
+                if sidebar_on_right {
+                    sidebar_target
+                } else {
+                    None
                 }
             }
 
@@ -9127,30 +9186,35 @@ pub async fn find_existing_workspace(
     let mut open_visible = OpenVisible::All;
     let mut best_match = None;
 
-    if open_options.open_new_workspace != Some(true) {
-        cx.update(|cx| {
-            for window in workspace_windows_for_location(location, cx) {
-                if let Ok(multi_workspace) = window.read(cx) {
-                    for workspace in multi_workspace.workspaces() {
-                        let project = workspace.read(cx).project.read(cx);
-                        let m = project.visibility_for_paths(
-                            abs_paths,
-                            open_options.open_new_workspace == None,
-                            cx,
-                        );
-                        if m > best_match {
-                            existing = Some((window, workspace.clone()));
-                            best_match = m;
-                        } else if best_match.is_none()
-                            && open_options.open_new_workspace == Some(false)
-                        {
-                            existing = Some((window, workspace.clone()))
-                        }
+    cx.update(|cx| {
+        for window in workspace_windows_for_location(location, cx) {
+            if let Ok(multi_workspace) = window.read(cx) {
+                for workspace in multi_workspace.workspaces() {
+                    let project = workspace.read(cx).project.read(cx);
+                    let m = project.visibility_for_paths(
+                        abs_paths,
+                        open_options.open_new_workspace == None,
+                        cx,
+                    );
+                    if m > best_match {
+                        existing = Some((window, workspace.clone()));
+                        best_match = m;
+                    } else if best_match.is_none() && open_options.open_new_workspace == Some(false)
+                    {
+                        existing = Some((window, workspace.clone()))
                     }
                 }
             }
-        });
+        }
+    });
 
+    // With -n, only reuse a window if the path is genuinely contained
+    // within an existing worktree (don't fall back to any arbitrary window).
+    if open_options.open_new_workspace == Some(true) && best_match.is_none() {
+        existing = None;
+    }
+
+    if open_options.open_new_workspace != Some(true) {
         let all_paths_are_files = existing
             .as_ref()
             .and_then(|(_, target_workspace)| {

crates/x_ai/Cargo.toml 🔗

@@ -17,6 +17,8 @@ schemars = ["dep:schemars"]
 
 [dependencies]
 anyhow.workspace = true
+language_model_core.workspace = true
 schemars = { workspace = true, optional = true }
 serde.workspace = true
 strum.workspace = true
+tiktoken-rs.workspace = true

crates/x_ai/src/completion.rs 🔗

@@ -0,0 +1,30 @@
+use anyhow::Result;
+use language_model_core::{LanguageModelRequest, Role};
+
+use crate::Model;
+
+/// Count tokens for an xAI model using tiktoken. This is synchronous;
+/// callers should spawn it on a background thread if needed.
+pub fn count_xai_tokens(request: LanguageModelRequest, model: Model) -> Result<u64> {
+    let messages = request
+        .messages
+        .into_iter()
+        .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
+            role: match message.role {
+                Role::User => "user".into(),
+                Role::Assistant => "assistant".into(),
+                Role::System => "system".into(),
+            },
+            content: Some(message.string_contents()),
+            name: None,
+            function_call: None,
+        })
+        .collect::<Vec<_>>();
+
+    let model_name = if model.max_token_count() >= 100_000 {
+        "gpt-4o"
+    } else {
+        "gpt-4"
+    };
+    tiktoken_rs::num_tokens_from_messages(model_name, &messages).map(|tokens| tokens as u64)
+}

crates/x_ai/src/x_ai.rs 🔗

@@ -1,3 +1,5 @@
+pub mod completion;
+
 use anyhow::Result;
 use serde::{Deserialize, Serialize};
 use strum::EnumIter;

crates/zed/src/visual_test_runner.rs 🔗

@@ -2527,11 +2527,6 @@ fn run_multi_workspace_sidebar_visual_tests(
     std::fs::create_dir_all(&workspace1_dir)?;
     std::fs::create_dir_all(&workspace2_dir)?;
 
-    // Enable the agent-v2 feature flag so multi-workspace is active
-    cx.update(|cx| {
-        cx.update_flags(true, vec!["agent-v2".to_string()]);
-    });
-
     // Create both projects upfront so we can build both workspaces during
     // window creation, before the MultiWorkspace entity exists.
     // This avoids a re-entrant read panic that occurs when Workspace::new
@@ -3080,12 +3075,7 @@ fn run_start_thread_in_selector_visual_tests(
     cx: &mut VisualTestAppContext,
     update_baseline: bool,
 ) -> Result<TestResult> {
-    use agent_ui::{AgentPanel, StartThreadIn, WorktreeCreationStatus};
-
-    // Enable feature flags so the thread target selector renders
-    cx.update(|cx| {
-        cx.update_flags(true, vec!["agent-v2".to_string()]);
-    });
+    use agent_ui::{AgentPanel, NewWorktreeBranchTarget, StartThreadIn, WorktreeCreationStatus};
 
     // Create a temp directory with a real git repo so "New Worktree" is enabled
     let temp_dir = tempfile::tempdir()?;
@@ -3401,7 +3391,13 @@ edition = "2021"
 
     cx.update_window(workspace_window.into(), |_, _window, cx| {
         panel.update(cx, |panel, cx| {
-            panel.set_start_thread_in_for_tests(StartThreadIn::NewWorktree, cx);
+            panel.set_start_thread_in_for_tests(
+                StartThreadIn::NewWorktree {
+                    worktree_name: None,
+                    branch_target: NewWorktreeBranchTarget::default(),
+                },
+                cx,
+            );
         });
     })?;
     cx.run_until_parked();
@@ -3474,7 +3470,13 @@ edition = "2021"
     cx.run_until_parked();
 
     cx.update_window(workspace_window.into(), |_, window, cx| {
-        window.dispatch_action(Box::new(StartThreadIn::NewWorktree), cx);
+        window.dispatch_action(
+            Box::new(StartThreadIn::NewWorktree {
+                worktree_name: None,
+                branch_target: NewWorktreeBranchTarget::default(),
+            }),
+            cx,
+        );
     })?;
     cx.run_until_parked();
 

crates/zed/src/zed.rs 🔗

@@ -652,10 +652,6 @@ fn initialize_panels(window: &mut Window, cx: &mut Context<Workspace>) -> Task<a
         let git_panel = GitPanel::load(workspace_handle.clone(), cx.clone());
         let channels_panel =
             collab_ui::collab_panel::CollabPanel::load(workspace_handle.clone(), cx.clone());
-        let notification_panel = collab_ui::notification_panel::NotificationPanel::load(
-            workspace_handle.clone(),
-            cx.clone(),
-        );
         let debug_panel = DebugPanel::load(workspace_handle.clone(), cx);
 
         async fn add_panel_when_ready(
@@ -679,7 +675,6 @@ fn initialize_panels(window: &mut Window, cx: &mut Context<Workspace>) -> Task<a
             add_panel_when_ready(terminal_panel, workspace_handle.clone(), cx.clone()),
             add_panel_when_ready(git_panel, workspace_handle.clone(), cx.clone()),
             add_panel_when_ready(channels_panel, workspace_handle.clone(), cx.clone()),
-            add_panel_when_ready(notification_panel, workspace_handle.clone(), cx.clone()),
             add_panel_when_ready(debug_panel, workspace_handle.clone(), cx.clone()),
             initialize_agent_panel(workspace_handle, cx.clone()).map(|r| r.log_err()),
         );
@@ -1037,16 +1032,6 @@ fn register_actions(
                 workspace.toggle_panel_focus::<collab_ui::collab_panel::CollabPanel>(window, cx);
             },
         )
-        .register_action(
-            |workspace: &mut Workspace,
-             _: &collab_ui::notification_panel::ToggleFocus,
-             window: &mut Window,
-             cx: &mut Context<Workspace>| {
-                workspace.toggle_panel_focus::<collab_ui::notification_panel::NotificationPanel>(
-                    window, cx,
-                );
-            },
-        )
         .register_action(
             |workspace: &mut Workspace,
              _: &terminal_panel::ToggleFocus,
@@ -2512,10 +2497,6 @@ mod tests {
     #[gpui::test]
     async fn test_open_paths_action(cx: &mut TestAppContext) {
         let app_state = init_test(cx);
-        cx.update(|cx| {
-            use feature_flags::FeatureFlagAppExt as _;
-            cx.update_flags(false, vec!["agent-v2".to_string()]);
-        });
         app_state
             .fs
             .as_fake()
@@ -2682,6 +2663,7 @@ mod tests {
         .unwrap();
         assert_eq!(cx.update(|cx| cx.windows().len()), 1);
 
+        // Opening a file inside the existing worktree with -n reuses the window.
         cx.update(|cx| {
             open_paths(
                 &[PathBuf::from(path!("/root/dir/c"))],
@@ -2695,6 +2677,22 @@ mod tests {
         })
         .await
         .unwrap();
+        assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+
+        // Opening a path NOT in any existing worktree with -n creates a new window.
+        cx.update(|cx| {
+            open_paths(
+                &[PathBuf::from(path!("/root/b"))],
+                app_state.clone(),
+                workspace::OpenOptions {
+                    open_new_workspace: Some(true),
+                    ..Default::default()
+                },
+                cx,
+            )
+        })
+        .await
+        .unwrap();
         assert_eq!(cx.update(|cx| cx.windows().len()), 2);
     }
 
@@ -2748,7 +2746,7 @@ mod tests {
         .unwrap();
         assert_eq!(cx.update(|cx| cx.windows().len()), 1);
 
-        // Opening a directory with -n creates a new window.
+        // Opening a directory already in a worktree with -n reuses the window.
         cx.update(|cx| {
             open_paths(
                 &[PathBuf::from(path!("/root/dir2"))],
@@ -2762,6 +2760,22 @@ mod tests {
         })
         .await
         .unwrap();
+        assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+
+        // Opening a directory NOT in any worktree with -n creates a new window.
+        cx.update(|cx| {
+            open_paths(
+                &[PathBuf::from(path!("/root"))],
+                app_state.clone(),
+                workspace::OpenOptions {
+                    open_new_workspace: Some(true),
+                    ..Default::default()
+                },
+                cx,
+            )
+        })
+        .await
+        .unwrap();
         assert_eq!(cx.update(|cx| cx.windows().len()), 2);
     }
 
@@ -4962,7 +4976,6 @@ mod tests {
                 "multi_workspace",
                 "new_process_modal",
                 "notebook",
-                "notification_panel",
                 "onboarding",
                 "outline",
                 "outline_panel",
@@ -5480,10 +5493,6 @@ mod tests {
     #[gpui::test]
     async fn test_open_paths_switches_to_best_workspace(cx: &mut TestAppContext) {
         let app_state = init_test(cx);
-        cx.update(|cx| {
-            use feature_flags::FeatureFlagAppExt as _;
-            cx.update_flags(false, vec!["agent-v2".to_string()]);
-        });
 
         app_state
             .fs
@@ -5685,10 +5694,6 @@ mod tests {
     async fn test_quit_checks_all_workspaces_for_dirty_items(cx: &mut TestAppContext) {
         let app_state = init_test(cx);
         cx.update(init);
-        cx.update(|cx| {
-            use feature_flags::FeatureFlagAppExt as _;
-            cx.update_flags(false, vec!["agent-v2".to_string()]);
-        });
 
         app_state
             .fs
@@ -5978,11 +5983,6 @@ mod tests {
 
         let app_state = init_test(cx);
 
-        cx.update(|cx| {
-            use feature_flags::FeatureFlagAppExt as _;
-            cx.update_flags(false, vec!["agent-v2".to_string()]);
-        });
-
         let dir1 = path!("/dir1");
         let dir2 = path!("/dir2");
         let dir3 = path!("/dir3");

crates/zed/src/zed/app_menus.rs 🔗

@@ -165,7 +165,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
                 MenuItem::os_action("Paste", editor::actions::Paste, OsAction::Paste),
                 MenuItem::separator(),
                 MenuItem::action("Find", search::buffer_search::Deploy::find()),
-                MenuItem::action("Find in Project", workspace::DeploySearch::find()),
+                MenuItem::action("Find in Project", workspace::DeploySearch::default()),
                 MenuItem::separator(),
                 MenuItem::action(
                     "Toggle Line Comment",

crates/zed/src/zed/open_listener.rs 🔗

@@ -1043,7 +1043,7 @@ mod tests {
             })
             .unwrap();
 
-        // Now open a file inside that workspace, but tell Zed to open a new window
+        // Opening a file inside the existing worktree with -n reuses the window.
         open_workspace_file(
             path!("/root/dir1/file1.txt"),
             Some(true),
@@ -1052,18 +1052,7 @@ mod tests {
         )
         .await;
 
-        assert_eq!(cx.windows().len(), 2);
-
-        let multi_workspace_2 = cx.windows()[1].downcast::<MultiWorkspace>().unwrap();
-        multi_workspace_2
-            .update(cx, |multi_workspace, _, cx| {
-                multi_workspace.workspace().update(cx, |workspace, cx| {
-                    assert!(workspace.active_item_as::<Editor>(cx).is_some());
-                    let items = workspace.items(cx).collect::<Vec<_>>();
-                    assert_eq!(items.len(), 1, "Workspace should have two items");
-                });
-            })
-            .unwrap();
+        assert_eq!(cx.windows().len(), 1);
     }
 
     #[gpui::test]

crates/zed_actions/src/lib.rs 🔗

@@ -785,8 +785,6 @@ pub mod agents_sidebar {
         [
             /// Moves focus to the sidebar's search/filter editor.
             FocusSidebarFilter,
-            /// Moves the active workspace to a new window.
-            MoveWorkspaceToNewWindow,
         ]
     );
 }
@@ -797,10 +795,36 @@ pub mod notebook {
     actions!(
         notebook,
         [
-            /// Move to down in cells
+            /// Opens a Jupyter notebook file.
+            OpenNotebook,
+            /// Runs all cells in the notebook.
+            RunAll,
+            /// Runs the current cell and stays on it.
+            Run,
+            /// Runs the current cell and advances to the next cell.
+            RunAndAdvance,
+            /// Clears all cell outputs.
+            ClearOutputs,
+            /// Moves the current cell up.
+            MoveCellUp,
+            /// Moves the current cell down.
+            MoveCellDown,
+            /// Adds a new markdown cell.
+            AddMarkdownBlock,
+            /// Adds a new code cell.
+            AddCodeBlock,
+            /// Restarts the kernel.
+            RestartKernel,
+            /// Interrupts the current execution.
+            InterruptKernel,
+            /// Move down in cells.
             NotebookMoveDown,
-            /// Move to up in cells
+            /// Move up in cells.
             NotebookMoveUp,
+            /// Enters the current cell's editor (edit mode).
+            EnterEditMode,
+            /// Exits the cell editor and returns to cell command mode.
+            EnterCommandMode,
         ]
     );
 }

docs/src/ai/models.md 🔗

@@ -1,12 +1,14 @@
 ---
 title: AI Models and Pricing - Zed
-description: AI models available via Zed Pro including Claude, GPT-5.2, Gemini 3.1 Pro, and Grok. Pricing, context windows, and tool call support.
+description: AI models available via Zed Pro including Claude, GPT-5.4, Gemini 3.1 Pro, and Grok. Pricing, context windows, and tool call support.
 ---
 
 # Models
 
 Zed's plans offer hosted versions of major LLMs with higher rate limits than direct API access. Model availability is updated regularly. To use your own API keys instead, see [LLM Providers](./llm-providers.md). For general setup, see [Configuration](./configuration.md).
 
+> **Note:** Claude Opus models and GPT-5.4 pro are not available on the [Student plan](./plans-and-usage.md#student).
+
 | Model                  | Provider  | Token Type          | Provider Price per 1M tokens | Zed Price per 1M tokens |
 | ---------------------- | --------- | ------------------- | ---------------------------- | ----------------------- |
 | Claude Opus 4.5        | Anthropic | Input               | $5.00                        | $5.50                   |
@@ -29,12 +31,20 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir
 |                        | Anthropic | Output              | $5.00                        | $5.50                   |
 |                        | Anthropic | Input - Cache Write | $1.25                        | $1.375                  |
 |                        | Anthropic | Input - Cache Read  | $0.10                        | $0.11                   |
-| GPT-5.2                | OpenAI    | Input               | $1.25                        | $1.375                  |
-|                        | OpenAI    | Output              | $10.00                       | $11.00                  |
-|                        | OpenAI    | Cached Input        | $0.125                       | $0.1375                 |
-| GPT-5.2 Codex          | OpenAI    | Input               | $1.25                        | $1.375                  |
-|                        | OpenAI    | Output              | $10.00                       | $11.00                  |
-|                        | OpenAI    | Cached Input        | $0.125                       | $0.1375                 |
+| GPT-5.4 pro            | OpenAI    | Input               | $30.00                       | $33.00                  |
+|                        | OpenAI    | Output              | $180.00                      | $198.00                 |
+| GPT-5.4                | OpenAI    | Input               | $2.50                        | $2.75                   |
+|                        | OpenAI    | Output              | $15.00                       | $16.50                  |
+|                        | OpenAI    | Cached Input        | $0.025                       | $0.0275                 |
+| GPT-5.3-Codex          | OpenAI    | Input               | $1.75                        | $1.925                  |
+|                        | OpenAI    | Output              | $14.00                       | $15.40                  |
+|                        | OpenAI    | Cached Input        | $0.175                       | $0.1925                 |
+| GPT-5.2                | OpenAI    | Input               | $1.75                        | $1.925                  |
+|                        | OpenAI    | Output              | $14.00                       | $15.40                  |
+|                        | OpenAI    | Cached Input        | $0.175                       | $0.1925                 |
+| GPT-5.2-Codex          | OpenAI    | Input               | $1.75                        | $1.925                  |
+|                        | OpenAI    | Output              | $14.00                       | $15.40                  |
+|                        | OpenAI    | Cached Input        | $0.175                       | $0.1925                 |
 | GPT-5 mini             | OpenAI    | Input               | $0.25                        | $0.275                  |
 |                        | OpenAI    | Output              | $2.00                        | $2.20                   |
 |                        | OpenAI    | Cached Input        | $0.025                       | $0.0275                 |
@@ -43,8 +53,8 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir
 |                        | OpenAI    | Cached Input        | $0.005                       | $0.0055                 |
 | Gemini 3.1 Pro         | Google    | Input               | $2.00                        | $2.20                   |
 |                        | Google    | Output              | $12.00                       | $13.20                  |
-| Gemini 3 Flash         | Google    | Input               | $0.30                        | $0.33                   |
-|                        | Google    | Output              | $2.50                        | $2.75                   |
+| Gemini 3 Flash         | Google    | Input               | $0.50                        | $0.55                   |
+|                        | Google    | Output              | $3.00                        | $3.30                   |
 | Grok 4                 | X.ai      | Input               | $3.00                        | $3.30                   |
 |                        | X.ai      | Output              | $15.00                       | $16.5                   |
 |                        | X.ai      | Cached Input        | $0.75                        | $0.825                  |
@@ -65,7 +75,7 @@ As of February 19, 2026, Zed Pro serves newer model versions in place of the ret
 - Claude Opus 4.1 → Claude Opus 4.5 or Claude Opus 4.6
 - Claude Sonnet 4 → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - Claude Sonnet 3.7 (retired Feb 19) → Claude Sonnet 4.5 or Claude Sonnet 4.6
-- GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2 Codex
+- GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2-Codex
 - Gemini 2.5 Pro → Gemini 3.1 Pro
 - Gemini 3 Pro → Gemini 3.1 Pro
 - Gemini 2.5 Flash → Gemini 3 Flash
@@ -80,21 +90,28 @@ Any usage of a Zed-hosted model will be billed at the Zed Price (rightmost colum
 
 A context window is the maximum span of text and code an LLM can consider at once, including both the input prompt and output generated by the model.
 
-| Model             | Provider  | Zed-Hosted Context Window |
-| ----------------- | --------- | ------------------------- |
-| Claude Opus 4.5   | Anthropic | 200k                      |
-| Claude Opus 4.6   | Anthropic | 1M                        |
-| Claude Sonnet 4.5 | Anthropic | 200k                      |
-| Claude Sonnet 4.6 | Anthropic | 1M                        |
-| Claude Haiku 4.5  | Anthropic | 200k                      |
-| GPT-5.2           | OpenAI    | 400k                      |
-| GPT-5.2 Codex     | OpenAI    | 400k                      |
-| GPT-5 mini        | OpenAI    | 400k                      |
-| GPT-5 nano        | OpenAI    | 400k                      |
-| Gemini 3.1 Pro    | Google    | 200k                      |
-| Gemini 3 Flash    | Google    | 200k                      |
-
-> Context window limits for hosted Gemini 3.1 Pro/3 Pro/Flash may increase in future releases.
+| Model                       | Provider  | Zed-Hosted Context Window |
+| --------------------------- | --------- | ------------------------- |
+| Claude Opus 4.5             | Anthropic | 200k                      |
+| Claude Opus 4.6             | Anthropic | 1M                        |
+| Claude Sonnet 4.5           | Anthropic | 200k                      |
+| Claude Sonnet 4.6           | Anthropic | 1M                        |
+| Claude Haiku 4.5            | Anthropic | 200k                      |
+| GPT-5.4 pro                 | OpenAI    | 400k                      |
+| GPT-5.4                     | OpenAI    | 400k                      |
+| GPT-5.3-Codex               | OpenAI    | 400k                      |
+| GPT-5.2                     | OpenAI    | 400k                      |
+| GPT-5.2-Codex               | OpenAI    | 400k                      |
+| GPT-5 mini                  | OpenAI    | 400k                      |
+| GPT-5 nano                  | OpenAI    | 400k                      |
+| Gemini 3.1 Pro              | Google    | 200k                      |
+| Gemini 3 Flash              | Google    | 200k                      |
+| Grok 4                      | X.ai      | 128k                      |
+| Grok 4 Fast                 | X.ai      | 128k                      |
+| Grok 4 Fast (Non-Reasoning) | X.ai      | 128k                      |
+| Grok Code Fast 1            | X.ai      | 256k                      |
+
+> Context window limits for hosted Gemini 3.1 Pro/3 Flash may increase in future releases.
 
 Each Agent thread in Zed maintains its own context window.
 The more prompts, attached files, and responses included in a session, the larger the context window grows.

docs/src/vim.md 🔗

@@ -562,6 +562,7 @@ You can change the following settings to modify vim mode's behavior:
 | use_system_clipboard         | Determines how system clipboard is used:<br><ul><li>"always": use for all operations</li><li>"never": only use when explicitly specified</li><li>"on_yank": use for yank operations</li></ul> | "always"      |
 | use_multiline_find           | deprecated                                                                                                                                                                                    |
 | use_smartcase_find           | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase.                                                                                                      | false         |
+| use_regex_search             | If `true`, then vim search will use regex mode.                                                                                                                                               | true          |
 | gdefault                     | If `true`, the `:substitute` command replaces all matches in a line by default (as if `g` flag was given). The `g` flag then toggles this, replacing only the first match.                    | false         |
 | toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options.                                                                         | false         |
 | custom_digraphs              | An object that allows you to add custom digraphs. Read below for an example.                                                                                                                  | {}            |
@@ -587,6 +588,7 @@ Here's an example of these settings changed:
     "default_mode": "insert",
     "use_system_clipboard": "never",
     "use_smartcase_find": true,
+    "use_regex_search": true,
     "gdefault": true,
     "toggle_relative_line_numbers": true,
     "highlight_on_yank_duration": 50,

docs/src/visual-customization.md 🔗

@@ -105,7 +105,7 @@ To disable this behavior use:
   // "outline_panel": {"button": false },
   // "collaboration_panel": {"button": false },
   // "git_panel": {"button": false },
-  // "notification_panel": {"button": false },
+
   // "agent": {"button": false },
   // "debugger": {"button": false },
   // "diagnostics": {"button": false },
@@ -588,16 +588,6 @@ See [Terminal settings](./reference/all-settings.md#terminal) for additional non
     "dock": "left", // Where to dock: left, right
     "default_width": 240 // Default width of the collaboration panel.
   },
-  "show_call_status_icon": true, // Shown call status in the OS status bar.
-
-  // Notification Panel
-  "notification_panel": {
-    // Whether to show the notification panel button in the status bar.
-    "button": true,
-    // Where to dock the notification panel. Can be 'left' or 'right'.
-    "dock": "right",
-    // Default width of the notification panel.
-    "default_width": 380
-  }
+  "show_call_status_icon": true // Show call status in the OS status bar.
 }
 ```

plan.md 🔗

@@ -0,0 +1,117 @@
+# Code Review
+
+## Verbatim Review Items
+
+### 1. Broken Cancellation Check Mid-Archive (Correctness)
+In `sidebar.rs`'s `archive_worktree` loop, you check for cancellation mid-archive like this:
+```rust
+// Check for cancellation before each root
+if cancel_rx.try_recv().is_ok() {
+    // ...
+}
+```
+This will never trigger. The sender `cancel_tx` is never sent a message; it is simply dropped when `ThreadMetadataStore::unarchive` removes it from `in_flight_archives`. When a channel's sender is dropped, `try_recv()` returns `Err(smol::channel::TryRecvError::Closed)`. Because it returns an `Err`, `is_ok()` evaluates to `false`. Therefore, the loop will fail to abort if the user clicks "Unarchive" while archiving is in progress.
+
+**Suggestion:**
+Change the condition to check if the channel is closed:
+```rust
+if cancel_rx.is_closed() {
+    // ...
+}
+```
+
+### 2. Incomplete Worktree Linking for Multi-Worktree Threads (Correctness)
+In `persist_worktree_state` (inside `thread_worktree_archive.rs`), you link other threads to the archived worktree using `all_session_ids_for_path(folder_paths)`. The problem is that `folder_paths` here is the *exact* `PathList` of the archiving thread. 
+
+If Thread A has `["/a", "/b"]` and Thread B has just `["/a"]`:
+1. Thread B is archived first. It doesn't archive the worktree because `path_is_referenced_by_other_unarchived_threads` sees Thread A still using it.
+2. Thread A is archived. It archives both `/a` and `/b`. 
+3. When it links threads to `/a`'s archive record, it looks for threads with the exact `PathList` `["/a", "/b"]`. Thread B has `["/a"]`, so it is **not** linked.
+4. When Thread B is later unarchived, it will fail to find its worktree backup.
+
+**Suggestion:**
+Instead of matching the exact `PathList`, iterate over all threads in the store and link any thread whose `folder_paths` *contains* the path of the worktree currently being archived (`root.root_path`).
+```rust
+let session_ids: Vec<acp::SessionId> = store.read_with(cx, |store, _cx| {
+    store
+        .entries()
+        .filter(|thread| thread.folder_paths.paths().iter().any(|p| p.as_path() == root.root_path))
+        .map(|thread| thread.session_id.clone())
+        .collect()
+});
+```
+
+### 3. Permanent Leak of Git Refs & DB Records on Thread Deletion (Brittleness & Performance)
+When a thread is permanently deleted (e.g. by pressing Backspace or clicking the trash icon in the Archive view), it calls `ThreadMetadataStore::delete`, which deletes the thread from the `sidebar_threads` table. 
+
+However, it completely ignores the `archived_git_worktrees` and `thread_archived_worktrees` tables. Crucially, the git refs (e.g., `refs/archived-worktrees/<id>`) are left in the main repository forever. This prevents git from ever garbage-collecting the WIP commits and their potentially large file blobs, permanently leaking disk space.
+
+**Suggestion:**
+In `ThreadMetadataStore::delete` (or a new async method orchestrating the deletion), after removing the thread from `sidebar_threads`, fetch its associated `archived_git_worktrees`. Remove the mapping in `thread_archived_worktrees`. For any archived worktree that is no longer referenced by *any* thread, you must:
+1. Delete its DB row in `archived_git_worktrees`.
+2. Delete the git ref via `find_or_create_repository` + `repo.delete_ref(...)`.
+
+### 4. Silently Discarding Errors on Fallible Operations (Maintainability)
+The Zed project `.rules` explicitly state: *"Never silently discard errors with `let _ =` on fallible operations."* 
+
+This rule is violated extensively in `thread_worktree_archive.rs` during rollbacks and cleanup (e.g., lines 250, 303, 318, 344, 361, 392, 429, 477, 486, 649, 654). While it is correct to not halt a rollback if a single step fails, the errors should still be logged for visibility to aid in debugging.
+
+**Suggestion:**
+Since many of these are `oneshot::Receiver<Result<()>>`, you can handle them cleanly like this:
+```rust
+rx.await.ok().and_then(|r| r.log_err());
+```
+Or, if you want custom error contexts:
+```rust
+if let Err(e) = rx.await { 
+    log::error!("rollback failed: {e:#}"); 
+}
+```
+
+### 5. Silent Task Cancellation in `remove_root_after_worktree_removal` (Brittleness)
+In `remove_root_after_worktree_removal`, you await a list of tasks in a loop:
+```rust
+for task in release_tasks {
+    task.await?;
+}
+```
+If the first task errors out, the function returns early. Because Zed `Task`s cancel when dropped, the remaining `wait_for_worktree_release` tasks are instantly canceled. This might be fine because `project.remove_worktree` was already called synchronously, but using `futures::future::try_join_all` would be a more idiomatic way to await them all and handle errors cleanly, or simply logging the error and continuing to wait for the others.
+
+**Suggestion:**
+Consider logging the error and continuing to wait for the rest to ensure all projects actually release the worktree before proceeding to delete it from disk:
+```rust
+for task in release_tasks {
+    if let Err(e) = task.await {
+        log::error!("Failed waiting for worktree release: {e:#}");
+    }
+}
+```
+
+---
+
+## Plan to Address Issues
+
+### 1. Fix Broken Cancellation Check
+- **File:** `crates/sidebar/src/sidebar.rs`
+- **Action:** Update the `if cancel_rx.try_recv().is_ok()` check in `Sidebar::archive_worktree` to use `if cancel_rx.is_closed()`. This correctly detects when the sender is dropped by the unarchiving flow.
+
+### 2. Fix Incomplete Worktree Linking
+- **File:** `crates/agent_ui/src/thread_worktree_archive.rs`
+- **Action:** In `persist_worktree_state`, replace the call to `store.all_session_ids_for_path(folder_paths)` with an iteration over all `store.entries()`. Filter for any threads where `folder_paths.paths()` contains the currently-archiving `root.root_path`. Collect and return these `session_id`s so they are all correctly linked to the archived worktree record.
+
+### 3. Prevent Git Ref & DB Leaks on Thread Deletion
+- **Files:** `crates/agent_ui/src/thread_metadata_store.rs`, `crates/agent_ui/src/threads_archive_view.rs` (and potentially `thread_history_view.rs`)
+- **Action:** 
+  1. Add a method to `ThreadMetadataStore` or `thread_worktree_archive.rs` to handle "deep deletion" of a thread.
+  2. This method will query the DB for all `ArchivedGitWorktree` entries linked to the thread being deleted.
+  3. It will delete the mapping from `thread_archived_worktrees`.
+  4. For each worktree that now has exactly 0 threads mapped to it, delete the row from `archived_git_worktrees` and use the git API (via `find_or_create_repository`) to delete the archived-worktree git ref (`refs/archived-worktrees/<id>`).
+  5. Update the UI actions that currently call the shallow `ThreadMetadataStore::delete` to call this new deep cleanup method.
+
+### 4. Remove Silent Discards of Fallible Operations
+- **File:** `crates/agent_ui/src/thread_worktree_archive.rs`
+- **Action:** Scan for all `let _ = ...` instances where fallible git operations (like resets, branch creation, or branch deletion) occur during rollbacks or fallbacks. Replace them with proper `.log_err()` chains or explicit `if let Err(e) = ...` logging statements to comply with Zed's `.rules` file and improve debuggability.
+
+### 5. Ensure All Worktree Release Tasks Complete
+- **File:** `crates/agent_ui/src/thread_worktree_archive.rs`
+- **Action:** In the `remove_root_after_worktree_removal` function, change the `for` loop that `.await?`s release tasks. Modify it to await every task and log any errors that occur (`if let Err(error) = task.await { log::error!(...); }`), preventing the early return from silently dropping/canceling the remaining await tasks.

tooling/xtask/src/tasks/workflows/compliance_check.rs 🔗

@@ -1,6 +1,8 @@
-use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Workflow};
+use gh_workflow::{Event, Job, Run, Schedule, Step, Workflow, WorkflowDispatch};
+use indoc::formatdoc;
 
 use crate::tasks::workflows::{
+    release::{COMPLIANCE_REPORT_PATH, ComplianceContext, add_compliance_notification_steps},
     runners,
     steps::{self, CommonJobConditions, named},
     vars::{self, StepOutput},
@@ -10,7 +12,9 @@ pub fn compliance_check() -> Workflow {
     let check = scheduled_compliance_check();
 
     named::workflow()
-        .on(Event::default().schedule([Schedule::new("30 17 * * 2")]))
+        .on(Event::default()
+            .schedule([Schedule::new("30 17 * * 2")])
+            .workflow_dispatch(WorkflowDispatch::default()))
         .add_env(("CARGO_TERM_COLOR", "always"))
         .add_job(check.name, check.job)
 }
@@ -32,7 +36,11 @@ fn scheduled_compliance_check() -> steps::NamedJob {
 
     fn run_compliance_check(tag: &StepOutput) -> Step<Run> {
         named::bash(
-            r#"cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report"#,
+            formatdoc! {r#"
+                echo "tag=$LATEST_TAG" >> "$GITHUB_OUTPUT"
+                cargo xtask compliance "$LATEST_TAG" --branch main --report-path {COMPLIANCE_REPORT_PATH}
+                "#,
+            }
         )
         .id("run-compliance-check")
         .add_env(("LATEST_TAG", tag.to_string()))
@@ -40,27 +48,19 @@ fn scheduled_compliance_check() -> steps::NamedJob {
         .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
     }
 
-    fn send_failure_slack_notification(tag: &StepOutput) -> Step<Run> {
-        named::bash(indoc::indoc! {r#"
-            MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews."
+    let job = Job::default()
+        .with_repository_owner_guard()
+        .runs_on(runners::LINUX_SMALL)
+        .add_step(steps::checkout_repo().with_full_history())
+        .add_step(steps::cache_rust_dependencies_namespace())
+        .add_step(determine_version_step)
+        .add_step(run_compliance_check(&tag_output));
 
-            curl -X POST -H 'Content-type: application/json' \
-                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
-                "$SLACK_WEBHOOK"
-        "#})
-        .if_condition(Expression::new("failure()"))
-        .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
-        .add_env(("LATEST_TAG", tag.to_string()))
-    }
-
-    named::job(
-        Job::default()
-            .with_repository_owner_guard()
-            .runs_on(runners::LINUX_SMALL)
-            .add_step(steps::checkout_repo().with_full_history())
-            .add_step(steps::cache_rust_dependencies_namespace())
-            .add_step(determine_version_step)
-            .add_step(run_compliance_check(&tag_output))
-            .add_step(send_failure_slack_notification(&tag_output)),
-    )
+    named::job(add_compliance_notification_steps(
+        job,
+        ComplianceContext::Scheduled {
+            tag_source: tag_output,
+        },
+        "run-compliance-check",
+    ))
 }

tooling/xtask/src/tasks/workflows/release.rs 🔗

@@ -1,13 +1,11 @@
-use gh_workflow::{Event, Expression, Job, Push, Run, Step, Use, Workflow, ctx::Context};
+use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow, ctx::Context};
 use indoc::formatdoc;
 
 use crate::tasks::workflows::{
-    run_bundling::{bundle_linux, bundle_mac, bundle_windows},
+    run_bundling::{bundle_linux, bundle_mac, bundle_windows, upload_artifact},
     run_tests,
     runners::{self, Arch, Platform},
-    steps::{
-        self, CommonJobConditions, FluentBuilder, NamedJob, dependant_job, named, release_job,
-    },
+    steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job},
     vars::{self, StepOutput, assets},
 };
 
@@ -153,57 +151,100 @@ pub(crate) fn create_sentry_release() -> Step<Use> {
     .add_with(("environment", "production"))
 }
 
-fn compliance_check() -> NamedJob {
-    fn run_compliance_check() -> Step<Run> {
-        named::bash(
-            r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_OUTPUT""#,
-        )
-        .id("run-compliance-check")
-        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
-        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
-    }
+pub(crate) const COMPLIANCE_REPORT_PATH: &str = "compliance-report";
+const COMPLIANCE_REPORT_FILE: &str = "target/compliance-report.md";
+const NEEDS_REVIEW_PULLS_URL: &str = "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22";
 
-    fn send_compliance_slack_notification() -> Step<Run> {
-        named::bash(indoc::indoc! {r#"
-            if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
-                STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
-            else
-                STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
-            fi
+pub(crate) enum ComplianceContext {
+    Release,
+    Scheduled { tag_source: StepOutput },
+}
 
-            REPORT_CONTENT=""
-            if [ -f "$COMPLIANCE_FILE_OUTPUT" ]; then
-                REPORT_CONTENT=$(cat "$REPORT_FILE")
-            fi
+pub(crate) fn add_compliance_notification_steps(
+    job: gh_workflow::Job,
+    context: ComplianceContext,
+    compliance_step_id: &str,
+) -> gh_workflow::Job {
+    let upload_step =
+        upload_artifact(COMPLIANCE_REPORT_FILE).if_condition(Expression::new("always()"));
+
+    let (success_prefix, failure_prefix) = match context {
+        ComplianceContext::Release => ("✅ Compliance check passed", "❌ Compliance check failed"),
+        ComplianceContext::Scheduled { .. } => (
+            "✅ Scheduled compliance check passed",
+            "⚠️ Scheduled compliance check failed",
+        ),
+    };
 
-            MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+    let script = formatdoc! {r#"
+        REPORT_CONTENT=""
+        if [ -f "{COMPLIANCE_REPORT_FILE}" ]; then
+            REPORT_CONTENT=$(cat "{COMPLIANCE_REPORT_FILE}")
+        fi
 
-            curl -X POST -H 'Content-type: application/json' \
-                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
-                "$SLACK_WEBHOOK"
-        "#})
+        if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+            STATUS="{success_prefix} for $COMPLIANCE_TAG"
+        else
+            STATUS="{failure_prefix} for $COMPLIANCE_TAG"
+        fi
+
+        MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s\n\n%s" "$STATUS" "$ARTIFACT_URL" "{NEEDS_REVIEW_PULLS_URL}" "$REPORT_CONTENT")
+
+        curl -X POST -H 'Content-type: application/json' \
+            --data "$(jq -n --arg text "$MESSAGE" '{{"text": $text}}')" \
+            "$SLACK_WEBHOOK"
+        "#,
+    };
+
+    let notification_step = Step::new("send_compliance_slack_notification")
+        .run(&script)
         .if_condition(Expression::new("always()"))
         .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
         .add_env((
             "COMPLIANCE_OUTCOME",
-            "${{ steps.run-compliance-check.outcome }}",
+            format!("${{{{ steps.{compliance_step_id}.outcome }}}}"),
+        ))
+        .add_env((
+            "COMPLIANCE_TAG",
+            match context {
+                ComplianceContext::Release => Context::github().ref_name().to_string(),
+                ComplianceContext::Scheduled { tag_source } => tag_source.to_string(),
+            },
         ))
+        .add_env((
+            "ARTIFACT_URL",
+            format!("{CURRENT_ACTION_RUN_URL}#artifacts"),
+        ));
+
+    job.add_step(upload_step).add_step(notification_step)
+}
+
+fn compliance_check() -> NamedJob {
+    fn run_compliance_check() -> Step<Run> {
+        named::bash(formatdoc! {r#"
+            cargo xtask compliance "$GITHUB_REF_NAME" --report-path {COMPLIANCE_REPORT_PATH}
+            "#,
+        })
+        .id("run-compliance-check")
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
     }
 
-    named::job(
-        Job::default()
-            .add_env(("COMPLIANCE_FILE_PATH", "compliance.md"))
-            .with_repository_owner_guard()
-            .runs_on(runners::LINUX_DEFAULT)
-            .add_step(
-                steps::checkout_repo()
-                    .with_full_history()
-                    .with_ref(Context::github().ref_()),
-            )
-            .add_step(steps::cache_rust_dependencies_namespace())
-            .add_step(run_compliance_check())
-            .add_step(send_compliance_slack_notification()),
-    )
+    let job = release_job(&[])
+        .runs_on(runners::LINUX_SMALL)
+        .add_step(
+            steps::checkout_repo()
+                .with_full_history()
+                .with_ref(Context::github().ref_()),
+        )
+        .add_step(steps::cache_rust_dependencies_namespace())
+        .add_step(run_compliance_check());
+
+    named::job(add_compliance_notification_steps(
+        job,
+        ComplianceContext::Release,
+        "run-compliance-check",
+    ))
 }
 
 fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
@@ -229,54 +270,31 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
     };
 
     fn run_post_upload_compliance_check() -> Step<Run> {
-        named::bash(
-            r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report"#,
-        )
+        named::bash(formatdoc! {r#"
+            cargo xtask compliance "$GITHUB_REF_NAME" --report-path {COMPLIANCE_REPORT_PATH}
+            "#,
+        })
         .id("run-post-upload-compliance-check")
         .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
         .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
     }
 
-    fn send_post_upload_compliance_notification() -> Step<Run> {
-        named::bash(indoc::indoc! {r#"
-            if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
-                echo "Compliance check was skipped, not sending notification"
-                exit 0
-            fi
-
-            TAG="$GITHUB_REF_NAME"
-
-            if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
-                MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
-            else
-                MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
-            fi
-
-            curl -X POST -H 'Content-type: application/json' \
-                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
-                "$SLACK_WEBHOOK"
-        "#})
-        .if_condition(Expression::new("always()"))
-        .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
-        .add_env((
-            "COMPLIANCE_OUTCOME",
-            "${{ steps.run-post-upload-compliance-check.outcome }}",
-        ))
-    }
-
-    named::job(
-        dependant_job(deps)
-            .runs_on(runners::LINUX_SMALL)
-            .add_step(named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)))
-            .add_step(
-                steps::checkout_repo()
-                    .with_full_history()
-                    .with_ref(Context::github().ref_()),
-            )
-            .add_step(steps::cache_rust_dependencies_namespace())
-            .add_step(run_post_upload_compliance_check())
-            .add_step(send_post_upload_compliance_notification()),
-    )
+    let job = dependant_job(deps)
+        .runs_on(runners::LINUX_SMALL)
+        .add_step(named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)))
+        .add_step(
+            steps::checkout_repo()
+                .with_full_history()
+                .with_ref("${{ github.ref }}"),
+        )
+        .add_step(steps::cache_rust_dependencies_namespace())
+        .add_step(run_post_upload_compliance_check());
+
+    named::job(add_compliance_notification_steps(
+        job,
+        ComplianceContext::Release,
+        "run-post-upload-compliance-check",
+    ))
 }
 
 fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob {