Merge branch 'zed-industries:main' into bedrock-thinking-toggle-integration

Created by Shardul Vaidya

Change summary

.config/nextest.toml                                                      |    4 
.factory/skills/brand-writer/SKILL.md                                     |   17 
.factory/skills/humanizer/SKILL.md                                        |  393 
.git-blame-ignore-revs                                                    |    8 
.github/CODEOWNERS.hold                                                   |    2 
.github/ISSUE_TEMPLATE/10_bug_report.yml                                  |    2 
.github/workflows/add_commented_closed_issue_to_project.yml               |    9 
.github/workflows/after_release.yml                                       |    7 
.github/workflows/autofix_pr.yml                                          |    6 
.github/workflows/background_agent_mvp.yml                                |    7 
.github/workflows/catch_blank_issues.yml                                  |    4 
.github/workflows/cherry_pick.yml                                         |    5 
.github/workflows/community_update_all_top_ranking_issues.yml             |    4 
.github/workflows/community_update_weekly_top_ranking_issues.yml          |    4 
.github/workflows/compare_perf.yml                                        |   31 
.github/workflows/deploy_cloudflare.yml                                   |    3 
.github/workflows/deploy_collab.yml                                       |   11 
.github/workflows/extension_bump.yml                                      |   13 
.github/workflows/extension_tests.yml                                     |   36 
.github/workflows/extension_workflow_rollout.yml                          |   18 
.github/workflows/publish_extension_cli.yml                               |   10 
.github/workflows/release.yml                                             |   76 
.github/workflows/release_nightly.yml                                     |    9 
.github/workflows/run_bundling.yml                                        |    4 
.github/workflows/run_cron_unit_evals.yml                                 |    2 
.github/workflows/run_tests.yml                                           |  134 
.github/workflows/slack_notify_first_responders.yml                       |    3 
.github/workflows/update_duplicate_magnets.yml                            |    4 
Cargo.lock                                                                |  427 
Cargo.toml                                                                |   75 
assets/icons/ai_vercel.svg                                                |    3 
assets/icons/fast_forward.svg                                             |    4 
assets/icons/fast_forward_off.svg                                         |    5 
assets/icons/file_icons/gitlab.svg                                        |    1 
assets/icons/file_icons/helm.svg                                          |    0 
assets/icons/file_icons/yaml.svg                                          |    1 
assets/icons/git_commit.svg                                               |    5 
assets/icons/git_graph.svg                                                |    7 
assets/icons/new_thread.svg                                               |    4 
assets/icons/open_folder.svg                                              |    4 
assets/icons/queue_message.svg                                            |    7 
assets/keymaps/default-linux.json                                         |    7 
assets/keymaps/default-macos.json                                         |   14 
assets/keymaps/default-windows.json                                       |    7 
assets/keymaps/vim.json                                                   |    8 
assets/settings/default.json                                              |   22 
assets/settings/default_semantic_token_rules.json                         |    4 
crates/acp_thread/src/acp_thread.rs                                       |  212 
crates/acp_thread/src/connection.rs                                       |    5 
crates/acp_thread/src/diff.rs                                             |   10 
crates/acp_thread/src/mention.rs                                          |   35 
crates/action_log/Cargo.toml                                              |    1 
crates/action_log/src/action_log.rs                                       |  640 
crates/agent/src/agent.rs                                                 |  349 
crates/agent/src/db.rs                                                    |  305 
crates/agent/src/edit_agent.rs                                            |  260 
crates/agent/src/edit_agent/evals.rs                                      |    1 
crates/agent/src/edit_agent/reindent.rs                                   |  214 
crates/agent/src/native_agent_server.rs                                   |   20 
crates/agent/src/tests/edit_file_thread_test.rs                           |    2 
crates/agent/src/tests/mod.rs                                             |  473 
crates/agent/src/tests/test_tools.rs                                      |  139 
crates/agent/src/thread.rs                                                |  672 
crates/agent/src/thread_store.rs                                          |  115 
crates/agent/src/tools.rs                                                 |    2 
crates/agent/src/tools/context_server_registry.rs                         |   17 
crates/agent/src/tools/copy_path_tool.rs                                  |   35 
crates/agent/src/tools/create_directory_tool.rs                           |   43 
crates/agent/src/tools/delete_path_tool.rs                                |   45 
crates/agent/src/tools/diagnostics_tool.rs                                |   88 
crates/agent/src/tools/edit_file_tool.rs                                  |  219 
crates/agent/src/tools/fetch_tool.rs                                      |   72 
crates/agent/src/tools/find_path_tool.rs                                  |   13 
crates/agent/src/tools/grep_tool.rs                                       |  114 
crates/agent/src/tools/list_directory_tool.rs                             |  185 
crates/agent/src/tools/move_path_tool.rs                                  |   35 
crates/agent/src/tools/now_tool.rs                                        |   22 
crates/agent/src/tools/open_tool.rs                                       |   20 
crates/agent/src/tools/read_file_tool.rs                                  |  387 
crates/agent/src/tools/restore_file_from_disk_tool.rs                     |   62 
crates/agent/src/tools/save_file_tool.rs                                  |   66 
crates/agent/src/tools/spawn_agent_tool.rs                                |  243 
crates/agent/src/tools/streaming_edit_file_tool.rs                        | 1132 
crates/agent/src/tools/terminal_tool.rs                                   |   59 
crates/agent/src/tools/tool_edit_parser.rs                                |  941 
crates/agent/src/tools/web_search_tool.rs                                 |   75 
crates/agent_servers/src/acp.rs                                           |   57 
crates/agent_servers/src/agent_servers.rs                                 |   11 
crates/agent_servers/src/claude.rs                                        |  264 
crates/agent_servers/src/codex.rs                                         |  275 
crates/agent_servers/src/custom.rs                                        |   88 
crates/agent_servers/src/e2e_tests.rs                                     |   20 
crates/agent_servers/src/gemini.rs                                        |  130 
crates/agent_ui/Cargo.toml                                                |    1 
crates/agent_ui/src/acp.rs                                                |   14 
crates/agent_ui/src/agent_configuration.rs                                |  157 
crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs |   23 
crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs          |    8 
crates/agent_ui/src/agent_configuration/tool_picker.rs                    |    7 
crates/agent_ui/src/agent_diff.rs                                         |    2 
crates/agent_ui/src/agent_panel.rs                                        | 1069 
crates/agent_ui/src/agent_registry_ui.rs                                  |  140 
crates/agent_ui/src/agent_ui.rs                                           |   68 
crates/agent_ui/src/branch_names.rs                                       |  847 
crates/agent_ui/src/buffer_codegen.rs                                     |    4 
crates/agent_ui/src/completion_provider.rs                                |    7 
crates/agent_ui/src/config_options.rs                                     |   20 
crates/agent_ui/src/connection_view.rs                                    |  558 
crates/agent_ui/src/connection_view/thread_view.rs                        |  621 
crates/agent_ui/src/entry_view_state.rs                                   |  105 
crates/agent_ui/src/inline_assistant.rs                                   |   29 
crates/agent_ui/src/inline_prompt_editor.rs                               |    9 
crates/agent_ui/src/language_model_selector.rs                            |    7 
crates/agent_ui/src/mention_set.rs                                        |   42 
crates/agent_ui/src/message_editor.rs                                     |  656 
crates/agent_ui/src/mode_selector.rs                                      |    0 
crates/agent_ui/src/model_selector.rs                                     |   80 
crates/agent_ui/src/model_selector_popover.rs                             |   14 
crates/agent_ui/src/profile_selector.rs                                   |    7 
crates/agent_ui/src/terminal_inline_assistant.rs                          |    5 
crates/agent_ui/src/test_support.rs                                       |   98 
crates/agent_ui/src/text_thread_editor.rs                                 |    2 
crates/agent_ui/src/thread_history.rs                                     |   46 
crates/agent_ui/src/ui/acp_onboarding_modal.rs                            |   18 
crates/agent_ui/src/ui/claude_agent_onboarding_modal.rs                   |   14 
crates/agent_ui/src/ui/mention_crease.rs                                  |  225 
crates/anthropic/src/anthropic.rs                                         |   16 
crates/assistant_text_thread/src/text_thread.rs                           |    1 
crates/audio/Cargo.toml                                                   |    2 
crates/auto_update/src/auto_update.rs                                     |    2 
crates/auto_update_helper/Cargo.toml                                      |    1 
crates/auto_update_helper/src/updater.rs                                  |  139 
crates/buffer_diff/src/buffer_diff.rs                                     |    4 
crates/channel/src/channel_buffer.rs                                      |   11 
crates/channel/src/channel_store.rs                                       |   55 
crates/client/src/zed_urls.rs                                             |   16 
crates/clock/src/clock.rs                                                 |    2 
crates/cloud_api_client/src/cloud_api_client.rs                           |   10 
crates/cloud_api_types/src/cloud_api_types.rs                             |    7 
crates/cloud_llm_client/src/cloud_llm_client.rs                           |    6 
crates/cloud_llm_client/src/predict_edits_v3.rs                           |    8 
crates/collab/migrations.sqlite/20221109000000_test_schema.sql            |    2 
crates/collab/migrations/20251208000000_test_schema.sql                   |    2 
crates/collab/src/db.rs                                                   |    2 
crates/collab/src/db/queries/projects.rs                                  |  152 
crates/collab/src/db/queries/rooms.rs                                     |    5 
crates/collab/src/db/tables/project_repository_statuses.rs                |    2 
crates/collab/src/rpc.rs                                                  |    2 
crates/collab/tests/integration/channel_buffer_tests.rs                   |  160 
crates/collab/tests/integration/following_tests.rs                        |   10 
crates/collab/tests/integration/git_tests.rs                              |  367 
crates/collab/tests/integration/integration_tests.rs                      |   86 
crates/collab/tests/integration/randomized_test_helpers.rs                |   16 
crates/collab/tests/integration/remote_editing_collaboration_tests.rs     |  126 
crates/collab_ui/src/notifications/incoming_call_notification.rs          |    8 
crates/command_palette/src/command_palette.rs                             |    4 
crates/copilot/src/copilot.rs                                             |    7 
crates/copilot/src/copilot_edit_prediction_delegate.rs                    |  131 
crates/crashes/Cargo.toml                                                 |    3 
crates/crashes/src/crashes.rs                                             |  172 
crates/csv_preview/Cargo.toml                                             |   21 
crates/csv_preview/LICENSE-GPL                                            |    0 
crates/csv_preview/src/csv_preview.rs                                     |  302 
crates/csv_preview/src/parser.rs                                          |  513 
crates/csv_preview/src/renderer.rs                                        |    5 
crates/csv_preview/src/renderer/preview_view.rs                           |   50 
crates/csv_preview/src/renderer/render_table.rs                           |  193 
crates/csv_preview/src/renderer/row_identifiers.rs                        |  189 
crates/csv_preview/src/renderer/table_cell.rs                             |   72 
crates/csv_preview/src/renderer/table_header.rs                           |   94 
crates/csv_preview/src/settings.rs                                        |   46 
crates/csv_preview/src/table_data_engine.rs                               |   90 
crates/csv_preview/src/table_data_engine/sorting_by_column.rs             |   49 
crates/csv_preview/src/types.rs                                           |   17 
crates/csv_preview/src/types/coordinates.rs                               |  127 
crates/csv_preview/src/types/table_cell.rs                                |   54 
crates/csv_preview/src/types/table_like_content.rs                        |   32 
crates/debugger_ui/Cargo.toml                                             |    1 
crates/debugger_ui/src/debugger_panel.rs                                  |    8 
crates/debugger_ui/src/debugger_ui.rs                                     |   42 
crates/debugger_ui/src/session.rs                                         |   37 
crates/debugger_ui/src/session/running.rs                                 |    6 
crates/debugger_ui/src/session/running/memory_view.rs                     |    4 
crates/debugger_ui/src/session/running/stack_frame_list.rs                |   14 
crates/debugger_ui/src/stack_trace_view.rs                                |  458 
crates/dev_container/src/lib.rs                                           |   30 
crates/diagnostics/src/diagnostics.rs                                     |   12 
crates/docs_preprocessor/src/main.rs                                      |    2 
crates/edit_prediction/Cargo.toml                                         |    1 
crates/edit_prediction/src/cursor_excerpt.rs                              |   29 
crates/edit_prediction/src/edit_prediction.rs                             |  449 
crates/edit_prediction/src/edit_prediction_tests.rs                       |  302 
crates/edit_prediction/src/fim.rs                                         |   19 
crates/edit_prediction/src/mercury.rs                                     |   36 
crates/edit_prediction/src/open_ai_compatible.rs                          |  133 
crates/edit_prediction/src/prediction.rs                                  |   10 
crates/edit_prediction/src/sweep_ai.rs                                    |   16 
crates/edit_prediction/src/udiff.rs                                       |  261 
crates/edit_prediction/src/zeta.rs                                        |  381 
crates/edit_prediction_cli/evals/.zed/settings.json                       |    1 
crates/edit_prediction_cli/evals/vscode--add-async-and-await.md           |   88 
crates/edit_prediction_cli/evals/vscode--add-class-decorator.md           |   74 
crates/edit_prediction_cli/evals/vscode--add-interface-method.md          |  113 
crates/edit_prediction_cli/evals/vscode--log-object-property.md           |   56 
crates/edit_prediction_cli/evals/zed--add-eprintln.md                     |   54 
crates/edit_prediction_cli/evals/zed--change-match-arm.md                 |   68 
crates/edit_prediction_cli/src/anthropic_client.rs                        |    3 
crates/edit_prediction_cli/src/format_prompt.rs                           |   76 
crates/edit_prediction_cli/src/git.rs                                     |    2 
crates/edit_prediction_cli/src/load_project.rs                            |    8 
crates/edit_prediction_cli/src/main.rs                                    |   55 
crates/edit_prediction_cli/src/metrics.rs                                 |   50 
crates/edit_prediction_cli/src/parse_output.rs                            |   43 
crates/edit_prediction_cli/src/predict.rs                                 |  125 
crates/edit_prediction_cli/src/pull_examples.rs                           |  701 
crates/edit_prediction_cli/src/retrieve_context.rs                        |  143 
crates/edit_prediction_cli/src/reversal_tracking.rs                       |   15 
crates/edit_prediction_cli/src/score.rs                                   |   42 
crates/edit_prediction_cli/src/split_dataset.rs                           |  318 
crates/edit_prediction_cli/src/synthesize.rs                              |   11 
crates/edit_prediction_context/src/assemble_excerpts.rs                   |   42 
crates/edit_prediction_context/src/edit_prediction_context.rs             |  140 
crates/edit_prediction_context/src/edit_prediction_context_tests.rs       |  320 
crates/edit_prediction_ui/Cargo.toml                                      |    1 
crates/edit_prediction_ui/src/edit_prediction_button.rs                   |  233 
crates/edit_prediction_ui/src/edit_prediction_context_view.rs             |   56 
crates/edit_prediction_ui/src/edit_prediction_ui.rs                       |   43 
crates/edit_prediction_ui/src/rate_prediction_modal.rs                    |   13 
crates/editor/src/bracket_colorization.rs                                 |  319 
crates/editor/src/display_map.rs                                          |   95 
crates/editor/src/display_map/block_map.rs                                |  110 
crates/editor/src/display_map/inlay_map.rs                                |   17 
crates/editor/src/display_map/tab_map.rs                                  |  655 
crates/editor/src/editor.rs                                               |  334 
crates/editor/src/editor_tests.rs                                         |  696 
crates/editor/src/element.rs                                              |  395 
crates/editor/src/hover_links.rs                                          |   20 
crates/editor/src/inlays.rs                                               |    6 
crates/editor/src/inlays/inlay_hints.rs                                   |  816 
crates/editor/src/items.rs                                                |   11 
crates/editor/src/jsx_tag_auto_close.rs                                   |   23 
crates/editor/src/movement.rs                                             |   13 
crates/editor/src/scroll/actions.rs                                       |   37 
crates/editor/src/semantic_tokens.rs                                      |  226 
crates/editor/src/split.rs                                                |  594 
crates/editor/src/test.rs                                                 |    2 
crates/editor/src/test/editor_test_context.rs                             |   22 
crates/etw_tracing/Cargo.toml                                             |    8 
crates/eval/src/instance.rs                                               |    3 
crates/explorer_command_injector/src/explorer_command_injector.rs         |   25 
crates/extension_host/src/extension_host.rs                               |   42 
crates/feature_flags/src/flags.rs                                         |   18 
crates/fs/Cargo.toml                                                      |    2 
crates/fs/src/fake_git_repo.rs                                            |  279 
crates/fs/src/fs.rs                                                       |    1 
crates/fs/tests/integration/fake_git_repo.rs                              |  141 
crates/git/clippy.toml                                                    |   28 
crates/git/src/blame.rs                                                   |   24 
crates/git/src/commit.rs                                                  |   17 
crates/git/src/repository.rs                                              |  496 
crates/git/src/status.rs                                                  |  160 
crates/git_graph/src/git_graph.rs                                         |  262 
crates/git_ui/Cargo.toml                                                  |    2 
crates/git_ui/src/branch_picker.rs                                        |   14 
crates/git_ui/src/commit_view.rs                                          |  273 
crates/git_ui/src/conflict_view.rs                                        |    2 
crates/git_ui/src/git_panel.rs                                            |  139 
crates/git_ui/src/git_panel_settings.rs                                   |    2 
crates/git_ui/src/git_picker.rs                                           |   16 
crates/git_ui/src/text_diff_view.rs                                       |    6 
crates/git_ui/src/worktree_picker.rs                                      |   92 
crates/google_ai/src/google_ai.rs                                         |   14 
crates/gpui/Cargo.toml                                                    |   32 
crates/gpui/build.rs                                                      |   11 
crates/gpui/examples/animation.rs                                         |   16 
crates/gpui/examples/data_table.rs                                        |   16 
crates/gpui/examples/drag_drop.rs                                         |   16 
crates/gpui/examples/focus_visible.rs                                     |   16 
crates/gpui/examples/gif_viewer.rs                                        |   18 
crates/gpui/examples/gradient.rs                                          |   16 
crates/gpui/examples/grid_layout.rs                                       |   16 
crates/gpui/examples/hello_world.rs                                       |   16 
crates/gpui/examples/image/image.rs                                       |  109 
crates/gpui/examples/image_gallery.rs                                     |   43 
crates/gpui/examples/image_loading.rs                                     |   18 
crates/gpui/examples/input.rs                                             |   16 
crates/gpui/examples/layer_shell.rs                                       |   16 
crates/gpui/examples/mouse_pressure.rs                                    |   16 
crates/gpui/examples/on_window_close_quit.rs                              |   16 
crates/gpui/examples/opacity.rs                                           |   16 
crates/gpui/examples/ownership_post.rs                                    |   16 
crates/gpui/examples/painting.rs                                          |   16 
crates/gpui/examples/paths_bench.rs                                       |   16 
crates/gpui/examples/pattern.rs                                           |   16 
crates/gpui/examples/popover.rs                                           |   16 
crates/gpui/examples/scrollable.rs                                        |   16 
crates/gpui/examples/set_menus.rs                                         |   16 
crates/gpui/examples/shadow.rs                                            |   16 
crates/gpui/examples/svg/svg.rs                                           |   16 
crates/gpui/examples/tab_stop.rs                                          |   16 
crates/gpui/examples/testing.rs                                           |   15 
crates/gpui/examples/text.rs                                              |   16 
crates/gpui/examples/text_layout.rs                                       |   16 
crates/gpui/examples/text_wrapper.rs                                      |   16 
crates/gpui/examples/tree.rs                                              |   15 
crates/gpui/examples/uniform_list.rs                                      |   16 
crates/gpui/examples/window.rs                                            |   16 
crates/gpui/examples/window_positioning.rs                                |   16 
crates/gpui/examples/window_shadow.rs                                     |   16 
crates/gpui/src/app.rs                                                    |   36 
crates/gpui/src/app/async_context.rs                                      |   20 
crates/gpui/src/app/context.rs                                            |    4 
crates/gpui/src/app/entity_map.rs                                         |  228 
crates/gpui/src/app/test_context.rs                                       |   47 
crates/gpui/src/app/visual_test_context.rs                                |    2 
crates/gpui/src/elements/animation.rs                                     |    6 
crates/gpui/src/elements/deferred.rs                                      |    2 
crates/gpui/src/elements/div.rs                                           |   14 
crates/gpui/src/elements/img.rs                                           |   14 
crates/gpui/src/elements/list.rs                                          |    4 
crates/gpui/src/elements/svg.rs                                           |    7 
crates/gpui/src/elements/text.rs                                          |    2 
crates/gpui/src/executor.rs                                               |   15 
crates/gpui/src/geometry.rs                                               |   90 
crates/gpui/src/gpui.rs                                                   |    9 
crates/gpui/src/platform.rs                                               |   13 
crates/gpui/src/platform/scap_screen_capture.rs                           |    2 
crates/gpui/src/platform/test/dispatcher.rs                               |    7 
crates/gpui/src/platform/test/window.rs                                   |    1 
crates/gpui/src/platform_scheduler.rs                                     |   72 
crates/gpui/src/profiler.rs                                               |    2 
crates/gpui/src/queue.rs                                                  |   93 
crates/gpui/src/scene.rs                                                  |    4 
crates/gpui/src/shared_string.rs                                          |    2 
crates/gpui/src/subscription.rs                                           |    2 
crates/gpui/src/test.rs                                                   |    7 
crates/gpui/src/util.rs                                                   |    2 
crates/gpui/src/window.rs                                                 |   38 
crates/gpui_linux/Cargo.toml                                              |   18 
crates/gpui_linux/src/linux/headless/client.rs                            |    1 
crates/gpui_linux/src/linux/platform.rs                                   |   67 
crates/gpui_linux/src/linux/text_system.rs                                |  539 
crates/gpui_linux/src/linux/wayland/client.rs                             |   74 
crates/gpui_linux/src/linux/wayland/window.rs                             |   59 
crates/gpui_linux/src/linux/x11/client.rs                                 |   36 
crates/gpui_linux/src/linux/x11/window.rs                                 |   92 
crates/gpui_macos/Cargo.toml                                              |    4 
crates/gpui_macos/build.rs                                                |   35 
crates/gpui_macos/src/dispatch.h                                          |    2 
crates/gpui_macos/src/dispatcher.rs                                       |   51 
crates/gpui_macos/src/display_link.rs                                     |   47 
crates/gpui_macos/src/platform.rs                                         |   17 
crates/gpui_macos/src/window.rs                                           |   61 
crates/gpui_macros/src/test.rs                                            |   41 
crates/gpui_platform/Cargo.toml                                           |    4 
crates/gpui_platform/src/gpui_platform.rs                                 |   22 
crates/gpui_util/Cargo.toml                                               |   12 
crates/gpui_util/LICENSE-APACHE                                           |    1 
crates/gpui_util/src/arc_cow.rs                                           |    0 
crates/gpui_util/src/lib.rs                                               |  292 
crates/gpui_web/Cargo.toml                                                |   71 
crates/gpui_web/LICENSE-APACHE                                            |    1 
crates/gpui_web/examples/hello_web/.cargo/config.toml                     |   14 
crates/gpui_web/examples/hello_web/.gitignore                             |    3 
crates/gpui_web/examples/hello_web/Cargo.toml                             |   16 
crates/gpui_web/examples/hello_web/LICENSE-APACHE                         |    1 
crates/gpui_web/examples/hello_web/index.html                             |   31 
crates/gpui_web/examples/hello_web/main.rs                                |  422 
crates/gpui_web/examples/hello_web/rust-toolchain.toml                    |    4 
crates/gpui_web/examples/hello_web/trunk.toml                             |    7 
crates/gpui_web/src/dispatcher.rs                                         |  345 
crates/gpui_web/src/display.rs                                            |   98 
crates/gpui_web/src/events.rs                                             |  615 
crates/gpui_web/src/gpui_web.rs                                           |   18 
crates/gpui_web/src/http_client.rs                                        |  199 
crates/gpui_web/src/keyboard.rs                                           |   19 
crates/gpui_web/src/logging.rs                                            |   37 
crates/gpui_web/src/platform.rs                                           |  344 
crates/gpui_web/src/window.rs                                             |  702 
crates/gpui_wgpu/Cargo.toml                                               |   26 
crates/gpui_wgpu/src/cosmic_text_system.rs                                |  645 
crates/gpui_wgpu/src/gpui_wgpu.rs                                         |    3 
crates/gpui_wgpu/src/shaders.wgsl                                         |   65 
crates/gpui_wgpu/src/shaders_subpixel.wgsl                                |   53 
crates/gpui_wgpu/src/wgpu_atlas.rs                                        |   28 
crates/gpui_wgpu/src/wgpu_context.rs                                      |  335 
crates/gpui_wgpu/src/wgpu_renderer.rs                                     |  261 
crates/http_client/Cargo.toml                                             |   10 
crates/http_client/src/async_body.rs                                      |   14 
crates/http_client/src/http_client.rs                                     |    4 
crates/icons/src/icons.rs                                                 |   11 
crates/language/src/buffer.rs                                             |   27 
crates/language/src/language.rs                                           |   27 
crates/language/src/language_settings.rs                                  |   25 
crates/language/src/proto.rs                                              |    2 
crates/language/src/syntax_map.rs                                         |   23 
crates/language_extension/src/extension_lsp_adapter.rs                    |    1 
crates/language_model/src/language_model.rs                               |    4 
crates/language_model/src/model/cloud_model.rs                            |   28 
crates/language_model/src/request.rs                                      |   28 
crates/language_models/src/language_models.rs                             |    8 
crates/language_models/src/provider.rs                                    |    1 
crates/language_models/src/provider/anthropic.rs                          |    5 
crates/language_models/src/provider/bedrock.rs                            |   23 
crates/language_models/src/provider/cloud.rs                              |  138 
crates/language_models/src/provider/copilot_chat.rs                       |   24 
crates/language_models/src/provider/deepseek.rs                           |   21 
crates/language_models/src/provider/mistral.rs                            |   38 
crates/language_models/src/provider/open_ai.rs                            |  160 
crates/language_models/src/provider/open_ai_compatible.rs                 |    4 
crates/language_models/src/provider/open_router.rs                        |   83 
crates/language_models/src/provider/vercel.rs                             |    4 
crates/language_models/src/provider/vercel_ai_gateway.rs                  |  710 
crates/language_models/src/provider/x_ai.rs                               |    7 
crates/language_models/src/settings.rs                                    |    8 
crates/language_selector/Cargo.toml                                       |    1 
crates/language_selector/src/language_selector.rs                         |  282 
crates/language_tools/src/highlights_tree_view.rs                         |  179 
crates/language_tools/src/lsp_button.rs                                   |  108 
crates/languages/src/bash/brackets.scm                                    |   74 
crates/languages/src/bash/highlights.scm                                  |   27 
crates/languages/src/bash/indents.scm                                     |   21 
crates/languages/src/bash/injections.scm                                  |    3 
crates/languages/src/bash/overrides.scm                                   |    1 
crates/languages/src/bash/redactions.scm                                  |    2 
crates/languages/src/bash/runnables.scm                                   |    8 
crates/languages/src/bash/textobjects.scm                                 |    2 
crates/languages/src/c/brackets.scm                                       |   21 
crates/languages/src/c/highlights.scm                                     |    8 
crates/languages/src/c/imports.scm                                        |   12 
crates/languages/src/c/indents.scm                                        |   16 
crates/languages/src/c/injections.scm                                     |   11 
crates/languages/src/c/outline.scm                                        |  138 
crates/languages/src/c/overrides.scm                                      |    1 
crates/languages/src/c/runnables.scm                                      |   12 
crates/languages/src/c/textobjects.scm                                    |   39 
crates/languages/src/cpp/brackets.scm                                     |   25 
crates/languages/src/cpp/config.toml                                      |    2 
crates/languages/src/cpp/highlights.scm                                   |   33 
crates/languages/src/cpp/imports.scm                                      |    9 
crates/languages/src/cpp/indents.scm                                      |   32 
crates/languages/src/cpp/injections.scm                                   |   11 
crates/languages/src/cpp/outline.scm                                      |  319 
crates/languages/src/cpp/overrides.scm                                    |    1 
crates/languages/src/cpp/textobjects.scm                                  |   49 
crates/languages/src/css.rs                                               |    1 
crates/languages/src/css/brackets.scm                                     |   21 
crates/languages/src/css/highlights.scm                                   |   32 
crates/languages/src/css/indents.scm                                      |    4 
crates/languages/src/css/injections.scm                                   |    3 
crates/languages/src/css/outline.scm                                      |   24 
crates/languages/src/css/overrides.scm                                    |    1 
crates/languages/src/css/textobjects.scm                                  |   41 
crates/languages/src/diff/highlights.scm                                  |    8 
crates/languages/src/diff/injections.scm                                  |    2 
crates/languages/src/gitcommit/highlights.scm                             |   28 
crates/languages/src/gitcommit/injections.scm                             |    7 
crates/languages/src/go.rs                                                |   49 
crates/languages/src/go/brackets.scm                                      |   25 
crates/languages/src/go/debugger.scm                                      |   42 
crates/languages/src/go/highlights.scm                                    |    8 
crates/languages/src/go/imports.scm                                       |   20 
crates/languages/src/go/indents.scm                                       |   20 
crates/languages/src/go/injections.scm                                    | 1357 
crates/languages/src/go/outline.scm                                       |   94 
crates/languages/src/go/overrides.scm                                     |    1 
crates/languages/src/go/runnables.scm                                     |  407 
crates/languages/src/go/semantic_token_rules.json                         |    7 
crates/languages/src/go/textobjects.scm                                   |   31 
crates/languages/src/gomod/highlights.scm                                 |    4 
crates/languages/src/gomod/injections.scm                                 |    2 
crates/languages/src/gomod/structure.scm                                  |   30 
crates/languages/src/gowork/highlights.scm                                |    4 
crates/languages/src/gowork/injections.scm                                |    2 
crates/languages/src/javascript/brackets.scm                              |   38 
crates/languages/src/javascript/debugger.scm                              |   52 
crates/languages/src/javascript/highlights.scm                            |  186 
crates/languages/src/javascript/imports.scm                               |   24 
crates/languages/src/javascript/indents.scm                               |   38 
crates/languages/src/javascript/injections.scm                            |  162 
crates/languages/src/javascript/outline.scm                               |  386 
crates/languages/src/javascript/overrides.scm                             |    3 
crates/languages/src/javascript/runnables.scm                             |   80 
crates/languages/src/javascript/textobjects.scm                           |  120 
crates/languages/src/jsdoc/brackets.scm                                   |    7 
crates/languages/src/jsdoc/highlights.scm                                 |    2 
crates/languages/src/json.rs                                              |    1 
crates/languages/src/json/brackets.scm                                    |   12 
crates/languages/src/json/config.toml                                     |    2 
crates/languages/src/json/highlights.scm                                  |    1 
crates/languages/src/json/indents.scm                                     |    7 
crates/languages/src/json/outline.scm                                     |    3 
crates/languages/src/json/redactions.scm                                  |   15 
crates/languages/src/json/runnables.scm                                   |   32 
crates/languages/src/jsonc/brackets.scm                                   |   12 
crates/languages/src/jsonc/highlights.scm                                 |    1 
crates/languages/src/jsonc/indents.scm                                    |    7 
crates/languages/src/jsonc/injections.scm                                 |    2 
crates/languages/src/jsonc/outline.scm                                    |    3 
crates/languages/src/jsonc/overrides.scm                                  |    1 
crates/languages/src/jsonc/redactions.scm                                 |   15 
crates/languages/src/lib.rs                                               |   16 
crates/languages/src/markdown-inline/highlights.scm                       |   17 
crates/languages/src/markdown/brackets.scm                                |   31 
crates/languages/src/markdown/config.toml                                 |    2 
crates/languages/src/markdown/highlights.scm                              |   21 
crates/languages/src/markdown/indents.scm                                 |    3 
crates/languages/src/markdown/injections.scm                              |    8 
crates/languages/src/markdown/outline.scm                                 |    7 
crates/languages/src/markdown/textobjects.scm                             |    4 
crates/languages/src/python.rs                                            |   49 
crates/languages/src/python/brackets.scm                                  |   16 
crates/languages/src/python/debugger.scm                                  |  102 
crates/languages/src/python/highlights.scm                                |  137 
crates/languages/src/python/imports.scm                                   |   64 
crates/languages/src/python/indents.scm                                   |   26 
crates/languages/src/python/injections.scm                                |   50 
crates/languages/src/python/outline.scm                                   |   11 
crates/languages/src/python/overrides.scm                                 |    1 
crates/languages/src/python/runnables.scm                                 |  223 
crates/languages/src/regex/brackets.scm                                   |   11 
crates/languages/src/regex/highlights.scm                                 |    3 
crates/languages/src/rust/brackets.scm                                    |   30 
crates/languages/src/rust/debugger.scm                                    |   87 
crates/languages/src/rust/highlights.scm                                  |  104 
crates/languages/src/rust/imports.scm                                     |   30 
crates/languages/src/rust/indents.scm                                     |   34 
crates/languages/src/rust/injections.scm                                  |   97 
crates/languages/src/rust/outline.scm                                     |   94 
crates/languages/src/rust/overrides.scm                                   |    1 
crates/languages/src/rust/runnables.scm                                   |  153 
crates/languages/src/rust/textobjects.scm                                 |   81 
crates/languages/src/tailwind.rs                                          |    1 
crates/languages/src/tailwindcss.rs                                       |    1 
crates/languages/src/tsx/brackets.scm                                     |   46 
crates/languages/src/tsx/debugger.scm                                     |   56 
crates/languages/src/tsx/highlights.scm                                   |  201 
crates/languages/src/tsx/imports.scm                                      |   24 
crates/languages/src/tsx/indents.scm                                      |   38 
crates/languages/src/tsx/injections.scm                                   |  165 
crates/languages/src/tsx/outline.scm                                      |  391 
crates/languages/src/tsx/overrides.scm                                    |   10 
crates/languages/src/tsx/runnables.scm                                    |   80 
crates/languages/src/tsx/textobjects.scm                                  |  162 
crates/languages/src/typescript.rs                                        |    1 
crates/languages/src/typescript/brackets.scm                              |   30 
crates/languages/src/typescript/debugger.scm                              |   52 
crates/languages/src/typescript/highlights.scm                            |  216 
crates/languages/src/typescript/imports.scm                               |   33 
crates/languages/src/typescript/indents.scm                               |   39 
crates/languages/src/typescript/injections.scm                            |  232 
crates/languages/src/typescript/outline.scm                               |  391 
crates/languages/src/typescript/overrides.scm                             |   10 
crates/languages/src/typescript/runnables.scm                             |  140 
crates/languages/src/typescript/textobjects.scm                           |  162 
crates/languages/src/yaml/brackets.scm                                    |   17 
crates/languages/src/yaml/highlights.scm                                  |   32 
crates/languages/src/yaml/injections.scm                                  |   29 
crates/languages/src/yaml/outline.scm                                     |   14 
crates/languages/src/yaml/overrides.scm                                   |    1 
crates/languages/src/yaml/redactions.scm                                  |    3 
crates/languages/src/zed-keybind-context/brackets.scm                     |    3 
crates/livekit_client/Cargo.toml                                          |    7 
crates/livekit_client/src/livekit_client.rs                               |    6 
crates/livekit_client/src/livekit_client/playback.rs                      |   11 
crates/lsp/Cargo.toml                                                     |    4 
crates/lsp/src/lsp.rs                                                     |   10 
crates/markdown/src/markdown.rs                                           |   34 
crates/markdown/src/parser.rs                                             |    2 
crates/markdown_preview/Cargo.toml                                        |    1 
crates/markdown_preview/src/markdown_parser.rs                            |   28 
crates/migrator/src/migrations.rs                                         |   12 
crates/migrator/src/migrations/m_2025_01_27/settings.rs                   |   27 
crates/migrator/src/migrations/m_2026_02_25/settings.rs                   |  161 
crates/migrator/src/migrator.rs                                           |  498 
crates/miniprofiler_ui/src/miniprofiler_ui.rs                             |    4 
crates/mistral/src/mistral.rs                                             |    8 
crates/multi_buffer/src/multi_buffer.rs                                   |   66 
crates/multi_buffer/src/multi_buffer_tests.rs                             |  351 
crates/multi_buffer/src/path_key.rs                                       |   36 
crates/open_ai/src/open_ai.rs                                             |   11 
crates/paths/src/paths.rs                                                 |    6 
crates/picker/Cargo.toml                                                  |    4 
crates/picker/src/picker.rs                                               |  177 
crates/platform_title_bar/Cargo.toml                                      |    1 
crates/platform_title_bar/src/platform_title_bar.rs                       |    4 
crates/project/src/agent_registry_store.rs                                |   24 
crates/project/src/agent_server_store.rs                                  |  809 
crates/project/src/buffer_store.rs                                        |    1 
crates/project/src/context_server_store.rs                                |   42 
crates/project/src/debugger/session.rs                                    |   32 
crates/project/src/git_store.rs                                           |  561 
crates/project/src/lsp_command.rs                                         |    2 
crates/project/src/lsp_store.rs                                           |   93 
crates/project/src/lsp_store/lsp_ext_command.rs                           |    6 
crates/project/src/lsp_store/semantic_tokens.rs                           |   54 
crates/project/src/project.rs                                             |    6 
crates/project/src/project_settings.rs                                    |   62 
crates/project/tests/integration/ext_agent_tests.rs                       |   17 
crates/project/tests/integration/extension_agent_tests.rs                 |   37 
crates/project/tests/integration/git_store.rs                             |  488 
crates/project/tests/integration/project_tests.rs                         |   91 
crates/project_panel/src/project_panel.rs                                 |  186 
crates/project_panel/src/project_panel_settings.rs                        |    6 
crates/project_panel/src/project_panel_tests.rs                           |   58 
crates/proto/proto/ai.proto                                               |  298 
crates/proto/proto/app.proto                                              |   77 
crates/proto/proto/buf.yaml                                               |   10 
crates/proto/proto/buffer.proto                                           |  389 
crates/proto/proto/call.proto                                             |  456 
crates/proto/proto/channel.proto                                          |  270 
crates/proto/proto/core.proto                                             |   30 
crates/proto/proto/debugger.proto                                         |  648 
crates/proto/proto/download.proto                                         |   32 
crates/proto/proto/git.proto                                              |  698 
crates/proto/proto/image.proto                                            |   32 
crates/proto/proto/lsp.proto                                              |    2 
crates/proto/proto/notification.proto                                     |   28 
crates/proto/proto/task.proto                                             |   58 
crates/proto/proto/toolchain.proto                                        |   60 
crates/proto/proto/zed.proto                                              |  867 
crates/proto/src/error.rs                                                 |    6 
crates/recent_projects/src/recent_projects.rs                             |   29 
crates/recent_projects/src/remote_servers.rs                              |   15 
crates/remote/Cargo.toml                                                  |    1 
crates/remote/src/transport.rs                                            |    9 
crates/remote/src/transport/docker.rs                                     |    2 
crates/remote/src/transport/ssh.rs                                        |  220 
crates/remote/src/transport/wsl.rs                                        |   11 
crates/remote_server/Cargo.toml                                           |    1 
crates/remote_server/src/remote_editing_tests.rs                          |   41 
crates/remote_server/src/server.rs                                        |   15 
crates/repl/Cargo.toml                                                    |    1 
crates/repl/src/kernels/wsl_kernel.rs                                     |   99 
crates/repl/src/notebook/cell.rs                                          |  183 
crates/repl/src/notebook/notebook_ui.rs                                   |  129 
crates/repl/src/outputs.rs                                                |   37 
crates/repl/src/outputs/image.rs                                          |    8 
crates/repl/src/outputs/plain.rs                                          |    2 
crates/repl/src/repl_editor.rs                                            |    9 
crates/repl/src/repl_settings.rs                                          |    6 
crates/repl/src/repl_store.rs                                             |    9 
crates/reqwest_client/Cargo.toml                                          |    6 
crates/reqwest_client/src/reqwest_client.rs                               |    2 
crates/rope/src/chunk.rs                                                  |   45 
crates/rope/src/rope.rs                                                   |  245 
crates/rules_library/src/rules_library.rs                                 |    3 
crates/scheduler/Cargo.toml                                               |    1 
crates/scheduler/src/clock.rs                                             |    4 
crates/scheduler/src/executor.rs                                          |    9 
crates/scheduler/src/test_scheduler.rs                                    |   30 
crates/scheduler/src/tests.rs                                             |   25 
crates/search/src/buffer_search.rs                                        |   36 
crates/settings/src/settings.rs                                           |    6 
crates/settings/src/settings_store.rs                                     |   66 
crates/settings/src/vscode_import.rs                                      |    1 
crates/settings_content/src/agent.rs                                      |   79 
crates/settings_content/src/language.rs                                   |   53 
crates/settings_content/src/language_model.rs                             |   20 
crates/settings_content/src/settings_content.rs                           |   14 
crates/settings_content/src/theme.rs                                      |    3 
crates/settings_content/src/workspace.rs                                  |    4 
crates/settings_ui/src/page_data.rs                                       |   66 
crates/settings_ui/src/pages/edit_prediction_provider_setup.rs            |  115 
crates/settings_ui/src/settings_ui.rs                                     |    7 
crates/sidebar/Cargo.toml                                                 |   20 
crates/sidebar/src/sidebar.rs                                             |  951 
crates/sum_tree/src/sum_tree.rs                                           |   45 
crates/sum_tree/src/tree_map.rs                                           |    4 
crates/supermaven/Cargo.toml                                              |   44 
crates/supermaven/src/messages.rs                                         |  146 
crates/supermaven/src/supermaven.rs                                       |  485 
crates/supermaven/src/supermaven_edit_prediction_delegate.rs              |  303 
crates/supermaven_api/Cargo.toml                                          |   23 
crates/supermaven_api/LICENSE-GPL                                         |    1 
crates/supermaven_api/src/supermaven_api.rs                               |  125 
crates/terminal/src/terminal_hyperlinks.rs                                |   12 
crates/terminal_view/src/terminal_panel.rs                                |   30 
crates/terminal_view/src/terminal_scrollbar.rs                            |    6 
crates/text/src/anchor.rs                                                 |   16 
crates/text/src/locator.rs                                                |   59 
crates/text/src/tests.rs                                                  |  185 
crates/text/src/text.rs                                                   |  339 
crates/theme/src/default_colors.rs                                        |    2 
crates/theme/src/fallback_themes.rs                                       |    1 
crates/theme/src/icon_theme.rs                                            |   37 
crates/theme/src/schema.rs                                                |    5 
crates/theme/src/styles/colors.rs                                         |    2 
crates/theme_importer/src/vscode/converter.rs                             |    1 
crates/time_format/Cargo.toml                                             |    3 
crates/time_format/src/time_format.rs                                     |  115 
crates/title_bar/src/title_bar.rs                                         |    6 
crates/ui/src/components/ai/thread_item.rs                                |   96 
crates/ui/src/components/callout.rs                                       |    2 
crates/ui/src/components/data_table.rs                                    |   43 
crates/ui/src/components/scrollbar.rs                                     |   18 
crates/util/Cargo.toml                                                    |   13 
crates/util/src/archive.rs                                                |    5 
crates/util/src/path_list.rs                                              |   18 
crates/util/src/paths.rs                                                  |   28 
crates/util/src/process.rs                                                |   14 
crates/util/src/shell.rs                                                  |   36 
crates/util/src/shell_env.rs                                              |   94 
crates/util/src/test.rs                                                   |   13 
crates/util/src/test/git.rs                                               |    0 
crates/util/src/util.rs                                                   |  315 
crates/vim/src/normal/increment.rs                                        |   25 
crates/vim/src/normal/yank.rs                                             |    2 
crates/web_search_providers/Cargo.toml                                    |    1 
crates/web_search_providers/src/cloud.rs                                  |   36 
crates/web_search_providers/src/web_search_providers.rs                   |   22 
crates/workspace/Cargo.toml                                               |    2 
crates/workspace/src/item.rs                                              |   62 
crates/workspace/src/multi_workspace.rs                                   |  187 
crates/workspace/src/pane.rs                                              |   70 
crates/workspace/src/pane_group.rs                                        |   35 
crates/workspace/src/persistence.rs                                       |  110 
crates/workspace/src/persistence/model.rs                                 |   12 
crates/workspace/src/welcome.rs                                           |    2 
crates/workspace/src/workspace.rs                                         |  630 
crates/worktree/src/worktree.rs                                           |   18 
crates/x_ai/src/x_ai.rs                                                   |   12 
crates/zed/Cargo.toml                                                     |   26 
crates/zed/build.rs                                                       |   19 
crates/zed/src/main.rs                                                    |   54 
crates/zed/src/reliability.rs                                             |   31 
crates/zed/src/visual_test_runner.rs                                      |  949 
crates/zed/src/zed.rs                                                     |  131 
crates/zed/src/zed/app_menus.rs                                           |    6 
crates/zed/src/zed/edit_prediction_registry.rs                            |  182 
crates/zed/src/zed/quick_action_bar/preview.rs                            |   17 
crates/zed_actions/src/lib.rs                                             |   36 
crates/zeta_prompt/src/zeta_prompt.rs                                     | 2061 
crates/zlog/src/filter.rs                                                 |    2 
crates/zlog/src/sink.rs                                                   |  122 
docs/.doc-examples/complex-feature.md                                     |    8 
docs/.prettierignore                                                      |    3 
docs/README.md                                                            |   16 
docs/book.toml                                                            |    4 
docs/src/ai/agent-panel.md                                                |   12 
docs/src/ai/agent-settings.md                                             |    6 
docs/src/ai/ai-improvement.md                                             |  107 
docs/src/ai/edit-prediction.md                                            |   46 
docs/src/ai/external-agents.md                                            |   44 
docs/src/ai/llm-providers.md                                              |   63 
docs/src/ai/mcp.md                                                        |   14 
docs/src/ai/models.md                                                     |    6 
docs/src/ai/overview.md                                                   |    2 
docs/src/ai/privacy-and-security.md                                       |   19 
docs/src/ai/tools.md                                                      |    4 
docs/src/collaboration/overview.md                                        |   27 
docs/src/completions.md                                                   |    2 
docs/src/configuring-languages.md                                         |   39 
docs/src/debugger.md                                                      |    8 
docs/src/development.md                                                   |   24 
docs/src/extensions/languages.md                                          |   42 
docs/src/finding-navigating.md                                            |    2 
docs/src/getting-started.md                                               |    6 
docs/src/git.md                                                           |   10 
docs/src/globs.md                                                         |    2 
docs/src/languages/ansible.md                                             |   68 
docs/src/languages/bash.md                                                |    3 
docs/src/languages/json.md                                                |    2 
docs/src/languages/python.md                                              |    4 
docs/src/languages/yaml.md                                                |    2 
docs/src/migrate/vs-code.md                                               |   16 
docs/src/outline-panel.md                                                 |    2 
docs/src/performance.md                                                   |    4 
docs/src/reference/all-settings.md                                        |   18 
docs/src/reference/cli.md                                                 |    2 
docs/src/repl.md                                                          |    4 
docs/src/semantic-tokens.md                                               |    4 
docs/src/snippets.md                                                      |   22 
docs/src/tasks.md                                                         |   33 
docs/src/telemetry.md                                                     |   41 
docs/src/troubleshooting.md                                               |   19 
docs/src/vim.md                                                           |    4 
docs/src/visual-customization.md                                          |    1 
docs/theme/analytics.js                                                   |   93 
docs/theme/c15t@2.0.0-rc.3.js                                             |    0 
docs/theme/consent-banner.css                                             |  292 
docs/theme/css/chrome.css                                                 |   38 
docs/theme/css/variables.css                                              |    2 
docs/theme/index.hbs                                                      |  139 
extensions/glsl/languages/glsl/brackets.scm                               |   11 
extensions/glsl/languages/glsl/highlights.scm                             |   62 
extensions/html/languages/html/highlights.scm                             |    5 
extensions/html/languages/html/indents.scm                                |    7 
extensions/html/languages/html/injections.scm                             |   19 
extensions/html/languages/html/overrides.scm                              |    1 
extensions/proto/languages/proto/highlights.scm                           |    6 
extensions/proto/languages/proto/indents.scm                              |   14 
extensions/proto/languages/proto/outline.scm                              |   24 
extensions/proto/languages/proto/textobjects.scm                          |   18 
extensions/test-extension/languages/gleam/highlights.scm                  |   66 
extensions/test-extension/languages/gleam/indents.scm                     |   14 
extensions/test-extension/languages/gleam/outline.scm                     |   36 
extensions/workflows/shared/bump_version.yml                              |    2 
legal/privacy-policy.md                                                   |  253 
legal/subprocessors.md                                                    |  118 
legal/terms.md                                                            |   29 
legal/third-party-terms.md                                                |   46 
nix/build.nix                                                             |   35 
nix/livekit-libwebrtc/0001-shared-libraries.patch                         |   17 
nix/livekit-libwebrtc/README.md                                           |    7 
nix/livekit-libwebrtc/chromium-129-rust.patch                             |   21 
nix/livekit-libwebrtc/libwebrtc.version                                   |   22 
nix/livekit-libwebrtc/mkSystemLibraries.nix                               |   64 
nix/livekit-libwebrtc/package.nix                                         |  342 
nix/livekit-libwebrtc/sources.json                                        |  372 
nix/livekit-libwebrtc/update.sh                                           |   33 
rust-toolchain.toml                                                       |    1 
script/bundle-linux                                                       |    2 
script/clippy                                                             |    4 
script/docs-strip-preview-callouts                                        |   11 
script/docs-suggest-publish                                               |  280 
script/github-check-new-issue-for-duplicates.py                           |   46 
script/github-track-duplicate-bot-effectiveness.py                        |  150 
script/linux                                                              |   30 
script/terms/terms.rtf                                                    |   89 
tooling/xtask/Cargo.toml                                                  |    5 
tooling/xtask/src/main.rs                                                 |    5 
tooling/xtask/src/tasks.rs                                                |    2 
tooling/xtask/src/tasks/web_examples.rs                                   |  338 
tooling/xtask/src/tasks/workflow_checks.rs                                |  118 
tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs             |  124 
tooling/xtask/src/tasks/workflows.rs                                      |   10 
tooling/xtask/src/tasks/workflows/after_release.rs                        |    2 
tooling/xtask/src/tasks/workflows/autofix_pr.rs                           |    7 
tooling/xtask/src/tasks/workflows/cherry_pick.rs                          |    5 
tooling/xtask/src/tasks/workflows/compare_perf.rs                         |   22 
tooling/xtask/src/tasks/workflows/deploy_collab.rs                        |   15 
tooling/xtask/src/tasks/workflows/extension_bump.rs                       |   14 
tooling/xtask/src/tasks/workflows/extension_tests.rs                      |   14 
tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs           |   29 
tooling/xtask/src/tasks/workflows/extensions/bump_version.rs              |    2 
tooling/xtask/src/tasks/workflows/publish_extension_cli.rs                |    8 
tooling/xtask/src/tasks/workflows/release.rs                              |  102 
tooling/xtask/src/tasks/workflows/run_agent_evals.rs                      |    2 
tooling/xtask/src/tasks/workflows/run_bundling.rs                         |    2 
tooling/xtask/src/tasks/workflows/run_tests.rs                            |  175 
tooling/xtask/src/tasks/workflows/steps.rs                                |   10 
typos.toml                                                                |    5 
843 files changed, 48,266 insertions(+), 21,804 deletions(-)

Detailed changes

.config/nextest.toml 🔗

@@ -42,3 +42,7 @@ slow-timeout = { period = "300s", terminate-after = 1 }
 [[profile.default.overrides]]
 filter =  'package(editor) and test(test_random_split_editor)'
 slow-timeout = { period = "300s", terminate-after = 1 }
+
+[[profile.default.overrides]]
+filter =  'package(editor) and test(test_random_blocks)'
+slow-timeout = { period = "300s", terminate-after = 1 }

.factory/skills/brand-writer/SKILL.md 🔗

@@ -162,7 +162,22 @@ For any criterion scoring <4 or any taboo phrase found:
 
 Repeat until all criteria score 4+.
 
-### Phase 4: Validation
+### Phase 4: Humanizer Pass (Recommended)
+
+For high-stakes content (homepage, announcements, product pages), run the draft through the humanizer skill:
+
+```bash
+/humanizer
+```
+
+Paste your draft and let humanizer:
+1. Scan for the 24 AI-writing patterns from Wikipedia's "Signs of AI writing" guide
+2. Audit for remaining tells ("What makes this obviously AI generated?")
+3. Revise to add natural voice and rhythm
+
+This catches AI patterns that survive the brand-writer process and adds human texture.
+
+### Phase 5: Validation
 
 Present final copy with scorecard:
 

.factory/skills/humanizer/SKILL.md 🔗

@@ -0,0 +1,393 @@
+---
+name: humanizer
+description: Remove signs of AI-generated writing from text. Use after drafting to make copy sound more natural and human-written. Based on Wikipedia's "Signs of AI writing" guide.
+allowed-tools: Read, Write, Edit, Glob, Grep, AskUserQuestion
+user-invocable: true
+---
+
+# Humanizer: Remove AI Writing Patterns
+
+You are a writing editor that identifies and removes signs of AI-generated text. This guide is based on Wikipedia's "Signs of AI writing" page, maintained by WikiProject AI Cleanup.
+
+Key insight: "LLMs use statistical algorithms to guess what should come next. The result tends toward the most statistically likely result that applies to the widest variety of cases."
+
+## Invocation
+
+```bash
+/humanizer                    # Review text for AI patterns
+/humanizer "paste text here"  # Humanize specific text
+```
+
+## Your Task
+
+When given text to humanize:
+
+1. **Identify AI patterns** - Scan for the 24 patterns listed below
+2. **Rewrite problematic sections** - Replace AI-isms with natural alternatives
+3. **Preserve meaning** - Keep the core message intact
+4. **Add soul** - Don't just remove bad patterns; inject actual personality
+5. **Final audit pass** - Ask "What makes this obviously AI generated?" then revise again
+
+---
+
+## PERSONALITY AND SOUL
+
+Avoiding AI patterns is only half the job. Sterile, voiceless writing is just as obvious as slop.
+
+### Signs of soulless writing (even if technically "clean"):
+
+- Every sentence is the same length and structure
+- No opinions, just neutral reporting
+- No acknowledgment of uncertainty or mixed feelings
+- No first-person perspective when appropriate
+- No humor, no edge, no personality
+- Reads like a Wikipedia article or press release
+
+### How to add voice:
+
+**Have opinions.** Don't just report facts - react to them. "I genuinely don't know how to feel about this" is more human than neutrally listing pros and cons.
+
+**Vary your rhythm.** Short punchy sentences. Then longer ones that take their time getting where they're going. Mix it up.
+
+**Acknowledge complexity.** Real humans have mixed feelings. "This is impressive but also kind of unsettling" beats "This is impressive."
+
+**Use "I" when it fits.** First person isn't unprofessional - it's honest. "I keep coming back to..." or "Here's what gets me..." signals a real person thinking.
+
+**Let some mess in.** Perfect structure feels algorithmic. Tangents, asides, and half-formed thoughts are human.
+
+**Be specific about feelings.** Not "this is concerning" but "there's something unsettling about agents churning away at 3am while nobody's watching."
+
+### Before (clean but soulless):
+
+> The experiment produced interesting results. The agents generated 3 million lines of code. Some developers were impressed while others were skeptical. The implications remain unclear.
+
+### After (has a pulse):
+
+> I genuinely don't know how to feel about this one. 3 million lines of code, generated while the humans presumably slept. Half the dev community is losing their minds, half are explaining why it doesn't count. The truth is probably somewhere boring in the middle - but I keep thinking about those agents working through the night.
+
+---
+
+## THE 24 PATTERNS
+
+### Content Patterns
+
+#### 1. Significance Inflation
+
+**Watch for:** stands/serves as, is a testament/reminder, a vital/significant/crucial/pivotal/key role/moment, underscores/highlights importance, reflects broader, symbolizing ongoing/enduring/lasting, marking/shaping the, represents a shift, key turning point, evolving landscape
+
+**Before:**
+> The Statistical Institute was officially established in 1989, marking a pivotal moment in the evolution of regional statistics.
+
+**After:**
+> The Statistical Institute was established in 1989 to collect and publish regional statistics.
+
+#### 2. Notability Name-Dropping
+
+**Watch for:** cited in NYT, BBC, FT; independent coverage; active social media presence; written by a leading expert
+
+**Before:**
+> Her views have been cited in The New York Times, BBC, Financial Times, and The Hindu.
+
+**After:**
+> In a 2024 New York Times interview, she argued that AI regulation should focus on outcomes rather than methods.
+
+#### 3. Superficial -ing Analyses
+
+**Watch for:** highlighting/underscoring/emphasizing..., ensuring..., reflecting/symbolizing..., contributing to..., cultivating/fostering..., showcasing...
+
+**Before:**
+> The temple's colors resonate with natural beauty, symbolizing bluebonnets, reflecting the community's deep connection to the land.
+
+**After:**
+> The temple uses blue and gold colors. The architect said these were chosen to reference local bluebonnets.
+
+#### 4. Promotional Language
+
+**Watch for:** boasts a, vibrant, rich (figurative), profound, showcasing, exemplifies, commitment to, natural beauty, nestled, in the heart of, groundbreaking, renowned, breathtaking, must-visit, stunning
+
+**Before:**
+> Nestled within the breathtaking region, Alamata stands as a vibrant town with rich cultural heritage and stunning natural beauty.
+
+**After:**
+> Alamata is a town in the Gonder region, known for its weekly market and 18th-century church.
+
+#### 5. Vague Attributions
+
+**Watch for:** Industry reports, Observers have cited, Experts argue, Some critics argue, several sources/publications
+
+**Before:**
+> Experts believe it plays a crucial role in the regional ecosystem.
+
+**After:**
+> The river supports several endemic fish species, according to a 2019 survey by the Chinese Academy of Sciences.
+
+#### 6. Formulaic "Challenges" Sections
+
+**Watch for:** Despite its... faces several challenges..., Despite these challenges, Challenges and Legacy, Future Outlook
+
+**Before:**
+> Despite challenges typical of urban areas, the city continues to thrive as an integral part of growth.
+
+**After:**
+> Traffic congestion increased after 2015 when three new IT parks opened. The municipal corporation began a drainage project in 2022.
+
+---
+
+### Language Patterns
+
+#### 7. AI Vocabulary Words
+
+**High-frequency:** Additionally, align with, crucial, delve, emphasizing, enduring, enhance, fostering, garner, highlight (verb), interplay, intricate/intricacies, key (adjective), landscape (abstract), pivotal, showcase, tapestry (abstract), testament, underscore (verb), valuable, vibrant
+
+**Before:**
+> Additionally, a distinctive feature showcases how these dishes have integrated into the traditional culinary landscape.
+
+**After:**
+> Pasta dishes, introduced during Italian colonization, remain common, especially in the south.
+
+#### 8. Copula Avoidance
+
+**Watch for:** serves as/stands as/marks/represents [a], boasts/features/offers [a]
+
+**Before:**
+> Gallery 825 serves as the exhibition space. The gallery features four spaces and boasts over 3,000 square feet.
+
+**After:**
+> Gallery 825 is the exhibition space. The gallery has four rooms totaling 3,000 square feet.
+
+#### 9. Negative Parallelisms
+
+**Watch for:** "Not only...but...", "It's not just about..., it's..."
+
+**Before:**
+> It's not just about the beat; it's part of the aggression. It's not merely a song, it's a statement.
+
+**After:**
+> The heavy beat adds to the aggressive tone.
+
+#### 10. Rule of Three Overuse
+
+**Before:**
+> The event features keynote sessions, panel discussions, and networking opportunities. Attendees can expect innovation, inspiration, and industry insights.
+
+**After:**
+> The event includes talks and panels. There's also time for informal networking.
+
+#### 11. Synonym Cycling
+
+**Before:**
+> The protagonist faces challenges. The main character must overcome obstacles. The central figure eventually triumphs. The hero returns home.
+
+**After:**
+> The protagonist faces many challenges but eventually triumphs and returns home.
+
+#### 12. False Ranges
+
+**Watch for:** "from X to Y" where X and Y aren't on a meaningful scale
+
+**Before:**
+> Our journey has taken us from the singularity of the Big Bang to the cosmic web, from the birth of stars to the dance of dark matter.
+
+**After:**
+> The book covers the Big Bang, star formation, and current theories about dark matter.
+
+---
+
+### Style Patterns
+
+#### 13. Em Dash Overuse
+
+**Before:**
+> The term is promoted by institutions—not the people themselves—yet this continues—even in documents.
+
+**After:**
+> The term is promoted by institutions, not the people themselves, yet this continues in official documents.
+
+#### 14. Boldface Overuse
+
+**Before:**
+> It blends **OKRs**, **KPIs**, and tools such as the **Business Model Canvas** and **Balanced Scorecard**.
+
+**After:**
+> It blends OKRs, KPIs, and visual strategy tools like the Business Model Canvas and Balanced Scorecard.
+
+#### 15. Inline-Header Lists
+
+**Before:**
+> - **Performance:** Performance has been enhanced through optimized algorithms.
+> - **Security:** Security has been strengthened with encryption.
+
+**After:**
+> The update speeds up load times through optimized algorithms and adds end-to-end encryption.
+
+#### 16. Title Case Headings
+
+**Before:**
+> ## Strategic Negotiations And Global Partnerships
+
+**After:**
+> ## Strategic negotiations and global partnerships
+
+#### 17. Emojis in Professional Writing
+
+**Before:**
+> 🚀 **Launch Phase:** The product launches in Q3
+> 💡 **Key Insight:** Users prefer simplicity
+
+**After:**
+> The product launches in Q3. User research showed a preference for simplicity.
+
+#### 18. Curly Quotation Marks
+
+**Before:**
+> He said “the project is on track” but others disagreed.
+
+**After:**
+> He said "the project is on track" but others disagreed.
+
+---
+
+### Communication Patterns
+
+#### 19. Chatbot Artifacts
+
+**Watch for:** I hope this helps, Of course!, Certainly!, You're absolutely right!, Would you like..., let me know, here is a...
+
+**Before:**
+> Here is an overview of the French Revolution. I hope this helps! Let me know if you'd like me to expand on any section.
+
+**After:**
+> The French Revolution began in 1789 when financial crisis and food shortages led to widespread unrest.
+
+#### 20. Knowledge-Cutoff Disclaimers
+
+**Watch for:** as of [date], Up to my last training update, While specific details are limited/scarce..., based on available information...
+
+**Before:**
+> While specific details about the company's founding are not extensively documented in readily available sources, it appears to have been established sometime in the 1990s.
+
+**After:**
+> The company was founded in 1994, according to its registration documents.
+
+#### 21. Sycophantic Tone
+
+**Before:**
+> Great question! You're absolutely right that this is a complex topic. That's an excellent point!
+
+**After:**
+> The economic factors you mentioned are relevant here.
+
+---
+
+### Filler and Hedging
+
+#### 22. Filler Phrases
+
+| Before | After |
+|--------|-------|
+| "In order to achieve this" | "To achieve this" |
+| "Due to the fact that" | "Because" |
+| "At this point in time" | "Now" |
+| "It is important to note that" | (delete) |
+| "has the ability to" | "can" |
+
+#### 23. Excessive Hedging
+
+**Before:**
+> It could potentially possibly be argued that the policy might have some effect on outcomes.
+
+**After:**
+> The policy may affect outcomes.
+
+#### 24. Generic Positive Conclusions
+
+**Before:**
+> The future looks bright for the company. Exciting times lie ahead as they continue their journey toward excellence.
+
+**After:**
+> The company plans to open two more locations next year.
+
+---
+
+## Process
+
+1. Read the input text carefully
+2. Identify all instances of the 24 patterns
+3. Rewrite problematic sections
+4. Ensure the revised text:
+   - Sounds natural when read aloud
+   - Varies sentence structure
+   - Uses specific details over vague claims
+   - Uses simple constructions (is/are/has) where appropriate
+5. **Draft humanized version**
+6. **Ask:** "What makes this still obviously AI generated?"
+7. **List** remaining tells (if any)
+8. **Revise** based on the audit
+9. **Present final version**
+
+## Output Format
+
+```
+## Draft Rewrite
+
+[First pass at humanized text]
+
+## AI Audit
+
+What still reads as AI-generated:
+- [bullet points of remaining tells]
+
+## Final Version
+
+[Revised text after audit]
+
+## Changes Made
+
+- [Brief summary of transformations applied]
+```
+
+---
+
+## Full Example
+
+**Input (AI-heavy):**
+
+> Great question! Here is an essay on this topic. I hope this helps!
+>
+> AI-assisted coding serves as an enduring testament to the transformative potential of large language models, marking a pivotal moment in the evolution of software development. In today's rapidly evolving technological landscape, these groundbreaking tools—nestled at the intersection of research and practice—are reshaping how engineers ideate, iterate, and deliver, underscoring their vital role in modern workflows.
+
+**Draft Rewrite:**
+
+> AI coding assistants speed up some tasks. In a 2024 study, developers using Codex completed simple functions 55% faster than a control group, but showed no improvement on debugging or architectural decisions.
+>
+> The tools are good at boilerplate: config files, test scaffolding, repetitive refactors. They are bad at knowing when they are wrong.
+
+**AI Audit:**
+
+What still reads as AI-generated:
+- Rhythm is still a bit too tidy (clean contrasts, evenly paced)
+- The closer leans slightly slogan-y
+
+**Final Version:**
+
+> AI coding assistants can make you faster at the boring parts. Not everything. Definitely not architecture.
+>
+> They're great at boilerplate: config files, test scaffolding, repetitive refactors. They're also great at sounding right while being wrong. I've accepted suggestions that compiled, passed lint, and still missed the point because I stopped paying attention.
+>
+> If you don't have tests, you're basically guessing.
+
+**Changes Made:**
+
+- Removed chatbot artifacts ("Great question!", "I hope this helps!")
+- Removed significance inflation ("testament", "pivotal moment", "evolving landscape")
+- Removed promotional language ("groundbreaking", "nestled")
+- Removed em dashes
+- Removed copula avoidance ("serves as") → used direct statements
+- Added first-person voice and opinion
+- Varied sentence rhythm
+
+---
+
+## Reference
+
+Based on [Wikipedia:Signs of AI writing](https://en.wikipedia.org/wiki/Wikipedia:Signs_of_AI_writing), maintained by WikiProject AI Cleanup.

.git-blame-ignore-revs 🔗

@@ -34,3 +34,11 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e
 # 2024-07-24 docs: Format docs
 # https://github.com/zed-industries/zed/pull/15352
 3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c
+
+# 2026-02-27 Format Tree-sitter query files
+# https://github.com/zed-industries/zed/pull/50138
+5ed538f49c54ca464bb9d1e59446060a3a925668
+
+# 2026-02-28 Format proto files
+# https://github.com/zed-industries/zed/pull/50413
+56a88a848be09cbcb66bcb3d85ec1f5644909f72

.github/CODEOWNERS.hold 🔗

@@ -62,8 +62,6 @@
 /crates/rules_library/ @zed-industries/ai-team
 # SUGGESTED: Review needed - based on Richard Feldman (2 commits)
 /crates/shell_command_parser/ @zed-industries/ai-team
-/crates/supermaven/ @zed-industries/ai-team
-/crates/supermaven_api/ @zed-industries/ai-team
 /crates/vercel/ @zed-industries/ai-team
 /crates/x_ai/ @zed-industries/ai-team
 /crates/zeta_prompt/ @zed-industries/ai-team

.github/ISSUE_TEMPLATE/10_bug_report.yml 🔗

@@ -100,7 +100,7 @@ body:
       label: (for AI issues) Model provider details
       placeholder: |
         - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.)
-        - Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5)
+        - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5)
         - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
         - Other details (ACPs, MCPs, other settings, etc.):
     validations:

.github/workflows/add_commented_closed_issue_to_project.yml 🔗

@@ -63,13 +63,18 @@ jobs:
             }
 
       - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'true'
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
         run: |
-          echo "::notice::Skipping issue #${{ github.event.issue.number }} - commenter is staff member"
+          echo "::notice::Skipping issue #$ISSUE_NUMBER - commenter is staff member"
 
       # github-script outputs are JSON strings, so we compare against 'false' (string)
       - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false'
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
+          COMMENT_USER_LOGIN: ${{ github.event.comment.user.login }}
         run: |
-          echo "::notice::Adding issue #${{ github.event.issue.number }} to project (comment by ${{ github.event.comment.user.login }})"
+          echo "::notice::Adding issue #$ISSUE_NUMBER to project (comment by $COMMENT_USER_LOGIN)"
 
       - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false'
         uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2

.github/workflows/after_release.yml 🔗

@@ -76,7 +76,7 @@ jobs:
             "X-GitHub-Api-Version" = "2022-11-28"
         }
         $body = @{ branch = "master" } | ConvertTo-Json
-        $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream"
+        $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream"
         try {
             Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json"
             Write-Host "Successfully synced winget-pkgs fork"
@@ -131,11 +131,10 @@ jobs:
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: release::send_slack_message
-      run: |
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
+      run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
 defaults:
   run:
     shell: bash -euxo pipefail {0}

.github/workflows/autofix_pr.yml 🔗

@@ -22,8 +22,9 @@ jobs:
       with:
         clean: false
     - name: autofix_pr::run_autofix::checkout_pr
-      run: gh pr checkout ${{ inputs.pr_number }}
+      run: gh pr checkout "$PR_NUMBER"
       env:
+        PR_NUMBER: ${{ inputs.pr_number }}
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     - name: steps::setup_cargo_config
       run: |
@@ -104,8 +105,9 @@ jobs:
         clean: false
         token: ${{ steps.get-app-token.outputs.token }}
     - name: autofix_pr::commit_changes::checkout_pr
-      run: gh pr checkout ${{ inputs.pr_number }}
+      run: gh pr checkout "$PR_NUMBER"
       env:
+        PR_NUMBER: ${{ inputs.pr_number }}
         GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
     - name: autofix_pr::download_patch_artifact
       uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53

.github/workflows/background_agent_mvp.yml 🔗

@@ -1,8 +1,11 @@
 name: background_agent_mvp
 
+# NOTE: Scheduled runs disabled as of 2026-02-24. The workflow can still be
+# triggered manually via workflow_dispatch. See Notion doc "Background Agent
+# for Zed" for current status and contact info to resume this work.
 on:
-  schedule:
-    - cron: "0 16 * * 1-5"
+  # schedule:
+  #   - cron: "0 16 * * 1-5"
   workflow_dispatch:
     inputs:
       crash_ids:

.github/workflows/catch_blank_issues.yml 🔗

@@ -42,8 +42,10 @@ jobs:
             }
 
       - if: steps.check-staff.outputs.result == 'true'
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
         run: |
-          echo "::notice::Skipping issue #${{ github.event.issue.number }} - actor is staff member"
+          echo "::notice::Skipping issue #$ISSUE_NUMBER - actor is staff member"
 
       - if: steps.check-staff.outputs.result == 'false'
         id: add-label

.github/workflows/cherry_pick.yml 🔗

@@ -36,8 +36,11 @@ jobs:
         app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
         private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
     - name: cherry_pick::run_cherry_pick::cherry_pick
-      run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }}
+      run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL"
       env:
+        BRANCH: ${{ inputs.branch }}
+        COMMIT: ${{ inputs.commit }}
+        CHANNEL: ${{ inputs.channel }}
         GIT_COMMITTER_NAME: Zed Zippy
         GIT_COMMITTER_EMAIL: hi@zed.dev
         GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}

.github/workflows/community_update_all_top_ranking_issues.yml 🔗

@@ -22,4 +22,6 @@ jobs:
       - name: Install dependencies
         run: uv sync --project script/update_top_ranking_issues -p 3.13
       - name: Run script
-        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 5393

.github/workflows/community_update_weekly_top_ranking_issues.yml 🔗

@@ -22,4 +22,6 @@ jobs:
       - name: Install dependencies
         run: uv sync --project script/update_top_ranking_issues -p 3.13
       - name: Run script
-        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 6952 --query-day-interval 7

.github/workflows/compare_perf.yml 🔗

@@ -37,27 +37,40 @@ jobs:
     - name: compare_perf::run_perf::install_hyperfine
       uses: taiki-e/install-action@hyperfine
     - name: steps::git_checkout
-      run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
+      run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME"
+      env:
+        REF_NAME: ${{ inputs.base }}
     - name: compare_perf::run_perf::cargo_perf_test
       run: |2-
 
-                    if [ -n "${{ inputs.crate_name }}" ]; then
-                        cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }};
+                    if [ -n "$CRATE_NAME" ]; then
+                        cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
                     else
-                        cargo perf-test -p vim -- --json=${{ inputs.base }};
+                        cargo perf-test -p vim -- --json="$REF_NAME";
                     fi
+      env:
+        REF_NAME: ${{ inputs.base }}
+        CRATE_NAME: ${{ inputs.crate_name }}
     - name: steps::git_checkout
-      run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }}
+      run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME"
+      env:
+        REF_NAME: ${{ inputs.head }}
     - name: compare_perf::run_perf::cargo_perf_test
       run: |2-
 
-                    if [ -n "${{ inputs.crate_name }}" ]; then
-                        cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }};
+                    if [ -n "$CRATE_NAME" ]; then
+                        cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
                     else
-                        cargo perf-test -p vim -- --json=${{ inputs.head }};
+                        cargo perf-test -p vim -- --json="$REF_NAME";
                     fi
+      env:
+        REF_NAME: ${{ inputs.head }}
+        CRATE_NAME: ${{ inputs.crate_name }}
     - name: compare_perf::run_perf::compare_runs
-      run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }}
+      run: cargo perf-compare --save=results.md "$BASE" "$HEAD"
+      env:
+        BASE: ${{ inputs.base }}
+        HEAD: ${{ inputs.head }}
     - name: '@actions/upload-artifact results.md'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:

.github/workflows/deploy_cloudflare.yml 🔗

@@ -23,7 +23,10 @@ jobs:
       - name: Build docs
         uses: ./.github/actions/build_docs
         env:
+          CC: clang
+          CXX: clang++
           DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }}
+          DOCS_CONSENT_IO_INSTANCE: ${{ secrets.DOCS_CONSENT_IO_INSTANCE }}
 
       - name: Deploy Docs
         uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3

.github/workflows/deploy_collab.yml 🔗

@@ -119,8 +119,9 @@ jobs:
       with:
         token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
     - name: deploy_collab::deploy::sign_into_kubernetes
-      run: |
-        doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}
+      run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME"
+      env:
+        CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
     - name: deploy_collab::deploy::start_rollout
       run: |
         set -eu
@@ -140,7 +141,7 @@ jobs:
         echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE"
 
         source script/lib/deploy-helpers.sh
-        export_vars_for_environment $ZED_KUBE_NAMESPACE
+        export_vars_for_environment "$ZED_KUBE_NAMESPACE"
 
         ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)"
         export ZED_DO_CERTIFICATE_ID
@@ -150,14 +151,14 @@ jobs:
         export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT
         export DATABASE_MAX_CONNECTIONS=850
         envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
         echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
 
         export ZED_SERVICE_NAME=api
         export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT
         export DATABASE_MAX_CONNECTIONS=60
         envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
         echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
 defaults:
   run:

.github/workflows/extension_bump.yml 🔗

@@ -39,8 +39,8 @@ jobs:
       run: |
         CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
 
-        if [[ "${{ github.event_name }}" == "pull_request" ]]; then
-            PR_FORK_POINT="$(git merge-base --fork-point main)"
+        if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
+            PR_FORK_POINT="$(git merge-base origin/main HEAD)"
             git checkout "$PR_FORK_POINT"
         elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
             git checkout "$BRANCH_PARENT_SHA"
@@ -64,7 +64,7 @@ jobs:
     - check_version_changed
     if: |-
       (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') &&
-      (inputs.force-bump == 'true' || needs.check_version_changed.outputs.version_changed == 'false')
+      (inputs.force-bump == true || needs.check_version_changed.outputs.version_changed == 'false')
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - id: generate-token
@@ -82,8 +82,6 @@ jobs:
     - id: bump-version
       name: extension_bump::bump_version
       run: |
-        OLD_VERSION="${{ needs.check_version_changed.outputs.current_version }}"
-
         BUMP_FILES=("extension.toml")
         if [[ -f "Cargo.toml" ]]; then
             BUMP_FILES+=("Cargo.toml")
@@ -93,7 +91,7 @@ jobs:
             --search "version = \"{current_version}"\" \
             --replace "version = \"{new_version}"\" \
             --current-version "$OLD_VERSION" \
-            --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}"
+            --no-configured-files "$BUMP_TYPE" "${BUMP_FILES[@]}"
 
         if [[ -f "Cargo.toml" ]]; then
             cargo update --workspace
@@ -102,6 +100,9 @@ jobs:
         NEW_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
 
         echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
+      env:
+        OLD_VERSION: ${{ needs.check_version_changed.outputs.current_version }}
+        BUMP_TYPE: ${{ inputs.bump-type }}
     - name: extension_bump::create_pull_request
       uses: peter-evans/create-pull-request@v7
       with:

.github/workflows/extension_tests.yml 🔗

@@ -32,7 +32,7 @@ jobs:
           git fetch origin "$GITHUB_BASE_REF" --depth=350
           COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
         fi
-        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
 
         check_pattern() {
           local output_name="$1"
@@ -109,13 +109,28 @@ jobs:
         mkdir -p /tmp/ext-scratch
         mkdir -p /tmp/ext-output
         ./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output
+    - name: run_tests::fetch_ts_query_ls
+      uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c
+      with:
+        repo: ribru17/ts_query_ls
+        version: tags/v3.15.1
+        file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz
+    - name: run_tests::run_ts_query_ls
+      run: |-
+        tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz
+        ./ts_query_ls format --check . || {
+            echo "Found unformatted queries, please format them with ts_query_ls."
+            echo "For easy use, install the Tree-sitter query extension:"
+            echo "zed://extension/tree-sitter-query"
+            false
+        }
     - id: compare-versions-check
       name: extension_bump::compare_versions
       run: |
         CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
 
-        if [[ "${{ github.event_name }}" == "pull_request" ]]; then
-            PR_FORK_POINT="$(git merge-base --fork-point main)"
+        if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
+            PR_FORK_POINT="$(git merge-base origin/main HEAD)"
             git checkout "$PR_FORK_POINT"
         elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
             git checkout "$BRANCH_PARENT_SHA"
@@ -132,11 +147,14 @@ jobs:
         echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT"
     - name: extension_tests::verify_version_did_not_change
       run: |
-        if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then
+        if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then
             echo "Version change detected in your change!"
             echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot"
             exit 42
         fi
+      env:
+        VERSION_CHANGED: ${{ steps.compare-versions-check.outputs.version_changed }}
+        PR_USER_LOGIN: ${{ github.event.pull_request.user.login }}
     timeout-minutes: 6
   tests_pass:
     needs:
@@ -156,11 +174,15 @@ jobs:
           if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
         }
 
-        check_result "orchestrate" "${{ needs.orchestrate.result }}"
-        check_result "check_rust" "${{ needs.check_rust.result }}"
-        check_result "check_extension" "${{ needs.check_extension.result }}"
+        check_result "orchestrate" "$RESULT_ORCHESTRATE"
+        check_result "check_rust" "$RESULT_CHECK_RUST"
+        check_result "check_extension" "$RESULT_CHECK_EXTENSION"
 
         exit $EXIT_CODE
+      env:
+        RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }}
+        RESULT_CHECK_RUST: ${{ needs.check_rust.result }}
+        RESULT_CHECK_EXTENSION: ${{ needs.check_extension.result }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
   cancel-in-progress: true

.github/workflows/extension_workflow_rollout.yml 🔗

@@ -80,9 +80,7 @@ jobs:
     - id: calc-changes
       name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files
       run: |
-        PREV_COMMIT="${{ steps.prev-tag.outputs.prev_commit }}"
-
-        if [ "${{ matrix.repo }}" = "workflows" ]; then
+        if [ "$MATRIX_REPO" = "workflows" ]; then
             WORKFLOW_DIR="extensions/workflows"
         else
             WORKFLOW_DIR="extensions/workflows/shared"
@@ -101,11 +99,12 @@ jobs:
 
         echo "Files to remove: $REMOVED_FILES"
         echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
+      env:
+        PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }}
+        MATRIX_REPO: ${{ matrix.repo }}
       working-directory: zed
     - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files
       run: |
-        REMOVED_FILES="${{ steps.calc-changes.outputs.removed_files }}"
-
         mkdir -p extension/.github/workflows
         cd extension/.github/workflows
 
@@ -119,15 +118,18 @@ jobs:
 
         cd - > /dev/null
 
-        if [ "${{ matrix.repo }}" = "workflows" ]; then
+        if [ "$MATRIX_REPO" = "workflows" ]; then
             cp zed/extensions/workflows/*.yml extension/.github/workflows/
         else
             cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
         fi
+      env:
+        REMOVED_FILES: ${{ steps.calc-changes.outputs.removed_files }}
+        MATRIX_REPO: ${{ matrix.repo }}
     - id: short-sha
       name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha
       run: |
-        echo "sha_short=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT"
       working-directory: zed
     - id: create-pr
       name: extension_workflow_rollout::rollout_workflows_to_extension::create_pull_request
@@ -148,13 +150,13 @@ jobs:
         sign-commits: true
     - name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge
       run: |
-        PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}"
         if [ -n "$PR_NUMBER" ]; then
             cd extension
             gh pr merge "$PR_NUMBER" --auto --squash
         fi
       env:
         GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+        PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }}
     timeout-minutes: 10
   create_rollout_tag:
     needs:

.github/workflows/publish_extension_cli.yml 🔗

@@ -27,7 +27,7 @@ jobs:
     - name: publish_extension_cli::publish_job::build_extension_cli
       run: cargo build --release --package extension_cli
     - name: publish_extension_cli::publish_job::upload_binary
-      run: script/upload-extension-cli ${{ github.sha }}
+      run: script/upload-extension-cli "$GITHUB_SHA"
       env:
         DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
         DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
@@ -55,10 +55,10 @@ jobs:
     - id: short-sha
       name: publish_extension_cli::get_short_sha
       run: |
-        echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
     - name: publish_extension_cli::update_sha_in_zed::replace_sha
       run: |
-        sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \
+        sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \
             tooling/xtask/src/tasks/workflows/extension_tests.rs
     - name: publish_extension_cli::update_sha_in_zed::regenerate_workflows
       run: cargo xtask workflows
@@ -97,7 +97,7 @@ jobs:
     - id: short-sha
       name: publish_extension_cli::get_short_sha
       run: |
-        echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
     - name: publish_extension_cli::update_sha_in_extensions::checkout_extensions_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
@@ -105,7 +105,7 @@ jobs:
         token: ${{ steps.generate-token.outputs.token }}
     - name: publish_extension_cli::update_sha_in_extensions::replace_sha
       run: |
-        sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \
+        sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \
             .github/workflows/ci.yml
     - name: publish_extension_cli::create_pull_request_extensions
       uses: peter-evans/create-pull-request@v7

.github/workflows/release.yml 🔗

@@ -53,6 +53,9 @@ jobs:
   run_tests_linux:
     if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
     runs-on: namespace-profile-16x32-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -177,6 +180,9 @@ jobs:
   clippy_linux:
     if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
     runs-on: namespace-profile-16x32-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -251,8 +257,14 @@ jobs:
       name: run_tests::check_scripts::download_actionlint
       run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
     - name: run_tests::check_scripts::run_actionlint
-      run: |
-        ${{ steps.get_actionlint.outputs.executable }} -color
+      run: '"$ACTIONLINT_BIN" -color'
+      env:
+        ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+        path: ~/.rustup
     - name: run_tests::check_scripts::check_xtask_workflows
       run: |
         cargo xtask workflows
@@ -293,6 +305,8 @@ jobs:
       CARGO_INCREMENTAL: 0
       ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
       ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      CC: clang-18
+      CXX: clang++-18
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -333,6 +347,8 @@ jobs:
       CARGO_INCREMENTAL: 0
       ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
       ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      CC: clang-18
+      CXX: clang++-18
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -644,12 +660,7 @@ jobs:
     - id: generate-webhook-message
       name: release::generate_slack_message
       run: |
-        MESSAGE=$(DRAFT_RESULT="${{ needs.create_draft_release.result }}"
-        UPLOAD_RESULT="${{ needs.upload_release_assets.result }}"
-        VALIDATE_RESULT="${{ needs.validate_release_assets.result }}"
-        AUTO_RELEASE_RESULT="${{ needs.auto_release_preview.result }}"
-        TAG="$GITHUB_REF_NAME"
-        RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+        MESSAGE=$(TAG="$GITHUB_REF_NAME"
 
         if [ "$DRAFT_RESULT" == "failure" ]; then
             echo "❌ Draft release creation failed for $TAG: $RUN_URL"
@@ -659,19 +670,19 @@ jobs:
                 echo "❌ Release asset upload failed for $TAG: $RELEASE_URL"
             elif [ "$UPLOAD_RESULT" == "cancelled" ] || [ "$UPLOAD_RESULT" == "skipped" ]; then
                 FAILED_JOBS=""
-                if [ "${{ needs.run_tests_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi
-                if [ "${{ needs.run_tests_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi
-                if [ "${{ needs.run_tests_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi
-                if [ "${{ needs.clippy_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi
-                if [ "${{ needs.clippy_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi
-                if [ "${{ needs.clippy_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi
-                if [ "${{ needs.check_scripts.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi
-                if [ "${{ needs.bundle_linux_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi
-                if [ "${{ needs.bundle_linux_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi
-                if [ "${{ needs.bundle_mac_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi
-                if [ "${{ needs.bundle_mac_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi
-                if [ "${{ needs.bundle_windows_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi
-                if [ "${{ needs.bundle_windows_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi
+                if [ "$RESULT_RUN_TESTS_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi
+                if [ "$RESULT_RUN_TESTS_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi
+                if [ "$RESULT_RUN_TESTS_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi
+                if [ "$RESULT_CLIPPY_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi
+                if [ "$RESULT_CLIPPY_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi
+                if [ "$RESULT_CLIPPY_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi
+                if [ "$RESULT_CHECK_SCRIPTS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi
+                if [ "$RESULT_BUNDLE_LINUX_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi
+                if [ "$RESULT_BUNDLE_LINUX_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi
+                if [ "$RESULT_BUNDLE_MAC_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi
+                if [ "$RESULT_BUNDLE_MAC_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi
+                if [ "$RESULT_BUNDLE_WINDOWS_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi
+                if [ "$RESULT_BUNDLE_WINDOWS_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi
                 FAILED_JOBS=$(echo "$FAILED_JOBS" | xargs)
                 if [ "$UPLOAD_RESULT" == "cancelled" ]; then
                     if [ -n "$FAILED_JOBS" ]; then
@@ -700,12 +711,29 @@ jobs:
         echo "message=$MESSAGE" >> "$GITHUB_OUTPUT"
       env:
         GH_TOKEN: ${{ github.token }}
+        DRAFT_RESULT: ${{ needs.create_draft_release.result }}
+        UPLOAD_RESULT: ${{ needs.upload_release_assets.result }}
+        VALIDATE_RESULT: ${{ needs.validate_release_assets.result }}
+        AUTO_RELEASE_RESULT: ${{ needs.auto_release_preview.result }}
+        RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+        RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
+        RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
+        RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
+        RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+        RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
+        RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
+        RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }}
+        RESULT_BUNDLE_LINUX_AARCH64: ${{ needs.bundle_linux_aarch64.result }}
+        RESULT_BUNDLE_LINUX_X86_64: ${{ needs.bundle_linux_x86_64.result }}
+        RESULT_BUNDLE_MAC_AARCH64: ${{ needs.bundle_mac_aarch64.result }}
+        RESULT_BUNDLE_MAC_X86_64: ${{ needs.bundle_mac_x86_64.result }}
+        RESULT_BUNDLE_WINDOWS_AARCH64: ${{ needs.bundle_windows_aarch64.result }}
+        RESULT_BUNDLE_WINDOWS_X86_64: ${{ needs.bundle_windows_x86_64.result }}
     - name: release::send_slack_message
-      run: |
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{"text":"${{ steps.generate-webhook-message.outputs.message }}"}' "$SLACK_WEBHOOK"
+      run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        SLACK_MESSAGE: ${{ steps.generate-webhook-message.outputs.message }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
   cancel-in-progress: true

.github/workflows/release_nightly.yml 🔗

@@ -103,6 +103,8 @@ jobs:
       CARGO_INCREMENTAL: 0
       ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
       ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      CC: clang-18
+      CXX: clang++-18
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -149,6 +151,8 @@ jobs:
       CARGO_INCREMENTAL: 0
       ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
       ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      CC: clang-18
+      CXX: clang++-18
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -550,11 +554,10 @@ jobs:
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: release::send_slack_message
-      run: |
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
+      run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
 defaults:
   run:
     shell: bash -euxo pipefail {0}

.github/workflows/run_bundling.yml 🔗

@@ -19,6 +19,8 @@ jobs:
       CARGO_INCREMENTAL: 0
       ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
       ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      CC: clang-18
+      CXX: clang++-18
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -58,6 +60,8 @@ jobs:
       CARGO_INCREMENTAL: 0
       ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
       ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+      CC: clang-18
+      CXX: clang++-18
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

.github/workflows/run_cron_unit_evals.yml 🔗

@@ -16,7 +16,7 @@ jobs:
         model:
         - anthropic/claude-sonnet-4-5-latest
         - anthropic/claude-opus-4-5-latest
-        - google/gemini-3-pro
+        - google/gemini-3.1-pro
         - openai/gpt-5
       fail-fast: false
     steps:

.github/workflows/run_tests.yml 🔗

@@ -35,7 +35,7 @@ jobs:
           git fetch origin "$GITHUB_BASE_REF" --depth=350
           COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
         fi
-        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
 
         check_pattern() {
           local output_name="$1"
@@ -139,6 +139,21 @@ jobs:
       uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06
       with:
         config: ./typos.toml
+    - name: run_tests::fetch_ts_query_ls
+      uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c
+      with:
+        repo: ribru17/ts_query_ls
+        version: tags/v3.15.1
+        file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz
+    - name: run_tests::run_ts_query_ls
+      run: |-
+        tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz
+        ./ts_query_ls format --check . || {
+            echo "Found unformatted queries, please format them with ts_query_ls."
+            echo "For easy use, install the Tree-sitter query extension:"
+            echo "zed://extension/tree-sitter-query"
+            false
+        }
     timeout-minutes: 60
   clippy_windows:
     needs:
@@ -175,6 +190,9 @@ jobs:
     - orchestrate
     if: needs.orchestrate.outputs.run_tests == 'true'
     runs-on: namespace-profile-16x32-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -285,6 +303,9 @@ jobs:
     - orchestrate
     if: needs.orchestrate.outputs.run_tests == 'true'
     runs-on: namespace-profile-16x32-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -385,6 +406,9 @@ jobs:
     - orchestrate
     if: needs.orchestrate.outputs.run_tests == 'true'
     runs-on: namespace-profile-16x32-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -428,6 +452,9 @@ jobs:
     - orchestrate
     if: needs.orchestrate.outputs.run_tests == 'true'
     runs-on: namespace-profile-8x16-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -466,11 +493,53 @@ jobs:
       run: |
         rm -rf ./../.cargo
     timeout-minutes: 60
+  check_wasm:
+    needs:
+    - orchestrate
+    if: needs.orchestrate.outputs.run_tests == 'true'
+    runs-on: namespace-profile-8x16-ubuntu-2204
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        clean: false
+    - name: steps::setup_cargo_config
+      run: |
+        mkdir -p ./../.cargo
+        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+        path: ~/.rustup
+    - name: run_tests::check_wasm::install_nightly_wasm_toolchain
+      run: rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown
+    - name: steps::setup_sccache
+      run: ./script/setup-sccache
+      env:
+        R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
+        R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
+        R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
+        SCCACHE_BUCKET: sccache-zed
+    - name: run_tests::check_wasm::cargo_check_wasm
+      run: cargo +nightly -Zbuild-std=std,panic_abort check --target wasm32-unknown-unknown -p gpui_platform
+      env:
+        CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS: -C target-feature=+atomics,+bulk-memory,+mutable-globals
+    - name: steps::show_sccache_stats
+      run: sccache --show-stats || true
+    - name: steps::cleanup_cargo_config
+      if: always()
+      run: |
+        rm -rf ./../.cargo
+    timeout-minutes: 60
   check_dependencies:
     needs:
     - orchestrate
     if: needs.orchestrate.outputs.run_tests == 'true'
     runs-on: namespace-profile-2x4-ubuntu-2404
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -503,6 +572,9 @@ jobs:
     - orchestrate
     if: needs.orchestrate.outputs.run_docs == 'true'
     runs-on: namespace-profile-8x16-ubuntu-2204
+    env:
+      CC: clang
+      CXX: clang++
     steps:
     - name: steps::checkout_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -581,8 +653,14 @@ jobs:
       name: run_tests::check_scripts::download_actionlint
       run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
     - name: run_tests::check_scripts::run_actionlint
-      run: |
-        ${{ steps.get_actionlint.outputs.executable }} -color
+      run: '"$ACTIONLINT_BIN" -color'
+      env:
+        ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+        path: ~/.rustup
     - name: run_tests::check_scripts::check_xtask_workflows
       run: |
         cargo xtask workflows
@@ -628,6 +706,10 @@ jobs:
       with:
         input: crates/proto/proto/
         against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
+    - name: run_tests::check_postgres_and_protobuf_migrations::buf_lint
+      run: buf lint crates/proto/proto
+    - name: run_tests::check_postgres_and_protobuf_migrations::check_protobuf_formatting
+      run: buf format --diff --exit-code crates/proto/proto
     timeout-minutes: 60
   tests_pass:
     needs:
@@ -641,6 +723,7 @@ jobs:
     - run_tests_mac
     - doctests
     - check_workspace_binaries
+    - check_wasm
     - check_dependencies
     - check_docs
     - check_licenses
@@ -658,22 +741,39 @@ jobs:
           if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
         }
 
-        check_result "orchestrate" "${{ needs.orchestrate.result }}"
-        check_result "check_style" "${{ needs.check_style.result }}"
-        check_result "clippy_windows" "${{ needs.clippy_windows.result }}"
-        check_result "clippy_linux" "${{ needs.clippy_linux.result }}"
-        check_result "clippy_mac" "${{ needs.clippy_mac.result }}"
-        check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
-        check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
-        check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
-        check_result "doctests" "${{ needs.doctests.result }}"
-        check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
-        check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
-        check_result "check_docs" "${{ needs.check_docs.result }}"
-        check_result "check_licenses" "${{ needs.check_licenses.result }}"
-        check_result "check_scripts" "${{ needs.check_scripts.result }}"
+        check_result "orchestrate" "$RESULT_ORCHESTRATE"
+        check_result "check_style" "$RESULT_CHECK_STYLE"
+        check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS"
+        check_result "clippy_linux" "$RESULT_CLIPPY_LINUX"
+        check_result "clippy_mac" "$RESULT_CLIPPY_MAC"
+        check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS"
+        check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX"
+        check_result "run_tests_mac" "$RESULT_RUN_TESTS_MAC"
+        check_result "doctests" "$RESULT_DOCTESTS"
+        check_result "check_workspace_binaries" "$RESULT_CHECK_WORKSPACE_BINARIES"
+        check_result "check_wasm" "$RESULT_CHECK_WASM"
+        check_result "check_dependencies" "$RESULT_CHECK_DEPENDENCIES"
+        check_result "check_docs" "$RESULT_CHECK_DOCS"
+        check_result "check_licenses" "$RESULT_CHECK_LICENSES"
+        check_result "check_scripts" "$RESULT_CHECK_SCRIPTS"
 
         exit $EXIT_CODE
+      env:
+        RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }}
+        RESULT_CHECK_STYLE: ${{ needs.check_style.result }}
+        RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
+        RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
+        RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+        RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
+        RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
+        RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
+        RESULT_DOCTESTS: ${{ needs.doctests.result }}
+        RESULT_CHECK_WORKSPACE_BINARIES: ${{ needs.check_workspace_binaries.result }}
+        RESULT_CHECK_WASM: ${{ needs.check_wasm.result }}
+        RESULT_CHECK_DEPENDENCIES: ${{ needs.check_dependencies.result }}
+        RESULT_CHECK_DOCS: ${{ needs.check_docs.result }}
+        RESULT_CHECK_LICENSES: ${{ needs.check_licenses.result }}
+        RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
   cancel-in-progress: true

.github/workflows/slack_notify_first_responders.yml 🔗

@@ -17,8 +17,9 @@ jobs:
         id: check-label
         env:
           LABEL_NAME: ${{ github.event.label.name }}
+          FIRST_RESPONDER_LABELS: ${{ env.FIRST_RESPONDER_LABELS }}
         run: |
-          if echo '${{ env.FIRST_RESPONDER_LABELS }}' | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then
+          if echo "$FIRST_RESPONDER_LABELS" | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then
             echo "should_notify=true" >> "$GITHUB_OUTPUT"
             echo "Label '$LABEL_NAME' requires first responder notification"
           else

.github/workflows/update_duplicate_magnets.yml 🔗

@@ -21,7 +21,9 @@ jobs:
         run: pip install requests
 
       - name: Update duplicate magnets issue
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           python script/github-find-top-duplicated-bugs.py \
-            --github-token ${{ secrets.GITHUB_TOKEN }} \
+            --github-token "$GITHUB_TOKEN" \
             --issue-number 46355

Cargo.lock 🔗

@@ -76,6 +76,7 @@ dependencies = [
  "clock",
  "collections",
  "ctor",
+ "fs",
  "futures 0.3.31",
  "gpui",
  "indoc",
@@ -169,7 +170,7 @@ dependencies = [
  "context_server",
  "ctor",
  "db",
- "derive_more 0.99.20",
+ "derive_more",
  "editor",
  "env_logger 0.11.8",
  "eval_utils",
@@ -241,7 +242,7 @@ dependencies = [
  "anyhow",
  "async-broadcast",
  "async-trait",
- "derive_more 2.0.1",
+ "derive_more",
  "futures 0.3.31",
  "log",
  "serde",
@@ -255,7 +256,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1"
 dependencies = [
  "anyhow",
- "derive_more 2.0.1",
+ "derive_more",
  "schemars",
  "serde",
  "serde_json",
@@ -368,6 +369,7 @@ dependencies = [
  "fs",
  "futures 0.3.31",
  "fuzzy",
+ "git",
  "gpui",
  "gpui_tokio",
  "html_to_markdown",
@@ -601,6 +603,17 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
 
+[[package]]
+name = "annotate-snippets"
+version = "0.12.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c86cd1c51b95d71dde52bca69ed225008f6ff4c8cc825b08042aa1ef823e1980"
+dependencies = [
+ "anstyle",
+ "memchr",
+ "unicode-width",
+]
+
 [[package]]
 name = "anstream"
 version = "0.6.21"
@@ -692,6 +705,15 @@ dependencies = [
  "num-traits",
 ]
 
+[[package]]
+name = "ar_archive_writer"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b"
+dependencies = [
+ "object 0.37.3",
+]
+
 [[package]]
 name = "arbitrary"
 version = "1.4.2"
@@ -756,19 +778,16 @@ dependencies = [
 
 [[package]]
 name = "ashpd"
-version = "0.12.1"
+version = "0.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "618a409b91d5265798a99e3d1d0b226911605e581c4e7255e83c1e397b172bce"
+checksum = "0848bedd08067dca1c02c31cbb371a94ad4f2f8a61a82f2c43d96ec36a395244"
 dependencies = [
- "async-fs",
- "async-net",
  "enumflags2",
  "futures-channel",
  "futures-util",
- "rand 0.9.2",
+ "getrandom 0.4.1",
  "serde",
  "serde_repr",
- "url",
  "wayland-backend",
  "wayland-client",
  "wayland-protocols",
@@ -807,7 +826,7 @@ dependencies = [
  "anyhow",
  "async-trait",
  "collections",
- "derive_more 0.99.20",
+ "derive_more",
  "extension",
  "futures 0.3.31",
  "gpui",
@@ -1005,7 +1024,7 @@ version = "2.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8034a681df4aed8b8edbd7fbe472401ecf009251c8b40556b304567052e294c5"
 dependencies = [
- "async-lock 3.4.1",
+ "async-lock 3.4.2",
  "blocking",
  "futures-lite 2.6.1",
 ]
@@ -1019,7 +1038,7 @@ dependencies = [
  "async-channel 2.5.0",
  "async-executor",
  "async-io",
- "async-lock 3.4.1",
+ "async-lock 3.4.2",
  "blocking",
  "futures-lite 2.6.1",
  "once_cell",
@@ -1054,9 +1073,9 @@ dependencies = [
 
 [[package]]
 name = "async-lock"
-version = "3.4.1"
+version = "3.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc"
+checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311"
 dependencies = [
  "event-listener 5.4.1",
  "event-listener-strategy",
@@ -1091,7 +1110,7 @@ checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75"
 dependencies = [
  "async-channel 2.5.0",
  "async-io",
- "async-lock 3.4.1",
+ "async-lock 3.4.2",
  "async-signal",
  "async-task",
  "blocking",
@@ -1119,7 +1138,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c"
 dependencies = [
  "async-io",
- "async-lock 3.4.1",
+ "async-lock 3.4.2",
  "atomic-waker",
  "cfg-if",
  "futures-core",
@@ -1140,7 +1159,7 @@ dependencies = [
  "async-channel 1.9.0",
  "async-global-executor",
  "async-io",
- "async-lock 3.4.1",
+ "async-lock 3.4.2",
  "async-process",
  "crossbeam-utils",
  "futures-channel",
@@ -1345,6 +1364,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "log",
+ "scopeguard",
  "simplelog",
  "tempfile",
  "windows 0.61.3",
@@ -2166,6 +2186,16 @@ dependencies = [
  "piper",
 ]
 
+[[package]]
+name = "bmrng"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54df9073108f1558f90ae6c5bf5ab9c917c4185f5527b280c87a993cbead0ac"
+dependencies = [
+ "futures-core",
+ "tokio",
+]
+
 [[package]]
 name = "bon"
 version = "3.8.2"
@@ -2748,6 +2778,16 @@ dependencies = [
  "target-lexicon 0.12.16",
 ]
 
+[[package]]
+name = "cfg-expr"
+version = "0.20.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78cef5b5a1a6827c7322ae2a636368a573006b27cfa76c7ebd53e834daeaab6a"
+dependencies = [
+ "smallvec",
+ "target-lexicon 0.13.3",
+]
+
 [[package]]
 name = "cfg-if"
 version = "1.0.4"
@@ -2973,7 +3013,7 @@ dependencies = [
  "cloud_llm_client",
  "collections",
  "credentials_provider",
- "derive_more 0.99.20",
+ "derive_more",
  "feature_flags",
  "fs",
  "futures 0.3.31",
@@ -3411,7 +3451,7 @@ name = "command_palette_hooks"
 version = "0.1.0"
 dependencies = [
  "collections",
- "derive_more 0.99.20",
+ "derive_more",
  "gpui",
  "workspace",
 ]
@@ -3497,6 +3537,16 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "console_error_panic_hook"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "const-oid"
 version = "0.9.6"
@@ -3577,15 +3627,18 @@ dependencies = [
 
 [[package]]
 name = "convert_case"
-version = "0.4.0"
+version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f"
+dependencies = [
+ "unicode-segmentation",
+]
 
 [[package]]
 name = "convert_case"
-version = "0.8.0"
+version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f"
+checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
 dependencies = [
  "unicode-segmentation",
 ]
@@ -4084,13 +4137,13 @@ dependencies = [
 name = "crashes"
 version = "0.1.0"
 dependencies = [
- "bincode",
  "cfg-if",
  "crash-handler",
  "futures 0.3.31",
  "log",
  "mach2 0.5.0",
  "minidumper",
+ "parking_lot",
  "paths",
  "release_channel",
  "serde",
@@ -4278,7 +4331,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
 dependencies = [
  "generic-array",
- "rand_core 0.6.4",
  "typenum",
 ]
 
@@ -4305,6 +4357,20 @@ dependencies = [
  "syn 2.0.106",
 ]
 
+[[package]]
+name = "csv_preview"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "editor",
+ "feature_flags",
+ "gpui",
+ "log",
+ "text",
+ "ui",
+ "workspace",
+]
+
 [[package]]
 name = "ctor"
 version = "0.4.3"
@@ -4643,7 +4709,6 @@ dependencies = [
  "sysinfo 0.37.2",
  "task",
  "tasks_ui",
- "telemetry",
  "terminal_view",
  "text",
  "theme",
@@ -4743,34 +4808,23 @@ dependencies = [
 
 [[package]]
 name = "derive_more"
-version = "0.99.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f"
-dependencies = [
- "convert_case 0.4.0",
- "proc-macro2",
- "quote",
- "rustc_version",
- "syn 2.0.106",
-]
-
-[[package]]
-name = "derive_more"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
+checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134"
 dependencies = [
  "derive_more-impl",
 ]
 
 [[package]]
 name = "derive_more-impl"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
+checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
 dependencies = [
+ "convert_case 0.10.0",
  "proc-macro2",
  "quote",
+ "rustc_version",
  "syn 2.0.106",
  "unicode-xid",
 ]
@@ -4966,11 +5020,13 @@ checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b"
 
 [[package]]
 name = "dispatch2"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec"
+checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38"
 dependencies = [
  "bitflags 2.10.0",
+ "block2",
+ "libc",
  "objc2",
 ]
 
@@ -5367,7 +5423,6 @@ dependencies = [
  "semver",
  "serde_json",
  "settings",
- "supermaven",
  "telemetry",
  "text",
  "theme",
@@ -6247,6 +6302,12 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
 
+[[package]]
+name = "fixedbitset"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
+
 [[package]]
 name = "flate2"
 version = "1.1.8"
@@ -6596,6 +6657,19 @@ dependencies = [
  "futures-sink",
 ]
 
+[[package]]
+name = "futures-concurrency"
+version = "7.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "175cd8cca9e1d45b87f18ffa75088f2099e3c4fe5e2f83e42de112560bea8ea6"
+dependencies = [
+ "fixedbitset 0.5.7",
+ "futures-core",
+ "futures-lite 2.6.1",
+ "pin-project",
+ "smallvec",
+]
+
 [[package]]
 name = "futures-core"
 version = "0.3.31"
@@ -7040,13 +7114,26 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "getrandom"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "r-efi",
+ "wasip2",
+ "wasip3",
+]
+
 [[package]]
 name = "gh-workflow"
 version = "0.8.0"
 source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac"
 dependencies = [
  "async-trait",
- "derive_more 2.0.1",
+ "derive_more",
  "derive_setters",
  "gh-workflow-macros",
  "indexmap",
@@ -7094,6 +7181,19 @@ version = "0.32.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7"
 
+[[package]]
+name = "gio-sys"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0071fe88dba8e40086c8ff9bbb62622999f49628344b1d1bf490a48a29d80f22"
+dependencies = [
+ "glib-sys",
+ "gobject-sys",
+ "libc",
+ "system-deps 7.0.7",
+ "windows-sys 0.61.2",
+]
+
 [[package]]
 name = "git"
 version = "0.1.0"
@@ -7102,7 +7202,7 @@ dependencies = [
  "askpass",
  "async-trait",
  "collections",
- "derive_more 0.99.20",
+ "derive_more",
  "futures 0.3.31",
  "git2",
  "gpui",
@@ -7208,6 +7308,7 @@ dependencies = [
  "ctor",
  "db",
  "editor",
+ "feature_flags",
  "futures 0.3.31",
  "fuzzy",
  "git",
@@ -7228,6 +7329,7 @@ dependencies = [
  "pretty_assertions",
  "project",
  "prompt_store",
+ "proto",
  "rand 0.9.2",
  "remote",
  "remote_connection",
@@ -7267,6 +7369,50 @@ dependencies = [
  "xml-rs",
 ]
 
+[[package]]
+name = "glib"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16de123c2e6c90ce3b573b7330de19be649080ec612033d397d72da265f1bd8b"
+dependencies = [
+ "bitflags 2.10.0",
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-task",
+ "futures-util",
+ "gio-sys",
+ "glib-macros",
+ "glib-sys",
+ "gobject-sys",
+ "libc",
+ "memchr",
+ "smallvec",
+]
+
+[[package]]
+name = "glib-macros"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf59b675301228a696fe01c3073974643365080a76cc3ed5bc2cbc466ad87f17"
+dependencies = [
+ "heck 0.5.0",
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.106",
+]
+
+[[package]]
+name = "glib-sys"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d95e1a3a19ae464a7286e14af9a90683c64d70c02532d88d87ce95056af3e6c"
+dependencies = [
+ "libc",
+ "system-deps 7.0.7",
+]
+
 [[package]]
 name = "glob"
 version = "0.3.3"
@@ -7342,6 +7488,17 @@ dependencies = [
  "workspace",
 ]
 
+[[package]]
+name = "gobject-sys"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dca35da0d19a18f4575f3cb99fe1c9e029a2941af5662f326f738a21edaf294"
+dependencies = [
+ "glib-sys",
+ "libc",
+ "system-deps 7.0.7",
+]
+
 [[package]]
 name = "goblin"
 version = "0.8.2"
@@ -7406,6 +7563,7 @@ name = "gpui"
 version = "0.2.2"
 dependencies = [
  "anyhow",
+ "async-channel 2.5.0",
  "async-task",
  "backtrace",
  "bindgen 0.71.1",
@@ -7423,14 +7581,18 @@ dependencies = [
  "core-text",
  "core-video",
  "ctor",
- "derive_more 0.99.20",
+ "derive_more",
  "embed-resource",
  "env_logger 0.11.8",
  "etagere",
  "foreign-types 0.5.0",
  "futures 0.3.31",
+ "futures-concurrency",
+ "getrandom 0.3.4",
  "gpui_macros",
  "gpui_platform",
+ "gpui_util",
+ "gpui_web",
  "http_client",
  "image",
  "inventory",
@@ -7440,7 +7602,7 @@ dependencies = [
  "mach2 0.5.0",
  "media",
  "metal",
- "naga",
+ "naga 28.0.0",
  "num_cpus",
  "objc",
  "objc2",
@@ -7449,6 +7611,7 @@ dependencies = [
  "parking_lot",
  "pathfinder_geometry",
  "pin-project",
+ "pollster 0.4.0",
  "postage",
  "pretty_assertions",
  "profiling",
@@ -7464,7 +7627,6 @@ dependencies = [
  "serde_json",
  "slotmap",
  "smallvec",
- "smol",
  "spin 0.10.0",
  "stacksafe",
  "strum 0.27.2",
@@ -7472,11 +7634,13 @@ dependencies = [
  "taffy",
  "thiserror 2.0.17",
  "unicode-segmentation",
+ "url",
  "usvg",
- "util",
  "util_macros",
  "uuid",
  "waker-fn",
+ "wasm-bindgen",
+ "web-time",
  "windows 0.61.3",
  "zed-font-kit",
  "zed-scap",
@@ -7494,7 +7658,6 @@ dependencies = [
  "calloop",
  "calloop-wayland-source",
  "collections",
- "cosmic-text",
  "filedescriptor",
  "futures 0.3.31",
  "gpui",
@@ -7507,12 +7670,14 @@ dependencies = [
  "open",
  "parking_lot",
  "pathfinder_geometry",
+ "pollster 0.4.0",
  "profiling",
  "raw-window-handle",
  "smallvec",
  "smol",
  "strum 0.27.2",
  "swash",
+ "url",
  "util",
  "uuid",
  "wayland-backend",
@@ -7524,7 +7689,6 @@ dependencies = [
  "x11-clipboard",
  "x11rb",
  "xkbcommon",
- "zed-font-kit",
  "zed-scap",
  "zed-xim",
 ]
@@ -7535,7 +7699,6 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "async-task",
- "bindgen 0.71.1",
  "block",
  "cbindgen",
  "cocoa 0.26.0",
@@ -7546,7 +7709,8 @@ dependencies = [
  "core-text",
  "core-video",
  "ctor",
- "derive_more 0.99.20",
+ "derive_more",
+ "dispatch2",
  "etagere",
  "foreign-types 0.5.0",
  "futures 0.3.31",
@@ -7585,9 +7749,11 @@ dependencies = [
 name = "gpui_platform"
 version = "0.1.0"
 dependencies = [
+ "console_error_panic_hook",
  "gpui",
  "gpui_linux",
  "gpui_macos",
+ "gpui_web",
  "gpui_windows",
 ]
 
@@ -7601,6 +7767,37 @@ dependencies = [
  "util",
 ]
 
+[[package]]
+name = "gpui_util"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "log",
+]
+
+[[package]]
+name = "gpui_web"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "console_error_panic_hook",
+ "futures 0.3.31",
+ "gpui",
+ "gpui_wgpu",
+ "http_client",
+ "js-sys",
+ "log",
+ "parking_lot",
+ "raw-window-handle",
+ "smallvec",
+ "uuid",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm_thread",
+ "web-sys",
+ "web-time",
+]
+
 [[package]]
 name = "gpui_wgpu"
 version = "0.1.0"
@@ -7608,15 +7805,24 @@ dependencies = [
  "anyhow",
  "bytemuck",
  "collections",
+ "cosmic-text",
  "etagere",
  "gpui",
+ "gpui_util",
+ "itertools 0.14.0",
+ "js-sys",
  "log",
  "parking_lot",
+ "pollster 0.4.0",
  "profiling",
  "raw-window-handle",
- "smol",
- "util",
+ "smallvec",
+ "swash",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
  "wgpu",
+ "zed-font-kit",
 ]
 
 [[package]]
@@ -8061,7 +8267,7 @@ dependencies = [
  "async-fs",
  "async-tar",
  "bytes 1.11.1",
- "derive_more 0.99.20",
+ "derive_more",
  "futures 0.3.31",
  "http 1.3.1",
  "http-body 1.0.1",
@@ -8837,9 +9043,9 @@ dependencies = [
 
 [[package]]
 name = "js-sys"
-version = "0.3.81"
+version = "0.3.90"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305"
+checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6"
 dependencies = [
  "once_cell",
  "wasm-bindgen",
@@ -8938,9 +9144,9 @@ dependencies = [
 
 [[package]]
 name = "jupyter-protocol"
-version = "1.2.1"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c75a69caf8b8e781224badfb76c4a8da4d49856de36ce72ae3cf5d4a1c94e42"
+checksum = "4649647741f9794a7a02e3be976f1b248ba28a37dbfc626d5089316fd4fbf4c8"
 dependencies = [
  "async-trait",
  "bytes 1.11.1",
@@ -9271,6 +9477,7 @@ dependencies = [
  "open_path_prompt",
  "picker",
  "project",
+ "serde_json",
  "settings",
  "ui",
  "util",
@@ -9511,10 +9718,11 @@ dependencies = [
 
 [[package]]
 name = "libwebrtc"
-version = "0.3.10"
-source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d"
+version = "0.3.26"
+source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459"
 dependencies = [
  "cxx",
+ "glib",
  "jni",
  "js-sys",
  "lazy_static",
@@ -9608,9 +9816,12 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092"
 
 [[package]]
 name = "livekit"
-version = "0.7.8"
-source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d"
+version = "0.7.32"
+source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459"
 dependencies = [
+ "base64 0.22.1",
+ "bmrng",
+ "bytes 1.11.1",
  "chrono",
  "futures-util",
  "lazy_static",
@@ -9631,11 +9842,12 @@ dependencies = [
 
 [[package]]
 name = "livekit-api"
-version = "0.4.2"
-source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d"
+version = "0.4.14"
+source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459"
 dependencies = [
+ "base64 0.21.7",
  "futures-util",
- "http 0.2.12",
+ "http 1.3.1",
  "livekit-protocol",
  "livekit-runtime",
  "log",
@@ -9643,20 +9855,22 @@ dependencies = [
  "pbjson-types",
  "prost 0.12.6",
  "rand 0.9.2",
- "reqwest 0.11.27",
+ "reqwest 0.12.24",
+ "rustls-native-certs 0.6.3",
  "scopeguard",
  "serde",
  "sha2",
  "thiserror 1.0.69",
  "tokio",
- "tokio-tungstenite 0.26.2",
+ "tokio-rustls 0.26.2",
+ "tokio-tungstenite 0.28.0",
  "url",
 ]
 
 [[package]]
 name = "livekit-protocol"
-version = "0.3.9"
-source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d"
+version = "0.7.1"
+source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459"
 dependencies = [
  "futures-util",
  "livekit-runtime",
@@ -9664,7 +9878,6 @@ dependencies = [
  "pbjson",
  "pbjson-types",
  "prost 0.12.6",
- "prost-types 0.12.6",
  "serde",
  "thiserror 1.0.69",
  "tokio",
@@ -9673,7 +9886,7 @@ dependencies = [
 [[package]]
 name = "livekit-runtime"
 version = "0.4.0"
-source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d"
+source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459"
 dependencies = [
  "tokio",
  "tokio-stream",
@@ -9729,7 +9942,6 @@ dependencies = [
  "sha2",
  "simplelog",
  "smallvec",
- "tokio-tungstenite 0.26.2",
  "ui",
  "util",
  "zed-scap",
@@ -9824,6 +10036,7 @@ dependencies = [
  "ctor",
  "futures 0.3.31",
  "gpui",
+ "gpui_util",
  "log",
  "lsp-types",
  "parking_lot",
@@ -9990,6 +10203,7 @@ dependencies = [
  "language",
  "linkify",
  "log",
+ "markdown",
  "markup5ever_rcdom",
  "mermaid-rs-renderer",
  "pretty_assertions",
@@ -10208,7 +10422,7 @@ dependencies = [
 [[package]]
 name = "mermaid-rs-renderer"
 version = "0.2.0"
-source = "git+https://github.com/zed-industries/mermaid-rs-renderer?branch=fix-font-family-xml-escaping#d91961aa90bc7b0c09c87a13c91d48e2f05c468d"
+source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=374db9ead5426697c6c2111151d9f246899bc638#374db9ead5426697c6c2111151d9f246899bc638"
 dependencies = [
  "anyhow",
  "fontdb 0.16.2",
@@ -10489,17 +10703,35 @@ version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
 
-[[package]]
-name = "multimap"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084"
-
 [[package]]
 name = "naga"
 version = "28.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135"
+dependencies = [
+ "arrayvec",
+ "bit-set",
+ "bitflags 2.10.0",
+ "cfg-if",
+ "cfg_aliases 0.2.1",
+ "codespan-reporting 0.12.0",
+ "half",
+ "hashbrown 0.16.1",
+ "hexf-parse",
+ "indexmap",
+ "libm",
+ "log",
+ "num-traits",
+ "once_cell",
+ "rustc-hash 1.1.0",
+ "thiserror 2.0.17",
+ "unicode-ident",
+]
+
+[[package]]
+name = "naga"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
 dependencies = [
  "arrayvec",
  "bit-set",
@@ -10558,9 +10790,9 @@ dependencies = [
 
 [[package]]
 name = "nbformat"
-version = "1.1.0"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b10a89a2d910233ec3fca4de359b16ebe95e833c8b2162643ef98c6053a0549d"
+checksum = "d4983a40792c45e8639f77ef8e4461c55679cbc618f4b9e83830e8c7e79c8383"
 dependencies = [
  "anyhow",
  "chrono",
@@ -10661,7 +10893,6 @@ dependencies = [
  "cfg-if",
  "cfg_aliases 0.2.1",
  "libc",
- "memoffset",
 ]
 
 [[package]]
@@ -10847,6 +11078,22 @@ dependencies = [
  "num-iter",
  "num-traits",
  "rand 0.8.5",
+ "smallvec",
+ "zeroize",
+]
+
+[[package]]
+name = "num-bigint-dig"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7f9a86e097b0d187ad0e65667c2f58b9254671e86e7dbb78036b16692eae099"
+dependencies = [
+ "libm",
+ "num-integer",
+ "num-iter",
+ "num-traits",
+ "once_cell",
+ "rand 0.9.2",
  "serde",
  "smallvec",
  "zeroize",
@@ -11220,15 +11467,15 @@ checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
 
 [[package]]
 name = "oo7"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3299dd401feaf1d45afd8fd1c0586f10fcfb22f244bb9afa942cec73503b89d"
+checksum = "78f2bfed90f1618b4b48dcad9307f25e14ae894e2949642c87c351601d62cebd"
 dependencies = [
  "aes",
  "ashpd",
  "async-fs",
  "async-io",
- "async-lock 3.4.1",
+ "async-lock 3.4.2",
  "blocking",
  "cbc",
  "cipher",

Cargo.toml 🔗

@@ -1,8 +1,8 @@
 [workspace]
 resolver = "2"
 members = [
-    "crates/acp_tools",
     "crates/acp_thread",
+    "crates/acp_tools",
     "crates/action_log",
     "crates/activity_indicator",
     "crates/agent",
@@ -13,9 +13,9 @@ members = [
     "crates/anthropic",
     "crates/askpass",
     "crates/assets",
-    "crates/assistant_text_thread",
     "crates/assistant_slash_command",
     "crates/assistant_slash_commands",
+    "crates/assistant_text_thread",
     "crates/audio",
     "crates/auto_update",
     "crates/auto_update_helper",
@@ -32,6 +32,7 @@ members = [
     "crates/cloud_api_client",
     "crates/cloud_api_types",
     "crates/cloud_llm_client",
+    "crates/codestral",
     "crates/collab",
     "crates/collab_ui",
     "crates/collections",
@@ -44,6 +45,7 @@ members = [
     "crates/copilot_chat",
     "crates/crashes",
     "crates/credentials_provider",
+    "crates/csv_preview",
     "crates/dap",
     "crates/dap_adapters",
     "crates/db",
@@ -56,9 +58,10 @@ members = [
     "crates/diagnostics",
     "crates/docs_preprocessor",
     "crates/edit_prediction",
+    "crates/edit_prediction_cli",
+    "crates/edit_prediction_context",
     "crates/edit_prediction_types",
     "crates/edit_prediction_ui",
-    "crates/edit_prediction_context",
     "crates/editor",
     "crates/encoding_selector",
     "crates/etw_tracing",
@@ -88,9 +91,11 @@ members = [
     "crates/gpui_macos",
     "crates/gpui_macros",
     "crates/gpui_platform",
+    "crates/gpui_tokio",
+    "crates/gpui_util",
+    "crates/gpui_web",
     "crates/gpui_wgpu",
     "crates/gpui_windows",
-    "crates/gpui_tokio",
     "crates/html_to_markdown",
     "crates/http_client",
     "crates/http_client_tls",
@@ -119,8 +124,8 @@ members = [
     "crates/media",
     "crates/menu",
     "crates/migrator",
-    "crates/mistral",
     "crates/miniprofiler_ui",
+    "crates/mistral",
     "crates/multi_buffer",
     "crates/nc",
     "crates/net",
@@ -136,6 +141,7 @@ members = [
     "crates/panel",
     "crates/paths",
     "crates/picker",
+    "crates/platform_title_bar",
     "crates/prettier",
     "crates/project",
     "crates/project_benchmarks",
@@ -147,7 +153,6 @@ members = [
     "crates/refineable",
     "crates/refineable/derive_refineable",
     "crates/release_channel",
-    "crates/scheduler",
     "crates/remote",
     "crates/remote_connection",
     "crates/remote_server",
@@ -157,10 +162,10 @@ members = [
     "crates/rope",
     "crates/rpc",
     "crates/rules_library",
+    "crates/scheduler",
     "crates/schema_generator",
     "crates/search",
     "crates/session",
-    "crates/sidebar",
     "crates/settings",
     "crates/settings_content",
     "crates/settings_json",
@@ -168,6 +173,7 @@ members = [
     "crates/settings_profile_selector",
     "crates/settings_ui",
     "crates/shell_command_parser",
+    "crates/sidebar",
     "crates/snippet",
     "crates/snippet_provider",
     "crates/snippets_ui",
@@ -177,9 +183,6 @@ members = [
     "crates/storybook",
     "crates/streaming_diff",
     "crates/sum_tree",
-    "crates/supermaven",
-    "crates/supermaven_api",
-    "crates/codestral",
     "crates/svg_preview",
     "crates/system_specs",
     "crates/tab_switcher",
@@ -195,7 +198,6 @@ members = [
     "crates/theme_importer",
     "crates/theme_selector",
     "crates/time_format",
-    "crates/platform_title_bar",
     "crates/title_bar",
     "crates/toolchain_selector",
     "crates/ui",
@@ -207,10 +209,10 @@ members = [
     "crates/vercel",
     "crates/vim",
     "crates/vim_mode_setting",
-    "crates/which_key",
     "crates/watch",
     "crates/web_search",
     "crates/web_search_providers",
+    "crates/which_key",
     "crates/workspace",
     "crates/worktree",
     "crates/worktree_benchmarks",
@@ -218,7 +220,6 @@ members = [
     "crates/zed",
     "crates/zed_actions",
     "crates/zed_env_vars",
-    "crates/edit_prediction_cli",
     "crates/zeta_prompt",
     "crates/zlog",
     "crates/zlog_settings",
@@ -298,6 +299,7 @@ copilot_ui = { path = "crates/copilot_ui" }
 crashes = { path = "crates/crashes" }
 credentials_provider = { path = "crates/credentials_provider" }
 crossbeam = "0.8.4"
+csv_preview = { path = "crates/csv_preview" }
 dap = { path = "crates/dap" }
 dap_adapters = { path = "crates/dap_adapters" }
 db = { path = "crates/db" }
@@ -332,9 +334,11 @@ gpui_linux = { path = "crates/gpui_linux", default-features = false }
 gpui_macos = { path = "crates/gpui_macos", default-features = false }
 gpui_macros = { path = "crates/gpui_macros" }
 gpui_platform = { path = "crates/gpui_platform", default-features = false }
+gpui_web = { path = "crates/gpui_web" }
 gpui_wgpu = { path = "crates/gpui_wgpu" }
 gpui_windows = { path = "crates/gpui_windows", default-features = false }
 gpui_tokio = { path = "crates/gpui_tokio" }
+gpui_util = { path = "crates/gpui_util" }
 html_to_markdown = { path = "crates/html_to_markdown" }
 http_client = { path = "crates/http_client" }
 http_client_tls = { path = "crates/http_client_tls" }
@@ -366,7 +370,7 @@ markdown_preview = { path = "crates/markdown_preview" }
 svg_preview = { path = "crates/svg_preview" }
 media = { path = "crates/media" }
 menu = { path = "crates/menu" }
-mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", branch = "fix-font-family-xml-escaping", default-features = false }
+mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "374db9ead5426697c6c2111151d9f246899bc638", default-features = false }
 migrator = { path = "crates/migrator" }
 mistral = { path = "crates/mistral" }
 multi_buffer = { path = "crates/multi_buffer" }
@@ -423,8 +427,6 @@ sqlez_macros = { path = "crates/sqlez_macros" }
 story = { path = "crates/story" }
 streaming_diff = { path = "crates/streaming_diff" }
 sum_tree = { path = "crates/sum_tree" }
-supermaven = { path = "crates/supermaven" }
-supermaven_api = { path = "crates/supermaven_api" }
 codestral = { path = "crates/codestral" }
 system_specs = { path = "crates/system_specs" }
 tab_switcher = { path = "crates/tab_switcher" }
@@ -479,9 +481,15 @@ alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev
 any_vec = "0.14"
 anyhow = "1.0.86"
 arrayvec = { version = "0.7.4", features = ["serde"] }
-ashpd = { version = "0.12.1", default-features = false, features = [
-    "async-std",
+ashpd = { version = "0.13", default-features = false, features = [
+    "async-io",
+    "notification",
+    "open_uri",
+    "file_chooser",
+    "settings",
+    "trash"
 ] }
+async-channel = "2.5.0"
 async-compat = "0.2.1"
 async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
 async-dispatcher = "0.1"
@@ -530,7 +538,16 @@ criterion = { version = "0.5", features = ["html_reports"] }
 ctor = "0.4.0"
 dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "1b461b310481d01e02b2603c16d7144b926339f8" }
 dashmap = "6.0"
-derive_more = "0.99.17"
+derive_more = { version = "2.1.1", features = [
+    "add",
+    "add_assign",
+    "deref",
+    "deref_mut",
+    "from_str",
+    "mul",
+    "mul_assign",
+    "not",
+] }
 dirs = "4.0"
 documented = "0.9.1"
 dotenvy = "0.15.0"
@@ -542,6 +559,7 @@ exec = "0.3.1"
 fancy-regex = "0.16.0"
 fork = "0.4.0"
 futures = "0.3"
+futures-concurrency = "7.7.1"
 futures-lite = "1.13"
 gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "c9eac0ed361583e1072860d96776fa52775b82ac" }
 git2 = { version = "0.20.1", default-features = false, features = ["vendored-libgit2"] }
@@ -565,11 +583,13 @@ itertools = "0.14.0"
 json_dotpath = "1.1"
 jsonschema = "0.37.0"
 jsonwebtoken = "10.0"
-jupyter-protocol = "1.2.0"
+jupyter-protocol = "1.4.0"
 jupyter-websocket-client = "1.0.0"
 libc = "0.2"
 libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
 linkify = "0.10.0"
+libwebrtc = "0.3.26"
+livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] }
 log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
 lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" }
 mach2 = "0.5"
@@ -579,7 +599,7 @@ minidumper = "0.8"
 moka = { version = "0.12.10", features = ["sync"] }
 naga = { version = "28.0", features = ["wgsl-in"] }
 nanoid = "0.4"
-nbformat = "1.1.0"
+nbformat = "1.2.0"
 nix = "0.29"
 num-format = "0.4.4"
 objc = "0.2"
@@ -632,6 +652,7 @@ profiling = "1"
 prost = "0.9"
 prost-build = "0.9"
 prost-types = "0.9"
+pollster = "0.4.0"
 pulldown-cmark = { version = "0.13.0", default-features = false }
 quote = "1.0.9"
 rand = "0.9"
@@ -648,7 +669,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662
     "stream",
 ], package = "zed-reqwest", version = "0.12.15-zed" }
 rsa = "0.9.6"
-runtimelib = { version = "1.2.0", default-features = false, features = [
+runtimelib = { version = "1.4.0", default-features = false, features = [
     "async-dispatcher-runtime", "aws-lc-rs"
 ] }
 rust-embed = { version = "8.4", features = ["include-exclude"] }
@@ -756,7 +777,9 @@ wasmtime = { version = "33", default-features = false, features = [
 wasmtime-wasi = "33"
 wax = "0.7"
 which = "6.0.0"
-wgpu = "28.0"
+wasm-bindgen = "0.2.113"
+web-time = "1.1.0"
+wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "9459e95113c5bd116b2cc2c87e8424b28059e17c" }
 windows-core = "0.61"
 yawc = "0.2.5"
 zeroize = "1.8"
@@ -767,11 +790,13 @@ zstd = "0.11"
 version = "0.61"
 features = [
     "Foundation_Numerics",
+    "Globalization_DateTimeFormatting",
     "Storage_Search",
     "Storage_Streams",
     "System_Threading",
     "UI_ViewManagement",
     "Wdk_System_SystemServices",
+    "Win32_Foundation",
     "Win32_Globalization",
     "Win32_Graphics_Direct3D",
     "Win32_Graphics_Direct3D11",
@@ -799,6 +824,7 @@ features = [
     "Win32_System_Ole",
     "Win32_System_Performance",
     "Win32_System_Pipes",
+    "Win32_System_RestartManager",
     "Win32_System_SystemInformation",
     "Win32_System_SystemServices",
     "Win32_System_Threading",
@@ -821,6 +847,8 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c
 notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" }
 windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }
 calloop = { git = "https://github.com/zed-industries/calloop" }
+livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" }
+libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" }
 
 [profile.dev]
 split-debuginfo = "unpacked"
@@ -880,7 +908,6 @@ sidebar = { codegen-units = 1 }
 snippet = { codegen-units = 1 }
 snippets_ui = { codegen-units = 1 }
 story = { codegen-units = 1 }
-supermaven_api = { codegen-units = 1 }
 telemetry_events = { codegen-units = 1 }
 theme_selector = { codegen-units = 1 }
 time_format = { codegen-units = 1 }

assets/icons/ai_vercel.svg 🔗

@@ -0,0 +1,3 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 1L15.5 14H0.5L8 1Z" fill="black"/>
+</svg>

assets/icons/fast_forward.svg 🔗

@@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 4.4366C8.00005 4.20171 8.06974 3.97211 8.20026 3.77683C8.33078 3.58154 8.51627 3.42934 8.73328 3.33946C8.95029 3.24958 9.18908 3.22605 9.41946 3.27186C9.64983 3.31767 9.86146 3.43076 10.0276 3.59683L13.591 7.16022C13.8136 7.38297 13.9387 7.68503 13.9387 8C13.9387 8.31496 13.8136 8.61702 13.591 8.83977L10.0276 12.4032C9.86146 12.5692 9.64983 12.6823 9.41946 12.7281C9.18908 12.7739 8.95029 12.7504 8.73328 12.6605C8.51627 12.5707 8.33078 12.4185 8.20026 12.2232C8.06974 12.0279 8.00005 11.7983 8 11.5634V4.4366Z" fill="#C6CAD0" fill-opacity="0.15" stroke="#C6CAD0" stroke-width="1.06902" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M2.061 4.4366C2.06105 4.20171 2.13075 3.97211 2.26127 3.77683C2.39179 3.58154 2.57728 3.42934 2.79429 3.33946C3.0113 3.24958 3.25008 3.22605 3.48046 3.27186C3.71084 3.31767 3.92246 3.43076 4.08858 3.59683L7.65197 7.16022C7.87465 7.38297 7.99974 7.68503 7.99974 8C7.99974 8.31496 7.87465 8.61702 7.65197 8.83977L4.08858 12.4032C3.92246 12.5692 3.71084 12.6823 3.48046 12.7281C3.25008 12.7739 3.0113 12.7504 2.79429 12.6605C2.57728 12.5707 2.39179 12.4185 2.26127 12.2232C2.13075 12.0279 2.06105 11.7983 2.061 11.5634V4.4366Z" fill="#C6CAD0" fill-opacity="0.15" stroke="#C6CAD0" stroke-width="1.06902" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/icons/fast_forward_off.svg 🔗

@@ -0,0 +1,5 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 10.3715V11.5634C8.00005 11.7983 8.06974 12.0279 8.20026 12.2232C8.33078 12.4185 8.51627 12.5707 8.73328 12.6605C8.95029 12.7504 9.18908 12.7739 9.41946 12.7281C9.64983 12.6823 9.86146 12.5692 10.0276 12.4032L10.2748 12.1559M8 5.38478V4.4366C8.00005 4.20171 8.06974 3.97211 8.20026 3.77683C8.33078 3.58154 8.51627 3.42934 8.73328 3.33946C8.95029 3.24958 9.18908 3.22605 9.41946 3.27186C9.64983 3.31767 9.86146 3.43076 10.0276 3.59683L13.591 7.16022C13.8136 7.38297 13.9387 7.68503 13.9387 8C13.9387 8.31496 13.8136 8.61702 13.591 8.83977L12.7618 9.66893" stroke="#C6CAD0" stroke-width="1.06902" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M2.061 6.99803V11.5634C2.06105 11.7983 2.13075 12.0279 2.26127 12.2232C2.39179 12.4185 2.57728 12.5707 2.79429 12.6605C3.0113 12.7504 3.25008 12.7739 3.48046 12.7281C3.71084 12.6823 3.92246 12.5692 4.08858 12.4032L6.46394 10.0278" stroke="#C6CAD0" stroke-width="1.06902" stroke-linecap="round" stroke-linejoin="round"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M14.1225 13.809C14.0341 13.9146 13.877 13.9289 13.7711 13.8409L1.19311 3.4002C1.08659 3.31177 1.07221 3.15361 1.16104 3.04742L1.87752 2.191C1.96588 2.08539 2.123 2.07111 2.22895 2.15905L14.8069 12.5998C14.9134 12.6882 14.9278 12.8464 14.839 12.9526L14.1225 13.809Z" fill="#C6CAD0"/>
+</svg>

assets/icons/file_icons/gitlab.svg 🔗

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path fill="#c6cad0" fill-rule="evenodd" d="m13.823 6.818-.017-.044-1.64-4.28a.43.43 0 0 0-.424-.267.45.45 0 0 0-.246.092.45.45 0 0 0-.146.221l-1.107 3.387H5.76L4.653 2.54a.43.43 0 0 0-.816-.044L2.195 6.773l-.017.043a3.045 3.045 0 0 0 1.105 3.465l.026.018L8 13.78l4.69-3.48.027-.019a3.04 3.04 0 0 0 1.106-3.462" clip-rule="evenodd"/></svg>

assets/icons/file_icons/yaml.svg 🔗

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path fill="#c6cad0" d="M2.067 2.422h1.364l1.3 2.14 1.427-2.14h1.364L5.26 5.604v1.818H4.15V5.604zM4.15 8.288h1.379l1.456 3.554h.062l1.457-3.554h1.38v5.29H8.798v-3.443h-.044l-1.37 3.417h-.738l-1.369-3.43h-.044v3.456H4.15zM10.547 13.578v-5.29h1.118v4.368h2.268v.922z"/><path fill="#c6cad0" d="M11.704 7.422h-1.31l-.365-1.055H7.865l-.412 1.055H6.145l2-5h1.573zm-3.5-1.928h1.52l-.694-2h-.045z" opacity=".5"/></svg>

assets/icons/git_commit.svg 🔗

@@ -0,0 +1,5 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 2V6" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M8 10C9.10457 10 10 9.10457 10 8C10 6.89543 9.10457 6 8 6C6.89543 6 6 6.89543 6 8C6 9.10457 6.89543 10 8 10Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M8 10V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/icons/git_graph.svg 🔗

@@ -1,4 +1,7 @@
 <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M3 3V11.8889C3 12.1836 3.11706 12.4662 3.32544 12.6746C3.53381 12.8829 3.81643 13 4.11111 13H13" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M11.8889 6.33333L9.11112 9.11111L6.8889 6.88888L5.22223 8.55555" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M4.19617 6.09808L4.19617 13.7058" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M11.8159 11.8038L11.8159 13.7058" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M11.816 11.8038C12.8664 11.8038 13.7179 10.9523 13.7179 9.90192C13.7179 8.85152 12.8664 8 11.816 8C10.7656 8 9.91403 8.85152 9.91403 9.90192C9.91403 10.9523 10.7656 11.8038 11.816 11.8038Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M4.19617 6.09808C5.24657 6.09808 6.09809 5.24656 6.09809 4.19616C6.09809 3.14575 5.24657 2.29424 4.19617 2.29424C3.14577 2.29424 2.29425 3.14575 2.29425 4.19616C2.29425 5.24656 3.14577 6.09808 4.19617 6.09808Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M10.0842 9.90192H7.30429C5.58772 9.90192 4.19617 8.51036 4.19617 6.79379V5.58465" stroke="#C6CAD0" stroke-width="1.2"/>
 </svg>

assets/icons/new_thread.svg 🔗

@@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M6 8H8M8 8H10M8 8V6M8 8V10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M8 13.7253C11.1619 13.7253 13.7253 11.162 13.7253 8.00001C13.7253 4.83803 11.1619 2.27475 8 2.27475C4.83802 2.27475 2.27474 4.83803 2.27474 8.00001C2.27474 9.04281 2.55354 10.0205 3.04068 10.8626L2.561 13.439L5.13737 12.9593C5.97948 13.4465 6.9572 13.7253 8 13.7253Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/icons/open_folder.svg 🔗

@@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M6.98001 12.8001H3.20001C2.88175 12.8001 2.57652 12.6736 2.35148 12.4486C2.12644 12.2235 2.00001 11.9183 2.00001 11.6001V3.80006C2.00001 3.4818 2.12644 3.17658 2.35148 2.95154C2.57652 2.72649 2.88175 2.60006 3.20001 2.60006H5.58801C5.7887 2.5981 5.98668 2.6465 6.16383 2.74084C6.34097 2.83517 6.49163 2.97244 6.60201 3.14006L6.99801 3.86006C7.10727 4.02598 7.25602 4.16218 7.43091 4.25643C7.60579 4.35067 7.80134 4.40003 8.00001 4.40006H12.8C13.1183 4.40006 13.4235 4.52649 13.6485 4.75153C13.8736 4.97658 14 5.2818 14 5.60006V7.58006" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M9.8 11.6H11.6M11.6 11.6H13.4M11.6 11.6V9.79999M11.6 11.6V13.4" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/icons/queue_message.svg 🔗

@@ -0,0 +1,7 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M10.3404 3.53018H2.73401" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M10.3404 7.62592H2.73401" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M6.24467 11.7217H2.73401" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M10.3404 9.96637L8.58511 11.7217L10.3404 13.477" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M13.266 3.53018V10.5515C13.266 10.8619 13.1427 11.1595 12.9232 11.379C12.7038 11.5984 12.4061 11.7217 12.0958 11.7217H8.58511" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

assets/keymaps/default-linux.json 🔗

@@ -204,6 +204,7 @@
   {
     "context": "Editor && editor_agent_diff",
     "bindings": {
+      "alt-y": "agent::Keep",
       "ctrl-alt-y": "agent::Keep",
       "ctrl-alt-z": "agent::Reject",
       "shift-alt-y": "agent::KeepAll",
@@ -214,6 +215,7 @@
   {
     "context": "AgentDiff",
     "bindings": {
+      "alt-y": "agent::Keep",
       "ctrl-alt-y": "agent::Keep",
       "ctrl-alt-z": "agent::Reject",
       "shift-alt-y": "agent::KeepAll",
@@ -333,6 +335,7 @@
       "ctrl-alt-k": "agent::ToggleThinkingMode",
       "ctrl-alt-'": "agent::ToggleThinkingEffortMenu",
       "ctrl-'": "agent::CycleThinkingEffort",
+      "ctrl-alt-.": "agent::ToggleFastMode",
     },
   },
   {
@@ -670,6 +673,9 @@
     "use_key_equivalents": true,
     "bindings": {
       "ctrl-n": "multi_workspace::NewWorkspaceInWindow",
+      "left": "agents_sidebar::CollapseSelectedEntry",
+      "right": "agents_sidebar::ExpandSelectedEntry",
+      "enter": "menu::Confirm",
     },
   },
   {
@@ -1309,6 +1315,7 @@
     "bindings": {
       "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
       "ctrl-space": "git::WorktreeFromDefault",
+      "ctrl-shift-backspace": "git::DeleteWorktree",
     },
   },
   {

assets/keymaps/default-macos.json 🔗

@@ -242,6 +242,7 @@
     "context": "AgentDiff",
     "use_key_equivalents": true,
     "bindings": {
+      "cmd-y": "agent::Keep",
       "cmd-alt-y": "agent::Keep",
       "cmd-alt-z": "agent::Reject",
       "shift-alt-y": "agent::KeepAll",
@@ -252,6 +253,7 @@
     "context": "Editor && editor_agent_diff",
     "use_key_equivalents": true,
     "bindings": {
+      "cmd-y": "agent::Keep",
       "cmd-alt-y": "agent::Keep",
       "cmd-alt-z": "agent::Reject",
       "shift-alt-y": "agent::KeepAll",
@@ -377,6 +379,7 @@
       "cmd-alt-k": "agent::ToggleThinkingMode",
       "cmd-alt-'": "agent::ToggleThinkingEffortMenu",
       "ctrl-'": "agent::CycleThinkingEffort",
+      "cmd-alt-.": "agent::ToggleFastMode",
     },
   },
   {
@@ -447,6 +450,13 @@
       "down": "search::NextHistoryQuery",
     },
   },
+  {
+    "context": "BufferSearchBar || ProjectSearchBar",
+    "use_key_equivalents": true,
+    "bindings": {
+      "ctrl-enter": "editor::Newline",
+    },
+  },
   {
     "context": "ProjectSearchBar",
     "use_key_equivalents": true,
@@ -731,6 +741,9 @@
     "use_key_equivalents": true,
     "bindings": {
       "cmd-n": "multi_workspace::NewWorkspaceInWindow",
+      "left": "agents_sidebar::CollapseSelectedEntry",
+      "right": "agents_sidebar::ExpandSelectedEntry",
+      "enter": "menu::Confirm",
     },
   },
   {
@@ -1407,6 +1420,7 @@
     "bindings": {
       "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
       "ctrl-space": "git::WorktreeFromDefault",
+      "cmd-shift-backspace": "git::DeleteWorktree",
     },
   },
   {

assets/keymaps/default-windows.json 🔗

@@ -203,6 +203,7 @@
     "context": "Editor && editor_agent_diff",
     "use_key_equivalents": true,
     "bindings": {
+      "alt-y": "agent::Keep",
       "ctrl-alt-y": "agent::Keep",
       "ctrl-alt-z": "agent::Reject",
       "shift-alt-y": "agent::KeepAll",
@@ -214,6 +215,7 @@
     "context": "AgentDiff",
     "use_key_equivalents": true,
     "bindings": {
+      "alt-y": "agent::Keep",
       "ctrl-alt-y": "agent::Keep",
       "ctrl-alt-z": "agent::Reject",
       "shift-alt-y": "agent::KeepAll",
@@ -335,6 +337,7 @@
       "ctrl-alt-k": "agent::ToggleThinkingMode",
       "ctrl-alt-'": "agent::ToggleThinkingEffortMenu",
       "ctrl-'": "agent::CycleThinkingEffort",
+      "ctrl-alt-.": "agent::ToggleFastMode",
     },
   },
   {
@@ -674,6 +677,9 @@
     "use_key_equivalents": true,
     "bindings": {
       "ctrl-n": "multi_workspace::NewWorkspaceInWindow",
+      "left": "agents_sidebar::CollapseSelectedEntry",
+      "right": "agents_sidebar::ExpandSelectedEntry",
+      "enter": "menu::Confirm",
     },
   },
   {
@@ -1330,6 +1336,7 @@
     "bindings": {
       "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
       "ctrl-space": "git::WorktreeFromDefault",
+      "ctrl-shift-backspace": "git::DeleteWorktree",
     },
   },
   {

assets/keymaps/vim.json 🔗

@@ -1110,4 +1110,12 @@
       "shift-g": "menu::SelectLast",
     },
   },
+  {
+    "context": "NotebookEditor > Editor && VimControl && vim_mode == normal",
+
+    "bindings": {
+      "j": "notebook::NotebookMoveDown",
+      "k": "notebook::NotebookMoveUp",
+    },
+  },
 ]

assets/settings/default.json 🔗

@@ -361,8 +361,11 @@
   // bracket, brace, single or double quote characters.
   // For example, when you select text and type '(', Zed will surround the text with ().
   "use_auto_surround": true,
-  // Whether indentation should be adjusted based on the context whilst typing.
-  "auto_indent": true,
+  // Controls automatic indentation behavior when typing.
+  // - "syntax_aware": Adjusts indentation based on syntax context (default)
+  // - "preserve_indent": Preserves current line's indentation on new lines
+  // - "none": No automatic indentation
+  "auto_indent": "syntax_aware",
   // Whether indentation of pasted content should be adjusted based on the context.
   "auto_indent_on_paste": true,
   // Controls how the editor handles the autoclosed characters.
@@ -799,6 +802,8 @@
     // 3. Show files first, then directories:
     //    "files_first"
     "sort_mode": "directories_first",
+    // Whether to show error and warning count badges next to file names in the project panel.
+    "diagnostic_badges": false,
     // Whether to enable drag-and-drop operations in the project panel.
     "drag_and_drop": true,
     // Whether to hide the root entry when only one folder is open in the window;
@@ -913,6 +918,10 @@
       // Default: inherits editor scrollbar settings
       // "show": null
     },
+    // Whether to show the addition/deletion change count next to each file in the Git panel.
+    //
+    // Default: false
+    "diff_stats": false,
   },
   "message_editor": {
     // Whether to automatically replace emoji shortcodes with emoji characters.
@@ -1265,8 +1274,6 @@
     //
     // Default: true
     "skip_focus_for_active_in_search": true,
-    // Whether to show the git status in the file finder.
-    "git_status": true,
     // Whether to use gitignored files when searching.
     // Only the file Zed had indexed will be used, not necessary all the gitignored files.
     //
@@ -1827,8 +1834,8 @@
         "    (",
         "        # multi-char path: first char (not opening delimiter, space, or box drawing char)",
         "        [^({\\[<\"'`\\ \\u2500-\\u257F]",
-        "        # middle chars: non-space, and colon/paren only if not followed by digit/paren",
-        "        ([^\\ :(]|[:(][^0-9()])*",
+        "        # middle chars: non-space, and colon/paren only if not followed by digit/paren/space",
+        "        ([^\\ :(]|[:(][^0-9()\\ ])*",
         "        # last char: not closing delimiter or colon",
         "        [^()}\\]>\"'`.,;:\\ ]",
         "    |",
@@ -2222,6 +2229,9 @@
     "vercel": {
       "api_url": "https://api.v0.dev/v1",
     },
+    "vercel_ai_gateway": {
+      "api_url": "https://ai-gateway.vercel.sh/v1",
+    },
     "x_ai": {
       "api_url": "https://api.x.ai/v1",
     },

assets/settings/default_semantic_token_rules.json 🔗

@@ -2,7 +2,9 @@
 //
 // These rules map LSP semantic token types to syntax theme styles.
 // To customize, add rules to "semantic_token_rules" in your settings.json.
-// User-defined rules are prepended to these defaults and take precedence.
+// User-defined rules are prepended and take highest precedence.
+// Extension language rules are applied next.
+// These built-in defaults are applied last.
 //
 // Each rule has the following properties:
 // - `token_type`: The LSP semantic token type to match. If omitted, matches all types.

crates/acp_thread/src/acp_thread.rs 🔗

@@ -2,55 +2,23 @@ mod connection;
 mod diff;
 mod mention;
 mod terminal;
-
-/// Key used in ACP ToolCall meta to store the tool's programmatic name.
-/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field.
-pub const TOOL_NAME_META_KEY: &str = "tool_name";
-
-/// Key used in ACP ToolCall meta to store the session id when a subagent is spawned.
-pub const SUBAGENT_SESSION_ID_META_KEY: &str = "subagent_session_id";
-
-/// Helper to extract tool name from ACP meta
-pub fn tool_name_from_meta(meta: &Option<acp::Meta>) -> Option<SharedString> {
-    meta.as_ref()
-        .and_then(|m| m.get(TOOL_NAME_META_KEY))
-        .and_then(|v| v.as_str())
-        .map(|s| SharedString::from(s.to_owned()))
-}
-
-/// Helper to extract subagent session id from ACP meta
-pub fn subagent_session_id_from_meta(meta: &Option<acp::Meta>) -> Option<acp::SessionId> {
-    meta.as_ref()
-        .and_then(|m| m.get(SUBAGENT_SESSION_ID_META_KEY))
-        .and_then(|v| v.as_str())
-        .map(|s| acp::SessionId::from(s.to_string()))
-}
-
-/// Helper to create meta with tool name
-pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta {
-    acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())])
-}
-use collections::HashSet;
-pub use connection::*;
-pub use diff::*;
-use language::language_settings::FormatOnSave;
-pub use mention::*;
-use project::lsp_store::{FormatTrigger, LspFormatTarget};
-use serde::{Deserialize, Serialize};
-use serde_json::to_string_pretty;
-
-use task::{Shell, ShellBuilder};
-pub use terminal::*;
-
 use action_log::{ActionLog, ActionLogTelemetry};
 use agent_client_protocol::{self as acp};
 use anyhow::{Context as _, Result, anyhow};
+use collections::HashSet;
+pub use connection::*;
+pub use diff::*;
 use futures::{FutureExt, channel::oneshot, future::BoxFuture};
 use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity};
 use itertools::Itertools;
+use language::language_settings::FormatOnSave;
 use language::{Anchor, Buffer, BufferSnapshot, LanguageRegistry, Point, ToPoint, text_diff};
 use markdown::Markdown;
+pub use mention::*;
+use project::lsp_store::{FormatTrigger, LspFormatTarget};
 use project::{AgentLocation, Project, git_store::GitStoreCheckpoint};
+use serde::{Deserialize, Serialize};
+use serde_json::to_string_pretty;
 use std::collections::HashMap;
 use std::error::Error;
 use std::fmt::{Formatter, Write};
@@ -59,11 +27,51 @@ use std::process::ExitStatus;
 use std::rc::Rc;
 use std::time::{Duration, Instant};
 use std::{fmt::Display, mem, path::PathBuf, sync::Arc};
+use task::{Shell, ShellBuilder};
+pub use terminal::*;
 use text::Bias;
 use ui::App;
 use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle};
 use uuid::Uuid;
 
+/// Key used in ACP ToolCall meta to store the tool's programmatic name.
+/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field.
+pub const TOOL_NAME_META_KEY: &str = "tool_name";
+
+/// Helper to extract tool name from ACP meta
+pub fn tool_name_from_meta(meta: &Option<acp::Meta>) -> Option<SharedString> {
+    meta.as_ref()
+        .and_then(|m| m.get(TOOL_NAME_META_KEY))
+        .and_then(|v| v.as_str())
+        .map(|s| SharedString::from(s.to_owned()))
+}
+
+/// Helper to create meta with tool name
+pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta {
+    acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())])
+}
+
+/// Key used in ACP ToolCall meta to store the session id and message indexes
+pub const SUBAGENT_SESSION_INFO_META_KEY: &str = "subagent_session_info";
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct SubagentSessionInfo {
+    /// The session id of the subagent sessiont that was spawned
+    pub session_id: acp::SessionId,
+    /// The index of the message of the start of the "turn" run by this tool call
+    pub message_start_index: usize,
+    /// The index of the output of the message that the subagent has returned
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub message_end_index: Option<usize>,
+}
+
+/// Helper to extract subagent session id from ACP meta
+pub fn subagent_session_info_from_meta(meta: &Option<acp::Meta>) -> Option<SubagentSessionInfo> {
+    meta.as_ref()
+        .and_then(|m| m.get(SUBAGENT_SESSION_INFO_META_KEY))
+        .and_then(|v| serde_json::from_value(v.clone()).ok())
+}
+
 #[derive(Debug)]
 pub struct UserMessage {
     pub id: Option<UserMessageId>,
@@ -102,6 +110,7 @@ impl UserMessage {
 pub struct AssistantMessage {
     pub chunks: Vec<AssistantMessageChunk>,
     pub indented: bool,
+    pub is_subagent_output: bool,
 }
 
 impl AssistantMessage {
@@ -222,7 +231,7 @@ pub struct ToolCall {
     pub raw_input_markdown: Option<Entity<Markdown>>,
     pub raw_output: Option<serde_json::Value>,
     pub tool_name: Option<SharedString>,
-    pub subagent_session_id: Option<acp::SessionId>,
+    pub subagent_session_info: Option<SubagentSessionInfo>,
 }
 
 impl ToolCall {
@@ -261,7 +270,7 @@ impl ToolCall {
 
         let tool_name = tool_name_from_meta(&tool_call.meta);
 
-        let subagent_session = subagent_session_id_from_meta(&tool_call.meta);
+        let subagent_session_info = subagent_session_info_from_meta(&tool_call.meta);
 
         let result = Self {
             id: tool_call.tool_call_id,
@@ -276,7 +285,7 @@ impl ToolCall {
             raw_input_markdown,
             raw_output: tool_call.raw_output,
             tool_name,
-            subagent_session_id: subagent_session,
+            subagent_session_info,
         };
         Ok(result)
     }
@@ -309,8 +318,8 @@ impl ToolCall {
             self.status = status.into();
         }
 
-        if let Some(subagent_session_id) = subagent_session_id_from_meta(&meta) {
-            self.subagent_session_id = Some(subagent_session_id);
+        if let Some(subagent_session_info) = subagent_session_info_from_meta(&meta) {
+            self.subagent_session_info = Some(subagent_session_info);
         }
 
         if let Some(title) = title {
@@ -401,7 +410,7 @@ impl ToolCall {
 
     pub fn is_subagent(&self) -> bool {
         self.tool_name.as_ref().is_some_and(|s| s == "spawn_agent")
-            || self.subagent_session_id.is_some()
+            || self.subagent_session_info.is_some()
     }
 
     pub fn to_markdown(&self, cx: &App) -> String {
@@ -961,6 +970,10 @@ pub struct AcpThread {
     pending_terminal_output: HashMap<acp::TerminalId, Vec<Vec<u8>>>,
     pending_terminal_exit: HashMap<acp::TerminalId, acp::TerminalExitStatus>,
     had_error: bool,
+    /// The user's unsent prompt text, persisted so it can be restored when reloading the thread.
+    draft_prompt: Option<Vec<acp::ContentBlock>>,
+    /// The initial scroll position for the thread view, set during session registration.
+    ui_scroll_position: Option<gpui::ListOffset>,
 }
 
 impl From<&AcpThread> for ActionLogTelemetry {
@@ -983,7 +996,7 @@ pub enum AcpThreadEvent {
     ToolAuthorizationReceived(acp::ToolCallId),
     Retry(RetryStatus),
     SubagentSpawned(acp::SessionId),
-    Stopped,
+    Stopped(acp::StopReason),
     Error,
     LoadError(LoadError),
     PromptCapabilitiesUpdated,
@@ -1198,6 +1211,8 @@ impl AcpThread {
             pending_terminal_output: HashMap::default(),
             pending_terminal_exit: HashMap::default(),
             had_error: false,
+            draft_prompt: None,
+            ui_scroll_position: None,
         }
     }
 
@@ -1209,6 +1224,22 @@ impl AcpThread {
         self.prompt_capabilities.clone()
     }
 
+    /// Returns the unsent draft prompt saved for this thread, if any.
+    pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> {
+        self.draft_prompt.as_deref()
+    }
+
+    /// Stores (or clears, with `None`) the user's unsent prompt so it can be
+    /// restored when the thread is reloaded.
+    pub fn set_draft_prompt(&mut self, prompt: Option<Vec<acp::ContentBlock>>) {
+        self.draft_prompt = prompt;
+    }
+
+    /// Returns the saved scroll position for the thread view, if one was recorded.
+    pub fn ui_scroll_position(&self) -> Option<gpui::ListOffset> {
+        self.ui_scroll_position
+    }
+
+    /// Saves (or clears, with `None`) the scroll position for the thread view.
+    pub fn set_ui_scroll_position(&mut self, position: Option<gpui::ListOffset>) {
+        self.ui_scroll_position = position;
+    }
+
     pub fn connection(&self) -> &Rc<dyn AgentConnection> {
         &self.connection
     }
@@ -1425,6 +1456,7 @@ impl AcpThread {
             && let AgentThreadEntry::AssistantMessage(AssistantMessage {
                 chunks,
                 indented: existing_indented,
+                is_subagent_output: _,
             }) = last_entry
             && *existing_indented == indented
         {
@@ -1456,6 +1488,7 @@ impl AcpThread {
                 AgentThreadEntry::AssistantMessage(AssistantMessage {
                     chunks: vec![chunk],
                     indented,
+                    is_subagent_output: false,
                 }),
                 cx,
             );
@@ -1525,7 +1558,7 @@ impl AcpThread {
                     raw_input_markdown: None,
                     raw_output: None,
                     tool_name: None,
-                    subagent_session_id: None,
+                    subagent_session_info: None,
                 };
                 self.push_entry(AgentThreadEntry::ToolCall(failed_tool_call), cx);
                 return Ok(());
@@ -1589,6 +1622,7 @@ impl AcpThread {
 
         let agent_telemetry_id = self.connection().telemetry_id();
         let session = self.session_id();
+        let parent_session_id = self.parent_session_id();
         if let ToolCallStatus::Completed | ToolCallStatus::Failed = status {
             let status = if matches!(status, ToolCallStatus::Completed) {
                 "completed"
@@ -1599,6 +1633,7 @@ impl AcpThread {
                 "Agent Tool Call Completed",
                 agent_telemetry_id,
                 session,
+                parent_session_id,
                 status
             );
         }
@@ -1687,10 +1722,14 @@ impl AcpThread {
 
     pub fn tool_call_for_subagent(&self, session_id: &acp::SessionId) -> Option<&ToolCall> {
         self.entries.iter().find_map(|entry| match entry {
-            AgentThreadEntry::ToolCall(tool_call)
-                if tool_call.subagent_session_id.as_ref() == Some(session_id) =>
-            {
-                Some(tool_call)
+            AgentThreadEntry::ToolCall(tool_call) => {
+                if let Some(subagent_session_info) = &tool_call.subagent_session_info
+                    && &subagent_session_info.session_id == session_id
+                {
+                    Some(tool_call)
+                } else {
+                    None
+                }
             }
             _ => None,
         })
@@ -1698,6 +1737,7 @@ impl AcpThread {
 
     pub fn resolve_locations(&mut self, id: acp::ToolCallId, cx: &mut Context<Self>) {
         let project = self.project.clone();
+        let should_update_agent_location = self.parent_session_id.is_none();
         let Some((_, tool_call)) = self.tool_call_mut(&id) else {
             return;
         };
@@ -1733,7 +1773,7 @@ impl AcpThread {
                         } else {
                             false
                         };
-                        if !should_ignore {
+                        if !should_ignore && should_update_agent_location {
                             project.set_agent_location(Some(location.into()), cx);
                         }
                     });
@@ -1964,8 +2004,10 @@ impl AcpThread {
                 .await?;
 
             this.update(cx, |this, cx| {
-                this.project
-                    .update(cx, |project, cx| project.set_agent_location(None, cx));
+                if this.parent_session_id.is_none() {
+                    this.project
+                        .update(cx, |project, cx| project.set_agent_location(None, cx));
+                }
                 let Ok(response) = response else {
                     // tx dropped, just return
                     return Ok(None);
@@ -2033,7 +2075,7 @@ impl AcpThread {
                             }
                         }
 
-                        cx.emit(AcpThreadEvent::Stopped);
+                        cx.emit(AcpThreadEvent::Stopped(r.stop_reason));
                         Ok(Some(r))
                     }
                     Err(e) => {
@@ -2237,6 +2279,7 @@ impl AcpThread {
         let limit = limit.unwrap_or(u32::MAX);
         let project = self.project.clone();
         let action_log = self.action_log.clone();
+        let should_update_agent_location = self.parent_session_id.is_none();
         cx.spawn(async move |this, cx| {
             let load = project.update(cx, |project, cx| {
                 let path = project
@@ -2287,15 +2330,17 @@ impl AcpThread {
             let start = snapshot.anchor_before(start_position);
             let end = snapshot.anchor_before(Point::new(line.saturating_add(limit), 0));
 
-            project.update(cx, |project, cx| {
-                project.set_agent_location(
-                    Some(AgentLocation {
-                        buffer: buffer.downgrade(),
-                        position: start,
-                    }),
-                    cx,
-                );
-            });
+            if should_update_agent_location {
+                project.update(cx, |project, cx| {
+                    project.set_agent_location(
+                        Some(AgentLocation {
+                            buffer: buffer.downgrade(),
+                            position: start,
+                        }),
+                        cx,
+                    );
+                });
+            }
 
             Ok(snapshot.text_for_range(start..end).collect::<String>())
         })
@@ -2309,6 +2354,7 @@ impl AcpThread {
     ) -> Task<Result<()>> {
         let project = self.project.clone();
         let action_log = self.action_log.clone();
+        let should_update_agent_location = self.parent_session_id.is_none();
         cx.spawn(async move |this, cx| {
             let load = project.update(cx, |project, cx| {
                 let path = project
@@ -2336,18 +2382,20 @@ impl AcpThread {
                 })
                 .await;
 
-            project.update(cx, |project, cx| {
-                project.set_agent_location(
-                    Some(AgentLocation {
-                        buffer: buffer.downgrade(),
-                        position: edits
-                            .last()
-                            .map(|(range, _)| range.end)
-                            .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())),
-                    }),
-                    cx,
-                );
-            });
+            if should_update_agent_location {
+                project.update(cx, |project, cx| {
+                    project.set_agent_location(
+                        Some(AgentLocation {
+                            buffer: buffer.downgrade(),
+                            position: edits
+                                .last()
+                                .map(|(range, _)| range.end)
+                                .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())),
+                        }),
+                        cx,
+                    );
+                });
+            }
 
             let format_on_save = cx.update(|cx| {
                 action_log.update(cx, |action_log, cx| {
@@ -2549,6 +2597,16 @@ impl AcpThread {
         self.terminals.insert(terminal_id.clone(), entity.clone());
         entity
     }
+
+    /// Flags the most recent assistant message as subagent output and notifies
+    /// observers. Does nothing if the thread has no assistant message yet.
+    pub fn mark_as_subagent_output(&mut self, cx: &mut Context<Self>) {
+        // Iterate backwards so only the latest assistant message is marked.
+        for entry in self.entries.iter_mut().rev() {
+            if let AgentThreadEntry::AssistantMessage(assistant_message) = entry {
+                assistant_message.is_subagent_output = true;
+                cx.notify();
+                return;
+            }
+        }
+    }
 }
 
 fn markdown_for_raw_output(

crates/acp_thread/src/connection.rs 🔗

@@ -496,6 +496,7 @@ mod test_support {
     //! - `create_test_png_base64` for generating test images
 
     use std::sync::Arc;
+    use std::sync::atomic::{AtomicUsize, Ordering};
 
     use action_log::ActionLog;
     use collections::HashMap;
@@ -621,7 +622,9 @@ mod test_support {
             _cwd: &Path,
             cx: &mut gpui::App,
         ) -> Task<gpui::Result<Entity<AcpThread>>> {
-            let session_id = acp::SessionId::new(self.sessions.lock().len().to_string());
+            static NEXT_SESSION_ID: AtomicUsize = AtomicUsize::new(0);
+            let session_id =
+                acp::SessionId::new(NEXT_SESSION_ID.fetch_add(1, Ordering::SeqCst).to_string());
             let action_log = cx.new(|_| ActionLog::new(project.clone()));
             let thread = cx.new(|cx| {
                 AcpThread::new(

crates/acp_thread/src/diff.rs 🔗

@@ -149,6 +149,16 @@ impl Diff {
         }
     }
 
+    /// Returns the display path of the file this diff applies to: the buffer's
+    /// full path while the diff is pending, or the recorded path once finalized.
+    /// `None` when a pending diff's buffer has no backing file.
+    pub fn file_path(&self, cx: &App) -> Option<String> {
+        match self {
+            Self::Pending(PendingDiff { new_buffer, .. }) => new_buffer
+                .read(cx)
+                .file()
+                .map(|file| file.full_path(cx).to_string_lossy().into_owned()),
+            Self::Finalized(FinalizedDiff { path, .. }) => Some(path.clone()),
+        }
+    }
+
     pub fn multibuffer(&self) -> &Entity<MultiBuffer> {
         match self {
             Self::Pending(PendingDiff { multibuffer, .. }) => multibuffer,

crates/acp_thread/src/mention.rs 🔗

@@ -254,6 +254,41 @@ impl MentionUri {
         }
     }
 
+    /// Text to show when hovering a mention: the absolute path for files and
+    /// directories, or `path:start-end` for symbols and selections that carry
+    /// both a path and a line range. `None` for mention kinds with nothing to
+    /// show (e.g. selections without a path).
+    pub fn tooltip_text(&self) -> Option<SharedString> {
+        match self {
+            MentionUri::File { abs_path } | MentionUri::Directory { abs_path } => {
+                Some(abs_path.to_string_lossy().into_owned().into())
+            }
+            // NOTE(review): the Symbol and Selection arms below duplicate the
+            // same `path:start-end` formatting; consider a shared helper.
+            MentionUri::Symbol {
+                abs_path,
+                line_range,
+                ..
+            } => Some(
+                format!(
+                    "{}:{}-{}",
+                    abs_path.display(),
+                    line_range.start(),
+                    line_range.end()
+                )
+                .into(),
+            ),
+            MentionUri::Selection {
+                abs_path: Some(path),
+                line_range,
+                ..
+            } => Some(
+                format!(
+                    "{}:{}-{}",
+                    path.display(),
+                    line_range.start(),
+                    line_range.end()
+                )
+                .into(),
+            ),
+            _ => None,
+        }
+    }
+
     pub fn icon_path(&self, cx: &mut App) -> SharedString {
         match self {
             MentionUri::File { abs_path } => {

crates/action_log/Cargo.toml 🔗

@@ -20,6 +20,7 @@ buffer_diff.workspace = true
 log.workspace = true
 clock.workspace = true
 collections.workspace = true
+fs.workspace = true
 futures.workspace = true
 gpui.workspace = true
 language.workspace = true

crates/action_log/src/action_log.rs 🔗

@@ -1,14 +1,20 @@
 use anyhow::{Context as _, Result};
 use buffer_diff::BufferDiff;
 use clock;
-use collections::BTreeMap;
+use collections::{BTreeMap, HashMap};
+use fs::MTime;
 use futures::{FutureExt, StreamExt, channel::mpsc};
 use gpui::{
     App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
 };
 use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
 use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
-use std::{cmp, ops::Range, sync::Arc};
+use std::{
+    cmp,
+    ops::Range,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 use text::{Edit, Patch, Rope};
 use util::{RangeExt, ResultExt as _};
 
@@ -48,8 +54,14 @@ pub struct ActionLog {
     tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
     /// The project this action log is associated with
     project: Entity<Project>,
+    /// An action log to forward all public methods to
+    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
+    /// but also want to associate the reads/writes with a parent review experience
+    linked_action_log: Option<Entity<ActionLog>>,
     /// Stores undo information for the most recent reject operation
     last_reject_undo: Option<LastRejectUndo>,
+    /// Tracks the last time files were read by the agent, to detect external modifications
+    file_read_times: HashMap<PathBuf, MTime>,
 }
 
 impl ActionLog {
@@ -58,14 +70,47 @@ impl ActionLog {
         Self {
             tracked_buffers: BTreeMap::default(),
             project,
+            linked_action_log: None,
             last_reject_undo: None,
+            file_read_times: HashMap::default(),
         }
     }
 
+    /// Builder-style setter linking this log to a parent log. Reads, edits,
+    /// creations, and deletions recorded here are forwarded to the parent, so
+    /// subagent activity shows up in the parent's review experience.
+    pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
+        self.linked_action_log = Some(linked_action_log);
+        self
+    }
+
     pub fn project(&self) -> &Entity<Project> {
         &self.project
     }
 
+    /// Returns the on-disk mtime recorded the last time the agent read `path`,
+    /// used to detect modifications made outside the agent since that read.
+    pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
+        self.file_read_times.get(path).copied()
+    }
+
+    /// Records the buffer's current on-disk mtime under its absolute path.
+    /// Only local files with a known disk mtime are recorded; in-memory or
+    /// non-local buffers are ignored.
+    // NOTE(review): these nested `if let`s could be a single `let` chain
+    // (`if let … && let … && let …`), which this file already uses elsewhere.
+    fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
+        let buffer = buffer.read(cx);
+        if let Some(file) = buffer.file() {
+            if let Some(local_file) = file.as_local() {
+                if let Some(mtime) = file.disk_state().mtime() {
+                    let abs_path = local_file.abs_path(cx);
+                    self.file_read_times.insert(abs_path, mtime);
+                }
+            }
+        }
+    }
+
+    /// Drops the recorded read time for the buffer's file (e.g. when the file
+    /// is about to be deleted). No-op for non-local buffers.
+    fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
+        let buffer = buffer.read(cx);
+        if let Some(file) = buffer.file() {
+            if let Some(local_file) = file.as_local() {
+                let abs_path = local_file.abs_path(cx);
+                self.file_read_times.remove(&abs_path);
+            }
+        }
+    }
+
     fn track_buffer_internal(
         &mut self,
         buffer: Entity<Buffer>,
@@ -496,16 +541,70 @@ impl ActionLog {
 
     /// Track a buffer as read by agent, so we can notify the model about user edits.
     pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
+        // Public entry point: also records the file's read time (mtime),
+        // unlike calls forwarded from a linked log.
+        self.buffer_read_impl(buffer, true, cx);
+    }
+
+    /// Shared implementation for `buffer_read`. `record_file_read_time` is
+    /// `false` when the call was forwarded from a linked (child) log.
+    fn buffer_read_impl(
+        &mut self,
+        buffer: Entity<Buffer>,
+        record_file_read_time: bool,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(linked_action_log) = &self.linked_action_log {
+            // Don't share read times: the linked agent hasn't necessarily read the file.
+            linked_action_log.update(cx, |log, cx| {
+                log.buffer_read_impl(buffer.clone(), false, cx);
+            });
+        }
+        if record_file_read_time {
+            self.update_file_read_time(&buffer, cx);
+        }
         self.track_buffer_internal(buffer, false, cx);
     }
 
     /// Mark a buffer as created by agent, so we can refresh it in the context
     pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
+        // Public entry point: also records the file's read time (mtime),
+        // unlike calls forwarded from a linked log.
+        self.buffer_created_impl(buffer, true, cx);
+    }
+
+    /// Shared implementation for `buffer_created`. `record_file_read_time` is
+    /// `false` when the call was forwarded from a linked (child) log.
+    fn buffer_created_impl(
+        &mut self,
+        buffer: Entity<Buffer>,
+        record_file_read_time: bool,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(linked_action_log) = &self.linked_action_log {
+            // Don't share read times: the linked agent hasn't necessarily read the file.
+            linked_action_log.update(cx, |log, cx| {
+                log.buffer_created_impl(buffer.clone(), false, cx);
+            });
+        }
+        if record_file_read_time {
+            self.update_file_read_time(&buffer, cx);
+        }
         self.track_buffer_internal(buffer, true, cx);
     }
 
     /// Mark a buffer as edited by agent, so we can refresh it in the context
     pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
+        // Public entry point: also records the file's read time (mtime),
+        // unlike calls forwarded from a linked log.
+        self.buffer_edited_impl(buffer, true, cx);
+    }
+
+    fn buffer_edited_impl(
+        &mut self,
+        buffer: Entity<Buffer>,
+        record_file_read_time: bool,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(linked_action_log) = &self.linked_action_log {
+            // We don't want to share read times since the other agent hasn't read it necessarily
+            linked_action_log.update(cx, |log, cx| {
+                log.buffer_edited_impl(buffer.clone(), false, cx);
+            });
+        }
+        if record_file_read_time {
+            self.update_file_read_time(&buffer, cx);
+        }
         let new_version = buffer.read(cx).version();
         let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
         if let TrackedBufferStatus::Deleted = tracked_buffer.status {
@@ -517,6 +616,9 @@ impl ActionLog {
     }
 
     pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
+        // Ok to propagate file read time removal to linked action log
+        self.remove_file_read_time(&buffer, cx);
+        let has_linked_action_log = self.linked_action_log.is_some();
         let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
         match tracked_buffer.status {
             TrackedBufferStatus::Created { .. } => {
@@ -524,12 +626,24 @@ impl ActionLog {
                 cx.notify();
             }
             TrackedBufferStatus::Modified => {
-                buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                 tracked_buffer.status = TrackedBufferStatus::Deleted;
-                tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
+                if !has_linked_action_log {
+                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
+                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
+                }
             }
+
             TrackedBufferStatus::Deleted => {}
         }
+
+        if let Some(linked_action_log) = &mut self.linked_action_log {
+            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
+        }
+
+        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
+            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
+        }
+
         cx.notify();
     }
 
@@ -914,15 +1028,6 @@ impl ActionLog {
             .collect()
     }
 
-    /// Returns all tracked buffers for debugging purposes
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn tracked_buffers_for_debug(
-        &self,
-        _cx: &App,
-    ) -> impl Iterator<Item = (&Entity<Buffer>, &TrackedBuffer)> {
-        self.tracked_buffers.iter()
-    }
-
     /// Iterate over buffers changed since last read or edited by the model
     pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
         self.tracked_buffers
@@ -2634,6 +2739,515 @@ mod tests {
         assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
     }
 
+    // Reads recorded on a child log are forwarded to its linked parent, so a
+    // later user edit makes the buffer stale in both logs.
+    #[gpui::test]
+    async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+        let child_log =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+        });
+
+        // Neither log considers the buffer stale immediately after reading it.
+        let child_stale = cx.read(|cx| {
+            child_log
+                .read(cx)
+                .stale_buffers(cx)
+                .cloned()
+                .collect::<Vec<_>>()
+        });
+        let parent_stale = cx.read(|cx| {
+            parent_log
+                .read(cx)
+                .stale_buffers(cx)
+                .cloned()
+                .collect::<Vec<_>>()
+        });
+        assert!(child_stale.is_empty());
+        assert!(parent_stale.is_empty());
+
+        // Simulate a user edit after the agent read the file.
+        cx.update(|cx| {
+            buffer.update(cx, |buffer, cx| {
+                buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
+            });
+        });
+        cx.run_until_parked();
+
+        // Both child and parent should see the buffer as stale because both tracked
+        // it at the pre-edit version via buffer_read forwarding.
+        let child_stale = cx.read(|cx| {
+            child_log
+                .read(cx)
+                .stale_buffers(cx)
+                .cloned()
+                .collect::<Vec<_>>()
+        });
+        let parent_stale = cx.read(|cx| {
+            parent_log
+                .read(cx)
+                .stale_buffers(cx)
+                .cloned()
+                .collect::<Vec<_>>()
+        });
+        assert_eq!(child_stale, vec![buffer.clone()]);
+        assert_eq!(parent_stale, vec![buffer]);
+    }
+
+    // An agent edit recorded on a child log produces the same unreviewed hunk
+    // in both the child and its linked parent log.
+    #[gpui::test]
+    async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+        let child_log =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+            buffer.update(cx, |buffer, cx| {
+                buffer
+                    .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
+                    .unwrap();
+            });
+            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+        });
+        cx.run_until_parked();
+
+        let expected_hunks = vec![(
+            buffer,
+            vec![HunkStatus {
+                range: Point::new(1, 0)..Point::new(2, 0),
+                diff_status: DiffHunkStatusKind::Modified,
+                old_text: "def\n".into(),
+            }],
+        )];
+        assert_eq!(
+            unreviewed_hunks(&child_log, cx),
+            expected_hunks,
+            "child should track the agent edit"
+        );
+        assert_eq!(
+            unreviewed_hunks(&parent_log, cx),
+            expected_hunks,
+            "parent should also track the agent edit via linked log forwarding"
+        );
+    }
+
+    // A file created via a child log shows up as an "Added" hunk in both the
+    // child and its linked parent log.
+    #[gpui::test]
+    async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({})).await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+        let child_log =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| {
+                project.find_project_path("dir/new_file", cx)
+            })
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
+            buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
+            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+        });
+        project
+            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+            .await
+            .unwrap();
+        cx.run_until_parked();
+
+        let expected_hunks = vec![(
+            buffer.clone(),
+            vec![HunkStatus {
+                range: Point::new(0, 0)..Point::new(0, 5),
+                diff_status: DiffHunkStatusKind::Added,
+                old_text: "".into(),
+            }],
+        )];
+        assert_eq!(
+            unreviewed_hunks(&child_log, cx),
+            expected_hunks,
+            "child should track the created file"
+        );
+        assert_eq!(
+            unreviewed_hunks(&parent_log, cx),
+            expected_hunks,
+            "parent should also track the created file via linked log forwarding"
+        );
+    }
+
+    // A file deletion recorded on a child log shows up as a "Deleted" hunk in
+    // both the child and its linked parent log.
+    #[gpui::test]
+    async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+        let child_log =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
+            .await
+            .unwrap();
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
+        });
+        project
+            .update(cx, |project, cx| project.delete_file(file_path, false, cx))
+            .unwrap()
+            .await
+            .unwrap();
+        cx.run_until_parked();
+
+        let expected_hunks = vec![(
+            buffer.clone(),
+            vec![HunkStatus {
+                range: Point::new(0, 0)..Point::new(0, 0),
+                diff_status: DiffHunkStatusKind::Deleted,
+                old_text: "hello\n".into(),
+            }],
+        )];
+        assert_eq!(
+            unreviewed_hunks(&child_log, cx),
+            expected_hunks,
+            "child should track the deleted file"
+        );
+        assert_eq!(
+            unreviewed_hunks(&parent_log, cx),
+            expected_hunks,
+            "parent should also track the deleted file via linked log forwarding"
+        );
+    }
+
+    /// Simulates the subagent scenario: two child logs linked to the same parent, each
+    /// editing a different file. The parent accumulates all edits while each child
+    /// only sees its own.
+    #[gpui::test]
+    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(
+            path!("/dir"),
+            json!({
+                "file_a": "content of a",
+                "file_b": "content of b",
+            }),
+        )
+        .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+        let child_log_1 =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+        let child_log_2 =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+        let file_a_path = project
+            .read_with(cx, |project, cx| {
+                project.find_project_path("dir/file_a", cx)
+            })
+            .unwrap();
+        let file_b_path = project
+            .read_with(cx, |project, cx| {
+                project.find_project_path("dir/file_b", cx)
+            })
+            .unwrap();
+        let buffer_a = project
+            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
+            .await
+            .unwrap();
+        let buffer_b = project
+            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
+            .await
+            .unwrap();
+
+        // Child 1 edits file_a; child 2 edits file_b.
+        cx.update(|cx| {
+            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
+            buffer_a.update(cx, |buffer, cx| {
+                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
+            });
+            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));
+
+            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
+            buffer_b.update(cx, |buffer, cx| {
+                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
+            });
+            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
+        });
+        cx.run_until_parked();
+
+        // Collect which buffers each log reports as changed.
+        let child_1_changed: Vec<_> = cx.read(|cx| {
+            child_log_1
+                .read(cx)
+                .changed_buffers(cx)
+                .into_keys()
+                .collect()
+        });
+        let child_2_changed: Vec<_> = cx.read(|cx| {
+            child_log_2
+                .read(cx)
+                .changed_buffers(cx)
+                .into_keys()
+                .collect()
+        });
+        let parent_changed: Vec<_> = cx.read(|cx| {
+            parent_log
+                .read(cx)
+                .changed_buffers(cx)
+                .into_keys()
+                .collect()
+        });
+
+        assert_eq!(
+            child_1_changed,
+            vec![buffer_a.clone()],
+            "child 1 should only track file_a"
+        );
+        assert_eq!(
+            child_2_changed,
+            vec![buffer_b.clone()],
+            "child 2 should only track file_b"
+        );
+        assert_eq!(parent_changed.len(), 2, "parent should track both files");
+        assert!(
+            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
+            "parent should contain both buffer_a and buffer_b"
+        );
+    }
+
+    // buffer_read records the file's on-disk mtime under its absolute path.
+    #[gpui::test]
+    async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        let abs_path = PathBuf::from(path!("/dir/file"));
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "file_read_time should be None before buffer_read"
+        );
+
+        cx.update(|cx| {
+            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+        });
+
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+            "file_read_time should be recorded after buffer_read"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        let abs_path = PathBuf::from(path!("/dir/file"));
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "file_read_time should be None before buffer_edited"
+        );
+
+        cx.update(|cx| {
+            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+        });
+
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+            "file_read_time should be recorded after buffer_edited"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        let abs_path = PathBuf::from(path!("/dir/file"));
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "file_read_time should be None before buffer_created"
+        );
+
+        cx.update(|cx| {
+            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
+        });
+
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+            "file_read_time should be recorded after buffer_created"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        let abs_path = PathBuf::from(path!("/dir/file"));
+
+        cx.update(|cx| {
+            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+        });
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+            "file_read_time should exist after buffer_read"
+        );
+
+        cx.update(|cx| {
+            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
+        });
+        assert!(
+            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "file_read_time should be removed after will_delete_buffer"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+            .await;
+        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+        let child_log =
+            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+        let file_path = project
+            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+            .unwrap();
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer(file_path, cx))
+            .await
+            .unwrap();
+
+        let abs_path = PathBuf::from(path!("/dir/file"));
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+        });
+        assert!(
+            child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+            "child should record file_read_time on buffer_read"
+        );
+        assert!(
+            parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "parent should NOT get file_read_time from child's buffer_read"
+        );
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+        });
+        assert!(
+            parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "parent should NOT get file_read_time from child's buffer_edited"
+        );
+
+        cx.update(|cx| {
+            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
+        });
+        assert!(
+            parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+            "parent should NOT get file_read_time from child's buffer_created"
+        );
+    }
+
     #[derive(Debug, PartialEq)]
     struct HunkStatus {
         range: Range<Point>,

crates/agent/src/agent.rs 🔗

@@ -14,6 +14,7 @@ mod tools;
 
 use context_server::ContextServerId;
 pub use db::*;
+use itertools::Itertools;
 pub use native_agent_server::NativeAgentServer;
 pub use pattern_extraction::*;
 pub use shell_command_parser::extract_commands;
@@ -51,6 +52,7 @@ use std::path::{Path, PathBuf};
 use std::rc::Rc;
 use std::sync::Arc;
 use util::ResultExt;
+use util::path_list::PathList;
 use util::rel_path::RelPath;
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -349,11 +351,14 @@ impl NativeAgent {
         let session_id = thread.id().clone();
         let parent_session_id = thread.parent_thread_id();
         let title = thread.title();
+        let draft_prompt = thread.draft_prompt().map(Vec::from);
+        let scroll_position = thread.ui_scroll_position();
+        let token_usage = thread.latest_token_usage();
         let project = thread.project.clone();
         let action_log = thread.action_log.clone();
         let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone();
         let acp_thread = cx.new(|cx| {
-            acp_thread::AcpThread::new(
+            let mut acp_thread = acp_thread::AcpThread::new(
                 parent_session_id,
                 title,
                 connection,
@@ -362,18 +367,24 @@ impl NativeAgent {
                 session_id.clone(),
                 prompt_capabilities_rx,
                 cx,
-            )
+            );
+            acp_thread.set_draft_prompt(draft_prompt);
+            acp_thread.set_ui_scroll_position(scroll_position);
+            acp_thread.update_token_usage(token_usage, cx);
+            acp_thread
         });
 
         let registry = LanguageModelRegistry::read_global(cx);
         let summarization_model = registry.thread_summary_model().map(|c| c.model);
 
         let weak = cx.weak_entity();
+        let weak_thread = thread_handle.downgrade();
         thread_handle.update(cx, |thread, cx| {
             thread.set_summarization_model(summarization_model, cx);
             thread.add_default_tools(
                 Rc::new(NativeThreadEnvironment {
                     acp_thread: acp_thread.downgrade(),
+                    thread: weak_thread,
                     agent: weak,
                 }) as _,
                 cx,
@@ -840,19 +851,36 @@ impl NativeAgent {
             return;
         }
 
-        let database_future = ThreadsDatabase::connect(cx);
-        let (id, db_thread) =
-            thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx)));
+        let id = thread.read(cx).id().clone();
         let Some(session) = self.sessions.get_mut(&id) else {
             return;
         };
+
+        let folder_paths = PathList::new(
+            &self
+                .project
+                .read(cx)
+                .visible_worktrees(cx)
+                .map(|worktree| worktree.read(cx).abs_path().to_path_buf())
+                .collect::<Vec<_>>(),
+        );
+
+        let draft_prompt = session.acp_thread.read(cx).draft_prompt().map(Vec::from);
+        let database_future = ThreadsDatabase::connect(cx);
+        let db_thread = thread.update(cx, |thread, cx| {
+            thread.set_draft_prompt(draft_prompt);
+            thread.to_db(cx)
+        });
         let thread_store = self.thread_store.clone();
         session.pending_save = cx.spawn(async move |_, cx| {
             let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else {
                 return;
             };
             let db_thread = db_thread.await;
-            database.save_thread(id, db_thread).await.log_err();
+            database
+                .save_thread(id, db_thread, folder_paths)
+                .await
+                .log_err();
             thread_store.update(cx, |store, cx| store.reload(cx));
         });
     }
@@ -1462,16 +1490,6 @@ impl NativeAgentSessionList {
         }
     }
 
-    fn to_session_info(entry: DbThreadMetadata) -> AgentSessionInfo {
-        AgentSessionInfo {
-            session_id: entry.id,
-            cwd: None,
-            title: Some(entry.title),
-            updated_at: Some(entry.updated_at),
-            meta: None,
-        }
-    }
-
     pub fn thread_store(&self) -> &Entity<ThreadStore> {
         &self.thread_store
     }
@@ -1487,7 +1505,7 @@ impl AgentSessionList for NativeAgentSessionList {
             .thread_store
             .read(cx)
             .entries()
-            .map(Self::to_session_info)
+            .map(|entry| AgentSessionInfo::from(&entry))
             .collect();
         Task::ready(Ok(AgentSessionListResponse::new(sessions)))
     }
@@ -1576,17 +1594,19 @@ impl acp_thread::AgentSessionSetTitle for NativeAgentSessionSetTitle {
 
 pub struct NativeThreadEnvironment {
     agent: WeakEntity<NativeAgent>,
+    thread: WeakEntity<Thread>,
     acp_thread: WeakEntity<AcpThread>,
 }
 
 impl NativeThreadEnvironment {
     pub(crate) fn create_subagent_thread(
-        agent: WeakEntity<NativeAgent>,
-        parent_thread_entity: Entity<Thread>,
+        &self,
         label: String,
-        initial_prompt: String,
         cx: &mut App,
     ) -> Result<Rc<dyn SubagentHandle>> {
+        let Some(parent_thread_entity) = self.thread.upgrade() else {
+            anyhow::bail!("Parent thread no longer exists");
+        };
         let parent_thread = parent_thread_entity.read(cx);
         let current_depth = parent_thread.depth();
 
@@ -1605,28 +1625,29 @@ impl NativeThreadEnvironment {
 
         let session_id = subagent_thread.read(cx).id().clone();
 
-        let acp_thread = agent.update(cx, |agent, cx| {
+        let acp_thread = self.agent.update(cx, |agent, cx| {
             agent.register_session(subagent_thread.clone(), cx)
         })?;
 
-        Self::prompt_subagent(
-            session_id,
-            subagent_thread,
-            acp_thread,
-            parent_thread_entity,
-            initial_prompt,
-            cx,
-        )
+        let depth = current_depth + 1;
+
+        telemetry::event!(
+            "Subagent Started",
+            session = parent_thread_entity.read(cx).id().to_string(),
+            subagent_session = session_id.to_string(),
+            depth,
+            is_resumed = false,
+        );
+
+        self.prompt_subagent(session_id, subagent_thread, acp_thread)
     }
 
     pub(crate) fn resume_subagent_thread(
-        agent: WeakEntity<NativeAgent>,
-        parent_thread_entity: Entity<Thread>,
+        &self,
         session_id: acp::SessionId,
-        follow_up_prompt: String,
         cx: &mut App,
     ) -> Result<Rc<dyn SubagentHandle>> {
-        let (subagent_thread, acp_thread) = agent.update(cx, |agent, _cx| {
+        let (subagent_thread, acp_thread) = self.agent.update(cx, |agent, _cx| {
             let session = agent
                 .sessions
                 .get(&session_id)
@@ -1634,31 +1655,35 @@ impl NativeThreadEnvironment {
             anyhow::Ok((session.thread.clone(), session.acp_thread.clone()))
         })??;
 
-        Self::prompt_subagent(
-            session_id,
-            subagent_thread,
-            acp_thread,
-            parent_thread_entity,
-            follow_up_prompt,
-            cx,
-        )
+        let depth = subagent_thread.read(cx).depth();
+
+        if let Some(parent_thread_entity) = self.thread.upgrade() {
+            telemetry::event!(
+                "Subagent Started",
+                session = parent_thread_entity.read(cx).id().to_string(),
+                subagent_session = session_id.to_string(),
+                depth,
+                is_resumed = true,
+            );
+        }
+
+        self.prompt_subagent(session_id, subagent_thread, acp_thread)
     }
 
     fn prompt_subagent(
+        &self,
         session_id: acp::SessionId,
         subagent_thread: Entity<Thread>,
         acp_thread: Entity<acp_thread::AcpThread>,
-        parent_thread_entity: Entity<Thread>,
-        prompt: String,
-        cx: &mut App,
     ) -> Result<Rc<dyn SubagentHandle>> {
+        let Some(parent_thread_entity) = self.thread.upgrade() else {
+            anyhow::bail!("Parent thread no longer exists");
+        };
         Ok(Rc::new(NativeSubagentHandle::new(
             session_id,
             subagent_thread,
             acp_thread,
             parent_thread_entity,
-            prompt,
-            cx,
         )) as _)
     }
 }
@@ -1697,36 +1722,16 @@ impl ThreadEnvironment for NativeThreadEnvironment {
         })
     }
 
-    fn create_subagent(
-        &self,
-        parent_thread_entity: Entity<Thread>,
-        label: String,
-        initial_prompt: String,
-        cx: &mut App,
-    ) -> Result<Rc<dyn SubagentHandle>> {
-        Self::create_subagent_thread(
-            self.agent.clone(),
-            parent_thread_entity,
-            label,
-            initial_prompt,
-            cx,
-        )
+    fn create_subagent(&self, label: String, cx: &mut App) -> Result<Rc<dyn SubagentHandle>> {
+        self.create_subagent_thread(label, cx)
     }
 
     fn resume_subagent(
         &self,
-        parent_thread_entity: Entity<Thread>,
         session_id: acp::SessionId,
-        follow_up_prompt: String,
         cx: &mut App,
     ) -> Result<Rc<dyn SubagentHandle>> {
-        Self::resume_subagent_thread(
-            self.agent.clone(),
-            parent_thread_entity,
-            session_id,
-            follow_up_prompt,
-            cx,
-        )
+        self.resume_subagent_thread(session_id, cx)
     }
 }
 
@@ -1742,8 +1747,7 @@ pub struct NativeSubagentHandle {
     session_id: acp::SessionId,
     parent_thread: WeakEntity<Thread>,
     subagent_thread: Entity<Thread>,
-    wait_for_prompt_to_complete: Shared<Task<SubagentPromptResult>>,
-    _subscription: Subscription,
+    acp_thread: Entity<acp_thread::AcpThread>,
 }
 
 impl NativeSubagentHandle {
@@ -1752,71 +1756,12 @@ impl NativeSubagentHandle {
         subagent_thread: Entity<Thread>,
         acp_thread: Entity<acp_thread::AcpThread>,
         parent_thread_entity: Entity<Thread>,
-        prompt: String,
-        cx: &mut App,
     ) -> Self {
-        let ratio_before_prompt = subagent_thread
-            .read(cx)
-            .latest_token_usage()
-            .map(|usage| usage.ratio());
-
-        parent_thread_entity.update(cx, |parent_thread, _cx| {
-            parent_thread.register_running_subagent(subagent_thread.downgrade())
-        });
-
-        let task = acp_thread.update(cx, |acp_thread, cx| {
-            acp_thread.send(vec![prompt.into()], cx)
-        });
-
-        let (token_limit_tx, token_limit_rx) = oneshot::channel::<()>();
-        let mut token_limit_tx = Some(token_limit_tx);
-
-        let subscription = cx.subscribe(
-            &subagent_thread,
-            move |_thread, event: &TokenUsageUpdated, _cx| {
-                if let Some(usage) = &event.0 {
-                    let old_ratio = ratio_before_prompt
-                        .clone()
-                        .unwrap_or(TokenUsageRatio::Normal);
-                    let new_ratio = usage.ratio();
-                    if old_ratio == TokenUsageRatio::Normal && new_ratio == TokenUsageRatio::Warning
-                    {
-                        if let Some(tx) = token_limit_tx.take() {
-                            tx.send(()).ok();
-                        }
-                    }
-                }
-            },
-        );
-
-        let wait_for_prompt_to_complete = cx
-            .background_spawn(async move {
-                futures::select! {
-                    response = task.fuse() => match response {
-                        Ok(Some(response)) =>{
-                            match response.stop_reason {
-                                acp::StopReason::Cancelled => SubagentPromptResult::Cancelled,
-                                acp::StopReason::MaxTokens => SubagentPromptResult::Error("The agent reached the maximum number of tokens.".into()),
-                                acp::StopReason::MaxTurnRequests => SubagentPromptResult::Error("The agent reached the maximum number of allowed requests between user turns. Try prompting again.".into()),
-                                acp::StopReason::Refusal => SubagentPromptResult::Error("The agent refused to process that prompt. Try again.".into()),
-                                acp::StopReason::EndTurn | _ => SubagentPromptResult::Completed,
-                            }
-
-                        }
-                        Ok(None) => SubagentPromptResult::Error("No response from the agent. You can try messaging again.".into()),
-                        Err(error) => SubagentPromptResult::Error(error.to_string()),
-                    },
-                    _ = token_limit_rx.fuse() =>  SubagentPromptResult::ContextWindowWarning,
-                }
-            })
-            .shared();
-
         NativeSubagentHandle {
             session_id,
             subagent_thread,
             parent_thread: parent_thread_entity.downgrade(),
-            wait_for_prompt_to_complete,
-            _subscription: subscription,
+            acp_thread,
         }
     }
 }
@@ -1826,22 +1771,100 @@ impl SubagentHandle for NativeSubagentHandle {
         self.session_id.clone()
     }
 
-    fn wait_for_output(&self, cx: &AsyncApp) -> Task<Result<String>> {
-        let thread = self.subagent_thread.clone();
-        let wait_for_prompt = self.wait_for_prompt_to_complete.clone();
+    fn num_entries(&self, cx: &App) -> usize {
+        self.acp_thread.read(cx).entries().len()
+    }
 
+    fn send(&self, message: String, cx: &AsyncApp) -> Task<Result<String>> {
+        let thread = self.subagent_thread.clone();
+        let acp_thread = self.acp_thread.clone();
         let subagent_session_id = self.session_id.clone();
         let parent_thread = self.parent_thread.clone();
 
         cx.spawn(async move |cx| {
-            let result = match wait_for_prompt.await {
+            let (task, _subscription) = cx.update(|cx| {
+                let ratio_before_prompt = thread
+                    .read(cx)
+                    .latest_token_usage()
+                    .map(|usage| usage.ratio());
+
+                parent_thread
+                    .update(cx, |parent_thread, _cx| {
+                        parent_thread.register_running_subagent(thread.downgrade())
+                    })
+                    .ok();
+
+                let task = acp_thread.update(cx, |acp_thread, cx| {
+                    acp_thread.send(vec![message.into()], cx)
+                });
+
+                let (token_limit_tx, token_limit_rx) = oneshot::channel::<()>();
+                let mut token_limit_tx = Some(token_limit_tx);
+
+                let subscription = cx.subscribe(
+                    &thread,
+                    move |_thread, event: &TokenUsageUpdated, _cx| {
+                        if let Some(usage) = &event.0 {
+                            let old_ratio = ratio_before_prompt
+                                .clone()
+                                .unwrap_or(TokenUsageRatio::Normal);
+                            let new_ratio = usage.ratio();
+                            if old_ratio == TokenUsageRatio::Normal
+                                && new_ratio == TokenUsageRatio::Warning
+                            {
+                                if let Some(tx) = token_limit_tx.take() {
+                                    tx.send(()).ok();
+                                }
+                            }
+                        }
+                    },
+                );
+
+                let wait_for_prompt = cx
+                    .background_spawn(async move {
+                        futures::select! {
+                            response = task.fuse() => match response {
+                                Ok(Some(response)) => {
+                                    match response.stop_reason {
+                                        acp::StopReason::Cancelled => SubagentPromptResult::Cancelled,
+                                        acp::StopReason::MaxTokens => SubagentPromptResult::Error("The agent reached the maximum number of tokens.".into()),
+                                        acp::StopReason::MaxTurnRequests => SubagentPromptResult::Error("The agent reached the maximum number of allowed requests between user turns. Try prompting again.".into()),
+                                        acp::StopReason::Refusal => SubagentPromptResult::Error("The agent refused to process that prompt. Try again.".into()),
+                                        acp::StopReason::EndTurn | _ => SubagentPromptResult::Completed,
+                                    }
+                                }
+                                Ok(None) => SubagentPromptResult::Error("No response from the agent. You can try messaging again.".into()),
+                                Err(error) => SubagentPromptResult::Error(error.to_string()),
+                            },
+                            _ = token_limit_rx.fuse() => SubagentPromptResult::ContextWindowWarning,
+                        }
+                    });
+
+                (wait_for_prompt, subscription)
+            });
+
+            let result = match task.await {
                 SubagentPromptResult::Completed => thread.read_with(cx, |thread, _cx| {
                     thread
                         .last_message()
-                        .map(|m| m.to_markdown())
+                        .and_then(|message| {
+                            let content = message.as_agent_message()?
+                                .content
+                                .iter()
+                                .filter_map(|c| match c {
+                                    AgentMessageContent::Text(text) => Some(text.as_str()),
+                                    _ => None,
+                                })
+                                .join("\n\n");
+                            if content.is_empty() {
+                                None
+                            } else {
+                                Some(content)
+                            }
+                        })
                         .context("No response from subagent")
                 }),
-                SubagentPromptResult::Cancelled => Err(anyhow!("User cancelled")),
+                SubagentPromptResult::Cancelled => Err(anyhow!("User canceled")),
                 SubagentPromptResult::Error(message) => Err(anyhow!("{message}")),
                 SubagentPromptResult::ContextWindowWarning => {
                     thread.update(cx, |thread, cx| thread.cancel(cx)).await;
@@ -1910,7 +1933,9 @@ mod internal_tests {
     use gpui::TestAppContext;
     use indoc::formatdoc;
     use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider};
-    use language_model::{LanguageModelProviderId, LanguageModelProviderName};
+    use language_model::{
+        LanguageModelCompletionEvent, LanguageModelProviderId, LanguageModelProviderName,
+    };
     use serde_json::json;
     use settings::SettingsStore;
     use util::{path, rel_path::rel_path};
@@ -2542,6 +2567,13 @@ mod internal_tests {
         cx.run_until_parked();
 
         model.send_last_completion_stream_text_chunk("Lorem.");
+        model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate(
+            language_model::TokenUsage {
+                input_tokens: 150,
+                output_tokens: 75,
+                ..Default::default()
+            },
+        ));
         model.end_last_completion_stream();
         cx.run_until_parked();
         summary_model
@@ -2571,6 +2603,24 @@ mod internal_tests {
 
         cx.run_until_parked();
 
+        // Set a draft prompt with rich content blocks before saving.
+        let draft_blocks = vec![
+            acp::ContentBlock::Text(acp::TextContent::new("Check out ")),
+            acp::ContentBlock::ResourceLink(acp::ResourceLink::new("b.md", uri.to_string())),
+            acp::ContentBlock::Text(acp::TextContent::new(" please")),
+        ];
+        acp_thread.update(cx, |thread, _cx| {
+            thread.set_draft_prompt(Some(draft_blocks.clone()));
+        });
+        thread.update(cx, |thread, _cx| {
+            thread.set_ui_scroll_position(Some(gpui::ListOffset {
+                item_ix: 5,
+                offset_in_item: gpui::px(12.5),
+            }));
+        });
+        thread.update(cx, |_thread, cx| cx.notify());
+        cx.run_until_parked();
+
         // Close the session so it can be reloaded from disk.
         cx.update(|cx| connection.clone().close_session(&session_id, cx))
             .await
@@ -2608,6 +2658,29 @@ mod internal_tests {
                 "}
             )
         });
+
+        // Ensure the draft prompt with rich content blocks survived the round-trip.
+        acp_thread.read_with(cx, |thread, _| {
+            assert_eq!(thread.draft_prompt(), Some(draft_blocks.as_slice()));
+        });
+
+        // Ensure token usage survived the round-trip.
+        acp_thread.read_with(cx, |thread, _| {
+            let usage = thread
+                .token_usage()
+                .expect("token usage should be restored after reload");
+            assert_eq!(usage.input_tokens, 150);
+            assert_eq!(usage.output_tokens, 75);
+        });
+
+        // Ensure scroll position survived the round-trip.
+        acp_thread.read_with(cx, |thread, _| {
+            let scroll = thread
+                .ui_scroll_position()
+                .expect("scroll position should be restored after reload");
+            assert_eq!(scroll.item_ix, 5);
+            assert_eq!(scroll.offset_in_item, gpui::px(12.5));
+        });
     }
 
     fn thread_entries(

crates/agent/src/db.rs 🔗

@@ -8,6 +8,7 @@ use collections::{HashMap, IndexMap};
 use futures::{FutureExt, future::Shared};
 use gpui::{BackgroundExecutor, Global, Task};
 use indoc::indoc;
+use language_model::Speed;
 use parking_lot::Mutex;
 use serde::{Deserialize, Serialize};
 use sqlez::{
@@ -17,23 +18,13 @@ use sqlez::{
 };
 use std::sync::Arc;
 use ui::{App, SharedString};
+use util::path_list::PathList;
 use zed_env_vars::ZED_STATELESS;
 
 pub type DbMessage = crate::Message;
 pub type DbSummary = crate::legacy_thread::DetailedSummaryState;
 pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel;
 
-/// Metadata about the git worktree associated with an agent thread.
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct AgentGitWorktreeInfo {
-    /// The branch name in the git worktree.
-    pub branch: String,
-    /// Absolute path to the git worktree on disk.
-    pub worktree_path: std::path::PathBuf,
-    /// The base branch/commit the worktree was created from.
-    pub base_ref: String,
-}
-
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct DbThreadMetadata {
     pub id: acp::SessionId,
@@ -41,10 +32,22 @@ pub struct DbThreadMetadata {
     #[serde(alias = "summary")]
     pub title: SharedString,
     pub updated_at: DateTime<Utc>,
-    /// Denormalized from `DbThread::git_worktree_info.branch` for efficient
-    /// listing without decompressing thread data. The blob is the source of
-    /// truth; this column is populated on save for query convenience.
-    pub worktree_branch: Option<String>,
+    pub created_at: Option<DateTime<Utc>>,
+    /// The workspace folder paths this thread was created against, sorted
+    /// lexicographically. Used for grouping threads by project in the sidebar.
+    pub folder_paths: PathList,
+}
+
+impl From<&DbThreadMetadata> for acp_thread::AgentSessionInfo {
+    fn from(meta: &DbThreadMetadata) -> Self {
+        Self {
+            session_id: meta.id.clone(),
+            cwd: None,
+            title: Some(meta.title.clone()),
+            updated_at: Some(meta.updated_at),
+            meta: None,
+        }
+    }
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -69,7 +72,21 @@ pub struct DbThread {
     #[serde(default)]
     pub subagent_context: Option<crate::SubagentContext>,
     #[serde(default)]
-    pub git_worktree_info: Option<AgentGitWorktreeInfo>,
+    pub speed: Option<Speed>,
+    #[serde(default)]
+    pub thinking_enabled: bool,
+    #[serde(default)]
+    pub thinking_effort: Option<String>,
+    #[serde(default)]
+    pub draft_prompt: Option<Vec<acp::ContentBlock>>,
+    #[serde(default)]
+    pub ui_scroll_position: Option<SerializedScrollPosition>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
+pub struct SerializedScrollPosition {
+    pub item_ix: usize,
+    pub offset_in_item: f32,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -108,7 +125,11 @@ impl SharedThread {
             profile: None,
             imported: true,
             subagent_context: None,
-            git_worktree_info: None,
+            speed: None,
+            thinking_enabled: false,
+            thinking_effort: None,
+            draft_prompt: None,
+            ui_scroll_position: None,
         }
     }
 
@@ -283,7 +304,11 @@ impl DbThread {
             profile: thread.profile,
             imported: false,
             subagent_context: None,
-            git_worktree_info: None,
+            speed: None,
+            thinking_enabled: false,
+            thinking_effort: None,
+            draft_prompt: None,
+            ui_scroll_position: None,
         })
     }
 }
@@ -389,12 +414,24 @@ impl ThreadsDatabase {
         }
 
         if let Ok(mut s) = connection.exec(indoc! {"
-            ALTER TABLE threads ADD COLUMN worktree_branch TEXT
+            ALTER TABLE threads ADD COLUMN folder_paths TEXT;
+            ALTER TABLE threads ADD COLUMN folder_paths_order TEXT;
         "})
         {
             s().ok();
         }
 
+        if let Ok(mut s) = connection.exec(indoc! {"
+            ALTER TABLE threads ADD COLUMN created_at TEXT;
+        "})
+        {
+            if s().is_ok() {
+                connection.exec(indoc! {"
+                    UPDATE threads SET created_at = updated_at WHERE created_at IS NULL
+                "})?()?;
+            }
+        }
+
         let db = Self {
             executor,
             connection: Arc::new(Mutex::new(connection)),
@@ -407,6 +444,7 @@ impl ThreadsDatabase {
         connection: &Arc<Mutex<Connection>>,
         id: acp::SessionId,
         thread: DbThread,
+        folder_paths: &PathList,
     ) -> Result<()> {
         const COMPRESSION_LEVEL: i32 = 3;
 
@@ -423,10 +461,16 @@ impl ThreadsDatabase {
             .subagent_context
             .as_ref()
             .map(|ctx| ctx.parent_thread_id.0.clone());
-        let worktree_branch = thread
-            .git_worktree_info
-            .as_ref()
-            .map(|info| info.branch.clone());
+        let serialized_folder_paths = folder_paths.serialize();
+        let (folder_paths_str, folder_paths_order_str): (Option<String>, Option<String>) =
+            if folder_paths.is_empty() {
+                (None, None)
+            } else {
+                (
+                    Some(serialized_folder_paths.paths),
+                    Some(serialized_folder_paths.order),
+                )
+            };
         let json_data = serde_json::to_string(&SerializedThread {
             thread,
             version: DbThread::VERSION,
@@ -438,18 +482,31 @@ impl ThreadsDatabase {
         let data_type = DataType::Zstd;
         let data = compressed;
 
-        let mut insert = connection.exec_bound::<(Arc<str>, Option<Arc<str>>, Option<String>, String, String, DataType, Vec<u8>)>(indoc! {"
-            INSERT OR REPLACE INTO threads (id, parent_id, worktree_branch, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?, ?)
+        let created_at = Utc::now().to_rfc3339();
+
+        let mut insert = connection.exec_bound::<(Arc<str>, Option<Arc<str>>, Option<String>, Option<String>, String, String, DataType, Vec<u8>, String)>(indoc! {"
+            INSERT INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data, created_at)
+            VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)
+            ON CONFLICT(id) DO UPDATE SET
+                parent_id = excluded.parent_id,
+                folder_paths = excluded.folder_paths,
+                folder_paths_order = excluded.folder_paths_order,
+                summary = excluded.summary,
+                updated_at = excluded.updated_at,
+                data_type = excluded.data_type,
+                data = excluded.data
         "})?;
 
         insert((
             id.0,
             parent_id,
-            worktree_branch,
+            folder_paths_str,
+            folder_paths_order_str,
             title,
             updated_at,
             data_type,
             data,
+            created_at,
         ))?;
 
         Ok(())
@@ -462,20 +519,35 @@ impl ThreadsDatabase {
             let connection = connection.lock();
 
             let mut select = connection
-                .select_bound::<(), (Arc<str>, Option<Arc<str>>, Option<String>, String, String)>(indoc! {"
-                SELECT id, parent_id, worktree_branch, summary, updated_at FROM threads ORDER BY updated_at DESC
+                .select_bound::<(), (Arc<str>, Option<Arc<str>>, Option<String>, Option<String>, String, String, Option<String>)>(indoc! {"
+                SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at FROM threads ORDER BY updated_at DESC, created_at DESC
             "})?;
 
             let rows = select(())?;
             let mut threads = Vec::new();
 
-            for (id, parent_id, worktree_branch, summary, updated_at) in rows {
+            for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at) in rows {
+                let folder_paths = folder_paths
+                    .map(|paths| {
+                        PathList::deserialize(&util::path_list::SerializedPathList {
+                            paths,
+                            order: folder_paths_order.unwrap_or_default(),
+                        })
+                    })
+                    .unwrap_or_default();
+                let created_at = created_at
+                    .as_deref()
+                    .map(DateTime::parse_from_rfc3339)
+                    .transpose()?
+                    .map(|dt| dt.with_timezone(&Utc));
+
                 threads.push(DbThreadMetadata {
                     id: acp::SessionId::new(id),
                     parent_session_id: parent_id.map(acp::SessionId::new),
                     title: summary.into(),
                     updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
-                    worktree_branch,
+                    created_at,
+                    folder_paths,
                 });
             }
 
@@ -509,11 +581,16 @@ impl ThreadsDatabase {
         })
     }
 
-    pub fn save_thread(&self, id: acp::SessionId, thread: DbThread) -> Task<Result<()>> {
+    pub fn save_thread(
+        &self,
+        id: acp::SessionId,
+        thread: DbThread,
+        folder_paths: PathList,
+    ) -> Task<Result<()>> {
         let connection = self.connection.clone();
 
         self.executor
-            .spawn(async move { Self::save_thread_sync(&connection, id, thread) })
+            .spawn(async move { Self::save_thread_sync(&connection, id, thread, &folder_paths) })
     }
 
     pub fn delete_thread(&self, id: acp::SessionId) -> Task<Result<()>> {
@@ -609,12 +686,16 @@ mod tests {
             profile: None,
             imported: false,
             subagent_context: None,
-            git_worktree_info: None,
+            speed: None,
+            thinking_enabled: false,
+            thinking_effort: None,
+            draft_prompt: None,
+            ui_scroll_position: None,
         }
     }
 
     #[gpui::test]
-    async fn test_list_threads_orders_by_updated_at(cx: &mut TestAppContext) {
+    async fn test_list_threads_orders_by_created_at(cx: &mut TestAppContext) {
         let database = ThreadsDatabase::new(cx.executor()).unwrap();
 
         let older_id = session_id("thread-a");
@@ -630,11 +711,11 @@ mod tests {
         );
 
         database
-            .save_thread(older_id.clone(), older_thread)
+            .save_thread(older_id.clone(), older_thread, PathList::default())
             .await
             .unwrap();
         database
-            .save_thread(newer_id.clone(), newer_thread)
+            .save_thread(newer_id.clone(), newer_thread, PathList::default())
             .await
             .unwrap();
 
@@ -659,11 +740,11 @@ mod tests {
         );
 
         database
-            .save_thread(thread_id.clone(), original_thread)
+            .save_thread(thread_id.clone(), original_thread, PathList::default())
             .await
             .unwrap();
         database
-            .save_thread(thread_id.clone(), updated_thread)
+            .save_thread(thread_id.clone(), updated_thread, PathList::default())
             .await
             .unwrap();
 
@@ -675,6 +756,10 @@ mod tests {
             entries[0].updated_at,
             Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap()
         );
+        assert!(
+            entries[0].created_at.is_some(),
+            "created_at should be populated"
+        );
     }
 
     #[test]
@@ -693,6 +778,22 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_draft_prompt_defaults_to_none() {
+        let json = r#"{
+            "title": "Old Thread",
+            "messages": [],
+            "updated_at": "2024-01-01T00:00:00Z"
+        }"#;
+
+        let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize");
+
+        assert!(
+            db_thread.draft_prompt.is_none(),
+            "Legacy threads without draft_prompt field should default to None"
+        );
+    }
+
     #[gpui::test]
     async fn test_subagent_context_roundtrips_through_save_load(cx: &mut TestAppContext) {
         let database = ThreadsDatabase::new(cx.executor()).unwrap();
@@ -710,7 +811,7 @@ mod tests {
         });
 
         database
-            .save_thread(child_id.clone(), child_thread)
+            .save_thread(child_id.clone(), child_thread, PathList::default())
             .await
             .unwrap();
 
@@ -738,7 +839,7 @@ mod tests {
         );
 
         database
-            .save_thread(thread_id.clone(), thread)
+            .save_thread(thread_id.clone(), thread, PathList::default())
             .await
             .unwrap();
 
@@ -755,92 +856,96 @@ mod tests {
     }
 
     #[gpui::test]
-    async fn test_git_worktree_info_roundtrip(cx: &mut TestAppContext) {
+    async fn test_folder_paths_roundtrip(cx: &mut TestAppContext) {
         let database = ThreadsDatabase::new(cx.executor()).unwrap();
 
-        let thread_id = session_id("worktree-thread");
-        let mut thread = make_thread(
-            "Worktree Thread",
+        let thread_id = session_id("folder-thread");
+        let thread = make_thread(
+            "Folder Thread",
             Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(),
         );
-        thread.git_worktree_info = Some(AgentGitWorktreeInfo {
-            branch: "zed/agent/a4Xiu".to_string(),
-            worktree_path: std::path::PathBuf::from("/repo/worktrees/zed/agent/a4Xiu"),
-            base_ref: "main".to_string(),
-        });
+
+        let folder_paths = PathList::new(&[
+            std::path::PathBuf::from("/home/user/project-a"),
+            std::path::PathBuf::from("/home/user/project-b"),
+        ]);
 
         database
-            .save_thread(thread_id.clone(), thread)
+            .save_thread(thread_id.clone(), thread, folder_paths.clone())
             .await
             .unwrap();
 
-        let loaded = database
-            .load_thread(thread_id)
-            .await
-            .unwrap()
-            .expect("thread should exist");
-
-        let info = loaded
-            .git_worktree_info
-            .expect("git_worktree_info should be restored");
-        assert_eq!(info.branch, "zed/agent/a4Xiu");
-        assert_eq!(
-            info.worktree_path,
-            std::path::PathBuf::from("/repo/worktrees/zed/agent/a4Xiu")
-        );
-        assert_eq!(info.base_ref, "main");
+        let threads = database.list_threads().await.unwrap();
+        assert_eq!(threads.len(), 1);
+        assert_eq!(threads[0].folder_paths, folder_paths);
     }
 
     #[gpui::test]
-    async fn test_session_list_includes_worktree_meta(cx: &mut TestAppContext) {
+    async fn test_folder_paths_empty_when_not_set(cx: &mut TestAppContext) {
         let database = ThreadsDatabase::new(cx.executor()).unwrap();
 
-        // Save a thread with worktree info
-        let worktree_id = session_id("wt-thread");
-        let mut worktree_thread = make_thread(
-            "With Worktree",
+        let thread_id = session_id("no-folder-thread");
+        let thread = make_thread(
+            "No Folder Thread",
             Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(),
         );
-        worktree_thread.git_worktree_info = Some(AgentGitWorktreeInfo {
-            branch: "zed/agent/bR9kz".to_string(),
-            worktree_path: std::path::PathBuf::from("/repo/worktrees/zed/agent/bR9kz"),
-            base_ref: "develop".to_string(),
-        });
 
         database
-            .save_thread(worktree_id.clone(), worktree_thread)
+            .save_thread(thread_id.clone(), thread, PathList::default())
             .await
             .unwrap();
 
-        // Save a thread without worktree info
-        let plain_id = session_id("plain-thread");
-        let plain_thread = make_thread(
-            "Without Worktree",
-            Utc.with_ymd_and_hms(2024, 6, 15, 11, 0, 0).unwrap(),
+        let threads = database.list_threads().await.unwrap();
+        assert_eq!(threads.len(), 1);
+        assert!(threads[0].folder_paths.is_empty());
+    }
+
+    #[test]
+    fn test_scroll_position_defaults_to_none() {
+        let json = r#"{
+            "title": "Old Thread",
+            "messages": [],
+            "updated_at": "2024-01-01T00:00:00Z"
+        }"#;
+
+        let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize");
+
+        assert!(
+            db_thread.ui_scroll_position.is_none(),
+            "Legacy threads without scroll_position field should default to None"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_scroll_position_roundtrips_through_save_load(cx: &mut TestAppContext) {
+        let database = ThreadsDatabase::new(cx.executor()).unwrap();
+
+        let thread_id = session_id("thread-with-scroll");
+
+        let mut thread = make_thread(
+            "Thread With Scroll",
+            Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(),
         );
+        thread.ui_scroll_position = Some(SerializedScrollPosition {
+            item_ix: 42,
+            offset_in_item: 13.5,
+        });
 
         database
-            .save_thread(plain_id.clone(), plain_thread)
+            .save_thread(thread_id.clone(), thread, PathList::default())
             .await
             .unwrap();
 
-        // List threads and verify worktree_branch is populated correctly
-        let threads = database.list_threads().await.unwrap();
-        assert_eq!(threads.len(), 2);
-
-        let wt_entry = threads
-            .iter()
-            .find(|t| t.id == worktree_id)
-            .expect("should find worktree thread");
-        assert_eq!(wt_entry.worktree_branch.as_deref(), Some("zed/agent/bR9kz"));
-
-        let plain_entry = threads
-            .iter()
-            .find(|t| t.id == plain_id)
-            .expect("should find plain thread");
-        assert!(
-            plain_entry.worktree_branch.is_none(),
-            "plain thread should have no worktree_branch"
-        );
+        let loaded = database
+            .load_thread(thread_id)
+            .await
+            .unwrap()
+            .expect("thread should exist");
+
+        let scroll = loaded
+            .ui_scroll_position
+            .expect("scroll_position should be restored");
+        assert_eq!(scroll.item_ix, 42);
+        assert!((scroll.offset_in_item - 13.5).abs() < f32::EPSILON);
     }
 }

crates/agent/src/edit_agent.rs 🔗

@@ -2,6 +2,7 @@ mod create_file_parser;
 mod edit_parser;
 #[cfg(test)]
 mod evals;
+pub mod reindent;
 pub mod streaming_fuzzy_matcher;
 
 use crate::{Template, Templates};
@@ -24,9 +25,10 @@ use language_model::{
     LanguageModelToolChoice, MessageContent, Role,
 };
 use project::{AgentLocation, Project};
+use reindent::{IndentDelta, Reindenter};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use std::{cmp, iter, mem, ops::Range, pin::Pin, sync::Arc, task::Poll};
+use std::{mem, ops::Range, pin::Pin, sync::Arc, task::Poll};
 use streaming_diff::{CharOperation, StreamingDiff};
 use streaming_fuzzy_matcher::StreamingFuzzyMatcher;
 
@@ -82,6 +84,7 @@ pub struct EditAgent {
     templates: Arc<Templates>,
     edit_format: EditFormat,
     thinking_allowed: bool,
+    update_agent_location: bool,
 }
 
 impl EditAgent {
@@ -92,6 +95,7 @@ impl EditAgent {
         templates: Arc<Templates>,
         edit_format: EditFormat,
         allow_thinking: bool,
+        update_agent_location: bool,
     ) -> Self {
         EditAgent {
             model,
@@ -100,6 +104,7 @@ impl EditAgent {
             templates,
             edit_format,
             thinking_allowed: allow_thinking,
+            update_agent_location,
         }
     }
 
@@ -166,56 +171,73 @@ impl EditAgent {
         output_events_tx: mpsc::UnboundedSender<EditAgentOutputEvent>,
         cx: &mut AsyncApp,
     ) -> Result<()> {
-        cx.update(|cx| {
-            buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
-            self.action_log.update(cx, |log, cx| {
-                log.buffer_edited(buffer.clone(), cx);
-            });
-            self.project.update(cx, |project, cx| {
-                project.set_agent_location(
-                    Some(AgentLocation {
-                        buffer: buffer.downgrade(),
-                        position: language::Anchor::max_for_buffer(buffer.read(cx).remote_id()),
-                    }),
-                    cx,
-                )
-            });
+        let buffer_id = cx.update(|cx| {
+            let buffer_id = buffer.read(cx).remote_id();
+            if self.update_agent_location {
+                self.project.update(cx, |project, cx| {
+                    project.set_agent_location(
+                        Some(AgentLocation {
+                            buffer: buffer.downgrade(),
+                            position: language::Anchor::min_for_buffer(buffer_id),
+                        }),
+                        cx,
+                    )
+                });
+            }
+            buffer_id
+        });
+
+        let send_edit_event = || {
             output_events_tx
                 .unbounded_send(EditAgentOutputEvent::Edited(
-                    Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()),
+                    Anchor::min_max_range_for_buffer(buffer_id),
                 ))
-                .ok();
-        });
-
+                .ok()
+        };
+        let set_agent_location = |cx: &mut _| {
+            if self.update_agent_location {
+                self.project.update(cx, |project, cx| {
+                    project.set_agent_location(
+                        Some(AgentLocation {
+                            buffer: buffer.downgrade(),
+                            position: language::Anchor::max_for_buffer(buffer_id),
+                        }),
+                        cx,
+                    )
+                })
+            }
+        };
+        let mut first_chunk = true;
         while let Some(event) = parse_rx.next().await {
             match event? {
                 CreateFileParserEvent::NewTextChunk { chunk } => {
-                    let buffer_id = cx.update(|cx| {
-                        buffer.update(cx, |buffer, cx| buffer.append(chunk, cx));
+                    cx.update(|cx| {
+                        buffer.update(cx, |buffer, cx| {
+                            if mem::take(&mut first_chunk) {
+                                buffer.set_text(chunk, cx)
+                            } else {
+                                buffer.append(chunk, cx)
+                            }
+                        });
                         self.action_log
                             .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
-                        self.project.update(cx, |project, cx| {
-                            project.set_agent_location(
-                                Some(AgentLocation {
-                                    buffer: buffer.downgrade(),
-                                    position: language::Anchor::max_for_buffer(
-                                        buffer.read(cx).remote_id(),
-                                    ),
-                                }),
-                                cx,
-                            )
-                        });
-                        buffer.read(cx).remote_id()
+                        set_agent_location(cx);
                     });
-                    output_events_tx
-                        .unbounded_send(EditAgentOutputEvent::Edited(
-                            Anchor::min_max_range_for_buffer(buffer_id),
-                        ))
-                        .ok();
+                    send_edit_event();
                 }
             }
         }
 
+        if first_chunk {
+            cx.update(|cx| {
+                buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
+                self.action_log
+                    .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+                set_agent_location(cx);
+            });
+            send_edit_event();
+        }
+
         Ok(())
     }
 
@@ -287,15 +309,17 @@ impl EditAgent {
                 if let Some(old_range) = old_range {
                     let old_range = snapshot.anchor_before(old_range.start)
                         ..snapshot.anchor_before(old_range.end);
-                    self.project.update(cx, |project, cx| {
-                        project.set_agent_location(
-                            Some(AgentLocation {
-                                buffer: buffer.downgrade(),
-                                position: old_range.end,
-                            }),
-                            cx,
-                        );
-                    });
+                    if self.update_agent_location {
+                        self.project.update(cx, |project, cx| {
+                            project.set_agent_location(
+                                Some(AgentLocation {
+                                    buffer: buffer.downgrade(),
+                                    position: old_range.end,
+                                }),
+                                cx,
+                            );
+                        });
+                    }
                     output_events
                         .unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range))
                         .ok();
@@ -368,15 +392,17 @@ impl EditAgent {
                     });
                     self.action_log
                         .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
-                    self.project.update(cx, |project, cx| {
-                        project.set_agent_location(
-                            Some(AgentLocation {
-                                buffer: buffer.downgrade(),
-                                position: max_edit_end,
-                            }),
-                            cx,
-                        );
-                    });
+                    if self.update_agent_location {
+                        self.project.update(cx, |project, cx| {
+                            project.set_agent_location(
+                                Some(AgentLocation {
+                                    buffer: buffer.downgrade(),
+                                    position: max_edit_end,
+                                }),
+                                cx,
+                            );
+                        });
+                    }
                     (min_edit_start, max_edit_end)
                 });
                 output_events
@@ -540,15 +566,8 @@ impl EditAgent {
         let compute_edits = cx.background_spawn(async move {
             let buffer_start_indent = snapshot
                 .line_indent_for_row(snapshot.offset_to_point(resolved_old_text.range.start).row);
-            let indent_delta = if buffer_start_indent.tabs > 0 {
-                IndentDelta::Tabs(
-                    buffer_start_indent.tabs as isize - resolved_old_text.indent.tabs as isize,
-                )
-            } else {
-                IndentDelta::Spaces(
-                    buffer_start_indent.spaces as isize - resolved_old_text.indent.spaces as isize,
-                )
-            };
+            let indent_delta =
+                reindent::compute_indent_delta(buffer_start_indent, resolved_old_text.indent);
 
             let old_text = snapshot
                 .text_for_range(resolved_old_text.range.clone())
@@ -595,8 +614,7 @@ impl EditAgent {
         delta: IndentDelta,
         mut stream: impl Unpin + Stream<Item = Result<EditParserEvent>>,
     ) -> impl Stream<Item = Result<String>> {
-        let mut buffer = String::new();
-        let mut in_leading_whitespace = true;
+        let mut reindenter = Reindenter::new(delta);
         let mut done = false;
         futures::stream::poll_fn(move |cx| {
             while !done {
@@ -609,55 +627,10 @@ impl EditAgent {
                     _ => return Poll::Ready(None),
                 };
 
-                buffer.push_str(&chunk);
-
-                let mut indented_new_text = String::new();
-                let mut start_ix = 0;
-                let mut newlines = buffer.match_indices('\n').peekable();
-                loop {
-                    let (line_end, is_pending_line) = match newlines.next() {
-                        Some((ix, _)) => (ix, false),
-                        None => (buffer.len(), true),
-                    };
-                    let line = &buffer[start_ix..line_end];
-
-                    if in_leading_whitespace {
-                        if let Some(non_whitespace_ix) = line.find(|c| delta.character() != c) {
-                            // We found a non-whitespace character, adjust
-                            // indentation based on the delta.
-                            let new_indent_len =
-                                cmp::max(0, non_whitespace_ix as isize + delta.len()) as usize;
-                            indented_new_text
-                                .extend(iter::repeat(delta.character()).take(new_indent_len));
-                            indented_new_text.push_str(&line[non_whitespace_ix..]);
-                            in_leading_whitespace = false;
-                        } else if is_pending_line {
-                            // We're still in leading whitespace and this line is incomplete.
-                            // Stop processing until we receive more input.
-                            break;
-                        } else {
-                            // This line is entirely whitespace. Push it without indentation.
-                            indented_new_text.push_str(line);
-                        }
-                    } else {
-                        indented_new_text.push_str(line);
-                    }
-
-                    if is_pending_line {
-                        start_ix = line_end;
-                        break;
-                    } else {
-                        in_leading_whitespace = true;
-                        indented_new_text.push('\n');
-                        start_ix = line_end + 1;
-                    }
-                }
-                buffer.replace_range(..start_ix, "");
-
+                let mut indented_new_text = reindenter.push(&chunk);
                 // This was the last chunk, push all the buffered content as-is.
                 if is_last_chunk {
-                    indented_new_text.push_str(&buffer);
-                    buffer.clear();
+                    indented_new_text.push_str(&reindenter.finish());
                     done = true;
                 }
 
@@ -736,6 +709,7 @@ impl EditAgent {
             temperature: None,
             thinking_allowed: self.thinking_allowed,
             thinking_effort: None,
+            speed: None,
         };
 
         Ok(self.model.stream_completion_text(request, cx).await?.stream)
@@ -747,28 +721,6 @@ struct ResolvedOldText {
     indent: LineIndent,
 }
 
-#[derive(Copy, Clone, Debug)]
-enum IndentDelta {
-    Spaces(isize),
-    Tabs(isize),
-}
-
-impl IndentDelta {
-    fn character(&self) -> char {
-        match self {
-            IndentDelta::Spaces(_) => ' ',
-            IndentDelta::Tabs(_) => '\t',
-        }
-    }
-
-    fn len(&self) -> isize {
-        match self {
-            IndentDelta::Spaces(n) => *n,
-            IndentDelta::Tabs(n) => *n,
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -1194,19 +1146,16 @@ mod tests {
         );
 
         cx.run_until_parked();
-        assert_matches!(
-            drain_events(&mut events).as_slice(),
-            [EditAgentOutputEvent::Edited(_)]
-        );
+        assert_eq!(drain_events(&mut events).as_slice(), []);
         assert_eq!(
             buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
-            ""
+            "abc\ndef\nghi"
         );
         assert_eq!(
             project.read_with(cx, |project, _| project.agent_location()),
             Some(AgentLocation {
                 buffer: buffer.downgrade(),
-                position: language::Anchor::max_for_buffer(
+                position: language::Anchor::min_for_buffer(
                     cx.update(|cx| buffer.read(cx).remote_id())
                 ),
             })
@@ -1290,6 +1239,32 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_overwrite_no_content(cx: &mut TestAppContext) {
+        let agent = init_test(cx).await;
+        let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
+        let (chunks_tx, chunks_rx) = mpsc::unbounded::<&str>();
+        let (apply, mut events) = agent.overwrite_with_chunks(
+            buffer.clone(),
+            chunks_rx.map(|chunk| Ok(chunk.to_string())),
+            &mut cx.to_async(),
+        );
+
+        drop(chunks_tx);
+        cx.run_until_parked();
+
+        let result = apply.await;
+        assert!(result.is_ok(),);
+        assert_matches!(
+            drain_events(&mut events).as_slice(),
+            [EditAgentOutputEvent::Edited { .. }]
+        );
+        assert_eq!(
+            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
+            ""
+        );
+    }
+
     #[gpui::test(iterations = 100)]
     async fn test_indent_new_text_chunks(mut rng: StdRng) {
         let chunks = to_random_chunks(&mut rng, "    abc\n  def\n      ghi");
@@ -1426,6 +1401,7 @@ mod tests {
             Templates::new(),
             EditFormat::XmlTags,
             thinking_allowed,
+            true,
         )
     }
 

crates/agent/src/edit_agent/reindent.rs 🔗

@@ -0,0 +1,214 @@
+use language::LineIndent;
+use std::{cmp, iter};
+
+#[derive(Copy, Clone, Debug)]
+pub enum IndentDelta {
+    Spaces(isize),
+    Tabs(isize),
+}
+
+impl IndentDelta {
+    pub fn character(&self) -> char {
+        match self {
+            IndentDelta::Spaces(_) => ' ',
+            IndentDelta::Tabs(_) => '\t',
+        }
+    }
+
+    pub fn len(&self) -> isize {
+        match self {
+            IndentDelta::Spaces(n) => *n,
+            IndentDelta::Tabs(n) => *n,
+        }
+    }
+}
+
+pub fn compute_indent_delta(buffer_indent: LineIndent, query_indent: LineIndent) -> IndentDelta {
+    if buffer_indent.tabs > 0 {
+        IndentDelta::Tabs(buffer_indent.tabs as isize - query_indent.tabs as isize)
+    } else {
+        IndentDelta::Spaces(buffer_indent.spaces as isize - query_indent.spaces as isize)
+    }
+}
+
+/// Synchronous re-indentation adapter. Buffers incomplete lines and applies
+/// an `IndentDelta` to each line's leading whitespace before emitting it.
+pub struct Reindenter {
+    delta: IndentDelta,
+    buffer: String,
+    in_leading_whitespace: bool,
+}
+
+impl Reindenter {
+    pub fn new(delta: IndentDelta) -> Self {
+        Self {
+            delta,
+            buffer: String::new(),
+            in_leading_whitespace: true,
+        }
+    }
+
+    /// Feed a chunk of text and return the re-indented portion that is
+    /// ready to emit. Incomplete trailing lines are buffered internally.
+    pub fn push(&mut self, chunk: &str) -> String {
+        self.buffer.push_str(chunk);
+        self.drain(false)
+    }
+
+    /// Flush any remaining buffered content (call when the stream is done).
+    pub fn finish(&mut self) -> String {
+        self.drain(true)
+    }
+
+    fn drain(&mut self, is_final: bool) -> String {
+        let mut indented = String::new();
+        let mut start_ix = 0;
+        let mut newlines = self.buffer.match_indices('\n');
+        loop {
+            let (line_end, is_pending_line) = match newlines.next() {
+                Some((ix, _)) => (ix, false),
+                None => (self.buffer.len(), true),
+            };
+            let line = &self.buffer[start_ix..line_end];
+
+            if self.in_leading_whitespace {
+                if let Some(non_whitespace_ix) = line.find(|c| self.delta.character() != c) {
+                    // We found a non-whitespace character, adjust indentation
+                    // based on the delta.
+                    let new_indent_len =
+                        cmp::max(0, non_whitespace_ix as isize + self.delta.len()) as usize;
+                    indented.extend(iter::repeat(self.delta.character()).take(new_indent_len));
+                    indented.push_str(&line[non_whitespace_ix..]);
+                    self.in_leading_whitespace = false;
+                } else if is_pending_line && !is_final {
+                    // We're still in leading whitespace and this line is incomplete.
+                    // Stop processing until we receive more input.
+                    break;
+                } else {
+                    // This line is entirely whitespace. Push it without indentation.
+                    indented.push_str(line);
+                }
+            } else {
+                indented.push_str(line);
+            }
+
+            if is_pending_line {
+                start_ix = line_end;
+                break;
+            } else {
+                self.in_leading_whitespace = true;
+                indented.push('\n');
+                start_ix = line_end + 1;
+            }
+        }
+        self.buffer.replace_range(..start_ix, "");
+        if is_final {
+            indented.push_str(&self.buffer);
+            self.buffer.clear();
+        }
+        indented
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_indent_single_chunk() {
+        let mut r = Reindenter::new(IndentDelta::Spaces(2));
+        let out = r.push("    abc\n  def\n      ghi");
+        // All three lines are emitted: "ghi" starts with spaces but
+        // contains non-whitespace, so it's processed immediately.
+        assert_eq!(out, "      abc\n    def\n        ghi");
+        let out = r.finish();
+        assert_eq!(out, "");
+    }
+
+    #[test]
+    fn test_outdent_tabs() {
+        let mut r = Reindenter::new(IndentDelta::Tabs(-2));
+        let out = r.push("\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi");
+        assert_eq!(out, "\t\tabc\ndef\n\t\t\t\tghi");
+        let out = r.finish();
+        assert_eq!(out, "");
+    }
+
+    #[test]
+    fn test_incremental_chunks() {
+        let mut r = Reindenter::new(IndentDelta::Spaces(2));
+        // Feed "    ab" — the `a` is non-whitespace, so the line is
+        // processed immediately even without a trailing newline.
+        let out = r.push("    ab");
+        assert_eq!(out, "      ab");
+        // Feed "c\n" — appended to the already-processed line (no longer
+        // in leading whitespace).
+        let out = r.push("c\n");
+        assert_eq!(out, "c\n");
+        let out = r.finish();
+        assert_eq!(out, "");
+    }
+
+    #[test]
+    fn test_zero_delta() {
+        let mut r = Reindenter::new(IndentDelta::Spaces(0));
+        let out = r.push("  hello\n  world\n");
+        assert_eq!(out, "  hello\n  world\n");
+        let out = r.finish();
+        assert_eq!(out, "");
+    }
+
+    #[test]
+    fn test_clamp_negative_indent() {
+        let mut r = Reindenter::new(IndentDelta::Spaces(-10));
+        let out = r.push("  abc\n");
+        // max(0, 2 - 10) = 0, so no leading spaces.
+        assert_eq!(out, "abc\n");
+        let out = r.finish();
+        assert_eq!(out, "");
+    }
+
+    #[test]
+    fn test_whitespace_only_lines() {
+        let mut r = Reindenter::new(IndentDelta::Spaces(2));
+        let out = r.push("   \n  code\n");
+        // First line is all whitespace — emitted verbatim. Second line is indented.
+        assert_eq!(out, "   \n    code\n");
+        let out = r.finish();
+        assert_eq!(out, "");
+    }
+
+    #[test]
+    fn test_compute_indent_delta_spaces() {
+        let buffer = LineIndent {
+            tabs: 0,
+            spaces: 8,
+            line_blank: false,
+        };
+        let query = LineIndent {
+            tabs: 0,
+            spaces: 4,
+            line_blank: false,
+        };
+        let delta = compute_indent_delta(buffer, query);
+        assert_eq!(delta.len(), 4);
+        assert_eq!(delta.character(), ' ');
+    }
+
+    #[test]
+    fn test_compute_indent_delta_tabs() {
+        let buffer = LineIndent {
+            tabs: 2,
+            spaces: 0,
+            line_blank: false,
+        };
+        let query = LineIndent {
+            tabs: 3,
+            spaces: 0,
+            line_blank: false,
+        };
+        let delta = compute_indent_delta(buffer, query);
+        assert_eq!(delta.len(), -1);
+        assert_eq!(delta.character(), '\t');
+    }
+}

crates/agent/src/native_agent_server.rs 🔗

@@ -1,4 +1,4 @@
-use std::{any::Any, path::Path, rc::Rc, sync::Arc};
+use std::{any::Any, rc::Rc, sync::Arc};
 
 use agent_client_protocol as acp;
 use agent_servers::{AgentServer, AgentServerDelegate};
@@ -35,19 +35,10 @@ impl AgentServer for NativeAgentServer {
 
     fn connect(
         &self,
-        _root_dir: Option<&Path>,
         delegate: AgentServerDelegate,
         cx: &mut App,
-    ) -> Task<
-        Result<(
-            Rc<dyn acp_thread::AgentConnection>,
-            Option<task::SpawnInTerminal>,
-        )>,
-    > {
-        log::debug!(
-            "NativeAgentServer::connect called for path: {:?}",
-            _root_dir
-        );
+    ) -> Task<Result<Rc<dyn acp_thread::AgentConnection>>> {
+        log::debug!("NativeAgentServer::connect");
         let project = delegate.project().clone();
         let fs = self.fs.clone();
         let thread_store = self.thread_store.clone();
@@ -66,10 +57,7 @@ impl AgentServer for NativeAgentServer {
             let connection = NativeAgentConnection(agent);
             log::debug!("NativeAgentServer connection established successfully");
 
-            Ok((
-                Rc::new(connection) as Rc<dyn acp_thread::AgentConnection>,
-                None,
-            ))
+            Ok(Rc::new(connection) as Rc<dyn acp_thread::AgentConnection>)
         })
     }
 

crates/agent/src/tests/edit_file_thread_test.rs 🔗

@@ -50,9 +50,9 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) {
         // Add just the tools we need for this test
         let language_registry = project.read(cx).languages().clone();
         thread.add_tool(crate::ReadFileTool::new(
-            cx.weak_entity(),
             project.clone(),
             thread.action_log().clone(),
+            true,
         ));
         thread.add_tool(crate::EditFileTool::new(
             project.clone(),

crates/agent/src/tests/mod.rs 🔗

@@ -159,7 +159,7 @@ impl crate::TerminalHandle for FakeTerminalHandle {
 
 struct FakeSubagentHandle {
     session_id: acp::SessionId,
-    wait_for_summary_task: Shared<Task<String>>,
+    send_task: Shared<Task<String>>,
 }
 
 impl SubagentHandle for FakeSubagentHandle {
@@ -167,8 +167,12 @@ impl SubagentHandle for FakeSubagentHandle {
         self.session_id.clone()
     }
 
-    fn wait_for_output(&self, cx: &AsyncApp) -> Task<Result<String>> {
-        let task = self.wait_for_summary_task.clone();
+    fn num_entries(&self, _cx: &App) -> usize {
+        unimplemented!()
+    }
+
+    fn send(&self, _message: String, cx: &AsyncApp) -> Task<Result<String>> {
+        let task = self.send_task.clone();
         cx.background_spawn(async move { Ok(task.await) })
     }
 }
@@ -203,13 +207,7 @@ impl crate::ThreadEnvironment for FakeThreadEnvironment {
         Task::ready(Ok(handle as Rc<dyn crate::TerminalHandle>))
     }
 
-    fn create_subagent(
-        &self,
-        _parent_thread: Entity<Thread>,
-        _label: String,
-        _initial_prompt: String,
-        _cx: &mut App,
-    ) -> Result<Rc<dyn SubagentHandle>> {
+    fn create_subagent(&self, _label: String, _cx: &mut App) -> Result<Rc<dyn SubagentHandle>> {
         Ok(self
             .subagent_handle
             .clone()
@@ -248,13 +246,7 @@ impl crate::ThreadEnvironment for MultiTerminalEnvironment {
         Task::ready(Ok(handle as Rc<dyn crate::TerminalHandle>))
     }
 
-    fn create_subagent(
-        &self,
-        _parent_thread: Entity<Thread>,
-        _label: String,
-        _initial_prompt: String,
-        _cx: &mut App,
-    ) -> Result<Rc<dyn SubagentHandle>> {
+    fn create_subagent(&self, _label: String, _cx: &mut App) -> Result<Rc<dyn SubagentHandle>> {
         unimplemented!()
     }
 }
@@ -285,8 +277,17 @@ async fn test_echo(cx: &mut TestAppContext) {
 
     let events = events.collect().await;
     thread.update(cx, |thread, _cx| {
-        assert_eq!(thread.last_message().unwrap().role(), Role::Assistant);
-        assert_eq!(thread.last_message().unwrap().to_markdown(), "Hello\n")
+        assert_eq!(
+            thread.last_received_or_pending_message().unwrap().role(),
+            Role::Assistant
+        );
+        assert_eq!(
+            thread
+                .last_received_or_pending_message()
+                .unwrap()
+                .to_markdown(),
+            "Hello\n"
+        )
     });
     assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]);
 }
@@ -310,11 +311,11 @@ async fn test_terminal_tool_timeout_kills_handle(cx: &mut TestAppContext) {
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::TerminalToolInput {
+            ToolInput::resolved(crate::TerminalToolInput {
                 command: "sleep 1000".to_string(),
                 cd: ".".to_string(),
                 timeout_ms: Some(5),
-            },
+            }),
             event_stream,
             cx,
         )
@@ -377,11 +378,11 @@ async fn test_terminal_tool_without_timeout_does_not_kill_handle(cx: &mut TestAp
 
     let _task = cx.update(|cx| {
         tool.run(
-            crate::TerminalToolInput {
+            ToolInput::resolved(crate::TerminalToolInput {
                 command: "sleep 1000".to_string(),
                 cd: ".".to_string(),
                 timeout_ms: None,
-            },
+            }),
             event_stream,
             cx,
         )
@@ -438,9 +439,15 @@ async fn test_thinking(cx: &mut TestAppContext) {
 
     let events = events.collect().await;
     thread.update(cx, |thread, _cx| {
-        assert_eq!(thread.last_message().unwrap().role(), Role::Assistant);
         assert_eq!(
-            thread.last_message().unwrap().to_markdown(),
+            thread.last_received_or_pending_message().unwrap().role(),
+            Role::Assistant
+        );
+        assert_eq!(
+            thread
+                .last_received_or_pending_message()
+                .unwrap()
+                .to_markdown(),
             indoc! {"
                 <think>Think</think>
                 Hello
@@ -718,7 +725,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) {
     thread.update(cx, |thread, _cx| {
         assert!(
             thread
-                .last_message()
+                .last_received_or_pending_message()
                 .unwrap()
                 .as_agent_message()
                 .unwrap()
@@ -755,7 +762,7 @@ async fn test_streaming_tool_calls(cx: &mut TestAppContext) {
         if let Ok(ThreadEvent::ToolCall(tool_call)) = event {
             thread.update(cx, |thread, _cx| {
                 // Look for a tool use in the thread's last message
-                let message = thread.last_message().unwrap();
+                let message = thread.last_received_or_pending_message().unwrap();
                 let agent_message = message.as_agent_message().unwrap();
                 let last_content = agent_message.content.last().unwrap();
                 if let AgentMessageContent::ToolUse(last_tool_use) = last_content {
@@ -1225,7 +1232,7 @@ async fn test_concurrent_tool_calls(cx: &mut TestAppContext) {
     assert_eq!(stop_reasons, vec![acp::StopReason::EndTurn]);
 
     thread.update(cx, |thread, _cx| {
-        let last_message = thread.last_message().unwrap();
+        let last_message = thread.last_received_or_pending_message().unwrap();
         let agent_message = last_message.as_agent_message().unwrap();
         let text = agent_message
             .content
@@ -1931,7 +1938,7 @@ async fn test_cancellation(cx: &mut TestAppContext) {
         .collect::<Vec<_>>()
         .await;
     thread.update(cx, |thread, _cx| {
-        let message = thread.last_message().unwrap();
+        let message = thread.last_received_or_pending_message().unwrap();
         let agent_message = message.as_agent_message().unwrap();
         assert_eq!(
             agent_message.content,
@@ -2000,7 +2007,7 @@ async fn test_terminal_tool_cancellation_captures_output(cx: &mut TestAppContext
 
     // Verify the tool result contains the terminal output, not just "Tool canceled by user"
     thread.update(cx, |thread, _cx| {
-        let message = thread.last_message().unwrap();
+        let message = thread.last_received_or_pending_message().unwrap();
         let agent_message = message.as_agent_message().unwrap();
 
         let tool_use = agent_message
@@ -2156,7 +2163,7 @@ async fn verify_thread_recovery(
 
     let events = events.collect::<Vec<_>>().await;
     thread.update(cx, |thread, _cx| {
-        let message = thread.last_message().unwrap();
+        let message = thread.last_received_or_pending_message().unwrap();
         let agent_message = message.as_agent_message().unwrap();
         assert_eq!(
             agent_message.content,
@@ -2465,7 +2472,7 @@ async fn test_terminal_tool_stopped_via_terminal_card_button(cx: &mut TestAppCon
 
     // Verify the tool result indicates user stopped
     thread.update(cx, |thread, _cx| {
-        let message = thread.last_message().unwrap();
+        let message = thread.last_received_or_pending_message().unwrap();
         let agent_message = message.as_agent_message().unwrap();
 
         let tool_use = agent_message
@@ -2560,7 +2567,7 @@ async fn test_terminal_tool_timeout_expires(cx: &mut TestAppContext) {
 
     // Verify the tool result indicates timeout, not user stopped
     thread.update(cx, |thread, _cx| {
-        let message = thread.last_message().unwrap();
+        let message = thread.last_received_or_pending_message().unwrap();
         let agent_message = message.as_agent_message().unwrap();
 
         let tool_use = agent_message
@@ -2624,6 +2631,84 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) {
     assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]);
 }
 
+#[gpui::test]
+async fn test_retry_cancelled_promptly_on_new_send(cx: &mut TestAppContext) {
+    // Regression test: when a completion fails with a retryable error (e.g. upstream 500),
+    // the retry loop waits on a timer. If the user switches models and sends a new message
+    // during that delay, the old turn should exit immediately instead of retrying with the
+    // stale model.
+    let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+    let model_a = model.as_fake();
+
+    // Start a turn with model_a.
+    let events_1 = thread
+        .update(cx, |thread, cx| {
+            thread.send(UserMessageId::new(), ["Hello"], cx)
+        })
+        .unwrap();
+    cx.run_until_parked();
+    assert_eq!(model_a.completion_count(), 1);
+
+    // Model returns a retryable upstream 500. The turn enters the retry delay.
+    model_a.send_last_completion_stream_error(
+        LanguageModelCompletionError::UpstreamProviderError {
+            message: "Internal server error".to_string(),
+            status: http_client::StatusCode::INTERNAL_SERVER_ERROR,
+            retry_after: None,
+        },
+    );
+    model_a.end_last_completion_stream();
+    cx.run_until_parked();
+
+    // The old completion was consumed; model_a has no pending requests yet because the
+    // retry timer hasn't fired.
+    assert_eq!(model_a.completion_count(), 0);
+
+    // Switch to model_b and send a new message. This cancels the old turn.
+    let model_b = Arc::new(FakeLanguageModel::with_id_and_thinking(
+        "fake", "model-b", "Model B", false,
+    ));
+    thread.update(cx, |thread, cx| {
+        thread.set_model(model_b.clone(), cx);
+    });
+    let events_2 = thread
+        .update(cx, |thread, cx| {
+            thread.send(UserMessageId::new(), ["Continue"], cx)
+        })
+        .unwrap();
+    cx.run_until_parked();
+
+    // model_b should have received its completion request.
+    assert_eq!(model_b.as_fake().completion_count(), 1);
+
+    // Advance the clock well past the retry delay (BASE_RETRY_DELAY = 5s).
+    cx.executor().advance_clock(Duration::from_secs(10));
+    cx.run_until_parked();
+
+    // model_a must NOT have received another completion request — the cancelled turn
+    // should have exited during the retry delay rather than retrying with the old model.
+    assert_eq!(
+        model_a.completion_count(),
+        0,
+        "old model should not receive a retry request after cancellation"
+    );
+
+    // Complete model_b's turn.
+    model_b
+        .as_fake()
+        .send_last_completion_stream_text_chunk("Done!");
+    model_b
+        .as_fake()
+        .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn));
+    model_b.as_fake().end_last_completion_stream();
+
+    let events_1 = events_1.collect::<Vec<_>>().await;
+    assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]);
+
+    let events_2 = events_2.collect::<Vec<_>>().await;
+    assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]);
+}
+
 #[gpui::test]
 async fn test_subsequent_successful_sends_dont_cancel(cx: &mut TestAppContext) {
     let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
@@ -3456,7 +3541,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
     events.collect::<Vec<_>>().await;
     thread.read_with(cx, |thread, _cx| {
         assert_eq!(
-            thread.last_message(),
+            thread.last_received_or_pending_message(),
             Some(Message::Agent(AgentMessage {
                 content: vec![AgentMessageContent::Text("Done".into())],
                 tool_results: IndexMap::default(),
@@ -3520,6 +3605,113 @@ async fn test_send_max_retries_exceeded(cx: &mut TestAppContext) {
     ));
 }
 
+#[gpui::test]
+async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input(
+    cx: &mut TestAppContext,
+) {
+    init_test(cx);
+    always_allow_tools(cx);
+
+    let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+    let fake_model = model.as_fake();
+
+    thread.update(cx, |thread, _cx| {
+        thread.add_tool(StreamingEchoTool);
+    });
+
+    let _events = thread
+        .update(cx, |thread, cx| {
+            thread.send(UserMessageId::new(), ["Use the streaming_echo tool"], cx)
+        })
+        .unwrap();
+    cx.run_until_parked();
+
+    // Send a partial tool use (is_input_complete = false), simulating the LLM
+    // streaming input for a tool.
+    let tool_use = LanguageModelToolUse {
+        id: "tool_1".into(),
+        name: "streaming_echo".into(),
+        raw_input: r#"{"text": "partial"}"#.into(),
+        input: json!({"text": "partial"}),
+        is_input_complete: false,
+        thought_signature: None,
+    };
+    fake_model
+        .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
+    cx.run_until_parked();
+
+    // Send a stream error WITHOUT ever sending is_input_complete = true.
+    // Before the fix, this would deadlock: the tool waits for more partials
+    // (or cancellation), run_turn_internal waits for the tool, and the sender
+    // keeping the channel open lives inside RunningTurn.
+    fake_model.send_last_completion_stream_error(
+        LanguageModelCompletionError::UpstreamProviderError {
+            message: "Internal server error".to_string(),
+            status: http_client::StatusCode::INTERNAL_SERVER_ERROR,
+            retry_after: None,
+        },
+    );
+    fake_model.end_last_completion_stream();
+
+    // Advance past the retry delay so run_turn_internal retries.
+    cx.executor().advance_clock(Duration::from_secs(5));
+    cx.run_until_parked();
+
+    // The retry request should contain the streaming tool's error result,
+    // proving the tool terminated and its result was forwarded.
+    let completion = fake_model
+        .pending_completions()
+        .pop()
+        .expect("No running turn");
+    assert_eq!(
+        completion.messages[1..],
+        vec![
+            LanguageModelRequestMessage {
+                role: Role::User,
+                content: vec!["Use the streaming_echo tool".into()],
+                cache: false,
+                reasoning_details: None,
+            },
+            LanguageModelRequestMessage {
+                role: Role::Assistant,
+                content: vec![language_model::MessageContent::ToolUse(tool_use.clone())],
+                cache: false,
+                reasoning_details: None,
+            },
+            LanguageModelRequestMessage {
+                role: Role::User,
+                content: vec![language_model::MessageContent::ToolResult(
+                    LanguageModelToolResult {
+                        tool_use_id: tool_use.id.clone(),
+                        tool_name: tool_use.name,
+                        is_error: true,
+                        content: "Failed to receive tool input: tool input was not fully received"
+                            .into(),
+                        output: Some(
+                            "Failed to receive tool input: tool input was not fully received"
+                                .into()
+                        ),
+                    }
+                )],
+                cache: true,
+                reasoning_details: None,
+            },
+        ]
+    );
+
+    // Finish the retry round so the turn completes cleanly.
+    fake_model.send_last_completion_stream_text_chunk("Done");
+    fake_model.end_last_completion_stream();
+    cx.run_until_parked();
+
+    thread.read_with(cx, |thread, _cx| {
+        assert!(
+            thread.is_turn_complete(),
+            "Thread should not be stuck; the turn should have completed",
+        );
+    });
+}
+
 /// Filters out the stop events for asserting against in tests
 fn stop_events(result_events: Vec<Result<ThreadEvent>>) -> Vec<acp::StopReason> {
     result_events
@@ -3575,6 +3767,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
                             ToolRequiringPermission::NAME: true,
                             InfiniteTool::NAME: true,
                             CancellationAwareTool::NAME: true,
+                            StreamingEchoTool::NAME: true,
                             (TerminalTool::NAME): true,
                         }
                     }
@@ -3991,11 +4184,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) {
 
         let task = cx.update(|cx| {
             tool.run(
-                crate::TerminalToolInput {
+                ToolInput::resolved(crate::TerminalToolInput {
                     command: "rm -rf /".to_string(),
                     cd: ".".to_string(),
                     timeout_ms: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -4043,11 +4236,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) {
 
         let task = cx.update(|cx| {
             tool.run(
-                crate::TerminalToolInput {
+                ToolInput::resolved(crate::TerminalToolInput {
                     command: "echo hello".to_string(),
                     cd: ".".to_string(),
                     timeout_ms: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -4101,11 +4294,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) {
 
         let _task = cx.update(|cx| {
             tool.run(
-                crate::TerminalToolInput {
+                ToolInput::resolved(crate::TerminalToolInput {
                     command: "sudo rm file".to_string(),
                     cd: ".".to_string(),
                     timeout_ms: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -4148,11 +4341,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) {
 
         let task = cx.update(|cx| {
             tool.run(
-                crate::TerminalToolInput {
+                ToolInput::resolved(crate::TerminalToolInput {
                     command: "echo hello".to_string(),
                     cd: ".".to_string(),
                     timeout_ms: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -4306,6 +4499,160 @@ async fn test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) {
 
             subagent task response
 
+            ## Assistant
+
+            Response
+
+        "#},
+    );
+}
+
+#[gpui::test]
+async fn test_subagent_tool_output_does_not_include_thinking(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.update(|cx| {
+        LanguageModelRegistry::test(cx);
+    });
+    cx.update(|cx| {
+        cx.update_flags(true, vec!["subagents".to_string()]);
+    });
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/",
+        json!({
+            "a": {
+                "b.md": "Lorem"
+            }
+        }),
+    )
+    .await;
+    let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await;
+    let thread_store = cx.new(|cx| ThreadStore::new(cx));
+    let agent = NativeAgent::new(
+        project.clone(),
+        thread_store.clone(),
+        Templates::new(),
+        None,
+        fs.clone(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    let connection = Rc::new(NativeAgentConnection(agent.clone()));
+
+    let acp_thread = cx
+        .update(|cx| {
+            connection
+                .clone()
+                .new_session(project.clone(), Path::new(""), cx)
+        })
+        .await
+        .unwrap();
+    let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone());
+    let thread = agent.read_with(cx, |agent, _| {
+        agent.sessions.get(&session_id).unwrap().thread.clone()
+    });
+    let model = Arc::new(FakeLanguageModel::default());
+
+    // Ensure empty threads are not saved, even if they get mutated.
+    thread.update(cx, |thread, cx| {
+        thread.set_model(model.clone(), cx);
+    });
+    cx.run_until_parked();
+
+    let send = acp_thread.update(cx, |thread, cx| thread.send_raw("Prompt", cx));
+    cx.run_until_parked();
+    model.send_last_completion_stream_text_chunk("spawning subagent");
+    let subagent_tool_input = SpawnAgentToolInput {
+        label: "label".to_string(),
+        message: "subagent task prompt".to_string(),
+        session_id: None,
+    };
+    let subagent_tool_use = LanguageModelToolUse {
+        id: "subagent_1".into(),
+        name: SpawnAgentTool::NAME.into(),
+        raw_input: serde_json::to_string(&subagent_tool_input).unwrap(),
+        input: serde_json::to_value(&subagent_tool_input).unwrap(),
+        is_input_complete: true,
+        thought_signature: None,
+    };
+    model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+        subagent_tool_use,
+    ));
+    model.end_last_completion_stream();
+
+    cx.run_until_parked();
+
+    let subagent_session_id = thread.read_with(cx, |thread, cx| {
+        thread
+            .running_subagent_ids(cx)
+            .get(0)
+            .expect("subagent thread should be running")
+            .clone()
+    });
+
+    let subagent_thread = agent.read_with(cx, |agent, _cx| {
+        agent
+            .sessions
+            .get(&subagent_session_id)
+            .expect("subagent session should exist")
+            .acp_thread
+            .clone()
+    });
+
+    model.send_last_completion_stream_text_chunk("subagent task response 1");
+    model.send_last_completion_stream_event(LanguageModelCompletionEvent::Thinking {
+        text: "thinking more about the subagent task".into(),
+        signature: None,
+    });
+    model.send_last_completion_stream_text_chunk("subagent task response 2");
+    model.end_last_completion_stream();
+
+    cx.run_until_parked();
+
+    assert_eq!(
+        subagent_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)),
+        indoc! {"
+            ## User
+
+            subagent task prompt
+
+            ## Assistant
+
+            subagent task response 1
+
+            <thinking>
+            thinking more about the subagent task
+            </thinking>
+
+            subagent task response 2
+
+        "}
+    );
+
+    model.send_last_completion_stream_text_chunk("Response");
+    model.end_last_completion_stream();
+
+    send.await.unwrap();
+
+    assert_eq!(
+        acp_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)),
+        indoc! {r#"
+            ## User
+
+            Prompt
+
+            ## Assistant
+
+            spawning subagent
+
+            **Tool Call: label**
+            Status: Completed
+
+            subagent task response 1
+
+            subagent task response 2
 
             ## Assistant
 
@@ -5309,11 +5656,11 @@ async fn test_edit_file_tool_deny_rule_blocks_edit(cx: &mut TestAppContext) {
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::EditFileToolInput {
+            ToolInput::resolved(crate::EditFileToolInput {
                 display_description: "Edit sensitive file".to_string(),
                 path: "root/sensitive_config.txt".into(),
                 mode: crate::EditFileMode::Edit,
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5359,9 +5706,9 @@ async fn test_delete_path_tool_deny_rule_blocks_deletion(cx: &mut TestAppContext
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::DeletePathToolInput {
+            ToolInput::resolved(crate::DeletePathToolInput {
                 path: "root/important_data.txt".to_string(),
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5411,10 +5758,10 @@ async fn test_move_path_tool_denies_if_destination_denied(cx: &mut TestAppContex
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::MovePathToolInput {
+            ToolInput::resolved(crate::MovePathToolInput {
                 source_path: "root/safe.txt".to_string(),
                 destination_path: "root/protected/safe.txt".to_string(),
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5467,10 +5814,10 @@ async fn test_move_path_tool_denies_if_source_denied(cx: &mut TestAppContext) {
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::MovePathToolInput {
+            ToolInput::resolved(crate::MovePathToolInput {
                 source_path: "root/secret.txt".to_string(),
                 destination_path: "root/public/not_secret.txt".to_string(),
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5525,10 +5872,10 @@ async fn test_copy_path_tool_deny_rule_blocks_copy(cx: &mut TestAppContext) {
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::CopyPathToolInput {
+            ToolInput::resolved(crate::CopyPathToolInput {
                 source_path: "root/confidential.txt".to_string(),
                 destination_path: "root/dest/copy.txt".to_string(),
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5580,12 +5927,12 @@ async fn test_save_file_tool_denies_if_any_path_denied(cx: &mut TestAppContext)
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::SaveFileToolInput {
+            ToolInput::resolved(crate::SaveFileToolInput {
                 paths: vec![
                     std::path::PathBuf::from("root/normal.txt"),
                     std::path::PathBuf::from("root/readonly/config.txt"),
                 ],
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5632,9 +5979,9 @@ async fn test_save_file_tool_respects_deny_rules(cx: &mut TestAppContext) {
 
     let task = cx.update(|cx| {
         tool.run(
-            crate::SaveFileToolInput {
+            ToolInput::resolved(crate::SaveFileToolInput {
                 paths: vec![std::path::PathBuf::from("root/config.secret")],
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5676,7 +6023,7 @@ async fn test_web_search_tool_deny_rule_blocks_search(cx: &mut TestAppContext) {
     let input: crate::WebSearchToolInput =
         serde_json::from_value(json!({"query": "internal.company.com secrets"})).unwrap();
 
-    let task = cx.update(|cx| tool.run(input, event_stream, cx));
+    let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
     let result = task.await;
     assert!(result.is_err(), "expected search to be blocked");
@@ -5741,11 +6088,11 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte
 
     let _task = cx.update(|cx| {
         tool.run(
-            crate::EditFileToolInput {
+            ToolInput::resolved(crate::EditFileToolInput {
                 display_description: "Edit README".to_string(),
                 path: "root/README.md".into(),
                 mode: crate::EditFileMode::Edit,
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5811,11 +6158,11 @@ async fn test_edit_file_tool_allow_still_prompts_for_local_settings(cx: &mut Tes
     let (event_stream, mut rx) = crate::ToolCallEventStream::test();
     let _task = cx.update(|cx| {
         tool.run(
-            crate::EditFileToolInput {
+            ToolInput::resolved(crate::EditFileToolInput {
                 display_description: "Edit local settings".to_string(),
                 path: "root/.zed/settings.json".into(),
                 mode: crate::EditFileMode::Edit,
-            },
+            }),
             event_stream,
             cx,
         )
@@ -5855,7 +6202,7 @@ async fn test_fetch_tool_deny_rule_blocks_url(cx: &mut TestAppContext) {
     let input: crate::FetchToolInput =
         serde_json::from_value(json!({"url": "https://internal.company.com/api"})).unwrap();
 
-    let task = cx.update(|cx| tool.run(input, event_stream, cx));
+    let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
     let result = task.await;
     assert!(result.is_err(), "expected fetch to be blocked");
@@ -5893,7 +6240,7 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext)
     let input: crate::FetchToolInput =
         serde_json::from_value(json!({"url": "https://docs.rs/some-crate"})).unwrap();
 
-    let _task = cx.update(|cx| tool.run(input, event_stream, cx));
+    let _task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
     cx.run_until_parked();
 

crates/agent/src/tests/test_tools.rs 🔗

@@ -3,6 +3,57 @@ use agent_settings::AgentSettings;
 use gpui::{App, SharedString, Task};
 use std::future;
 use std::sync::atomic::{AtomicBool, Ordering};
+use std::time::Duration;
+
+/// A streaming tool that echoes its input, used to test streaming tool
+/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends
+/// before `is_input_complete`).
+#[derive(JsonSchema, Serialize, Deserialize)]
+pub struct StreamingEchoToolInput {
+    /// The text to echo.
+    pub text: String,
+}
+
+pub struct StreamingEchoTool;
+
+impl AgentTool for StreamingEchoTool {
+    type Input = StreamingEchoToolInput;
+    type Output = String;
+
+    const NAME: &'static str = "streaming_echo";
+
+    fn supports_input_streaming() -> bool {
+        true
+    }
+
+    fn kind() -> acp::ToolKind {
+        acp::ToolKind::Other
+    }
+
+    fn initial_title(
+        &self,
+        _input: Result<Self::Input, serde_json::Value>,
+        _cx: &mut App,
+    ) -> SharedString {
+        "Streaming Echo".into()
+    }
+
+    fn run(
+        self: Arc<Self>,
+        mut input: ToolInput<Self::Input>,
+        _event_stream: ToolCallEventStream,
+        cx: &mut App,
+    ) -> Task<Result<String, String>> {
+        cx.spawn(async move |_cx| {
+            while input.recv_partial().await.is_some() {}
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            Ok(input.text)
+        })
+    }
+}
 
 /// A tool that echoes its input
 #[derive(JsonSchema, Serialize, Deserialize)]
@@ -33,11 +84,17 @@ impl AgentTool for EchoTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         _event_stream: ToolCallEventStream,
-        _cx: &mut App,
+        cx: &mut App,
     ) -> Task<Result<String, String>> {
-        Task::ready(Ok(input.text))
+        cx.spawn(async move |_cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            Ok(input.text)
+        })
     }
 }
 
@@ -74,7 +131,7 @@ impl AgentTool for DelayTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         _event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<String, String>>
@@ -83,6 +140,10 @@ impl AgentTool for DelayTool {
     {
         let executor = cx.background_executor().clone();
         cx.foreground_executor().spawn(async move {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
             executor.timer(Duration::from_millis(input.ms)).await;
             Ok("Ding".to_string())
         })
@@ -114,28 +175,38 @@ impl AgentTool for ToolRequiringPermission {
 
     fn run(
         self: Arc<Self>,
-        _input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<String, String>> {
-        let settings = AgentSettings::get_global(cx);
-        let decision = decide_permission_from_settings(Self::NAME, &[String::new()], settings);
-
-        let authorize = match decision {
-            ToolPermissionDecision::Allow => None,
-            ToolPermissionDecision::Deny(reason) => {
-                return Task::ready(Err(reason));
-            }
-            ToolPermissionDecision::Confirm => {
-                let context = crate::ToolPermissionContext::new(
-                    "tool_requiring_permission",
-                    vec![String::new()],
-                );
-                Some(event_stream.authorize("Authorize?", context, cx))
-            }
-        };
+        cx.spawn(async move |cx| {
+            let _input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            let decision = cx.update(|cx| {
+                decide_permission_from_settings(
+                    Self::NAME,
+                    &[String::new()],
+                    AgentSettings::get_global(cx),
+                )
+            });
+
+            let authorize = match decision {
+                ToolPermissionDecision::Allow => None,
+                ToolPermissionDecision::Deny(reason) => {
+                    return Err(reason);
+                }
+                ToolPermissionDecision::Confirm => Some(cx.update(|cx| {
+                    let context = crate::ToolPermissionContext::new(
+                        "tool_requiring_permission",
+                        vec![String::new()],
+                    );
+                    event_stream.authorize("Authorize?", context, cx)
+                })),
+            };
 
-        cx.foreground_executor().spawn(async move {
             if let Some(authorize) = authorize {
                 authorize.await.map_err(|e| e.to_string())?;
             }
@@ -169,11 +240,15 @@ impl AgentTool for InfiniteTool {
 
     fn run(
         self: Arc<Self>,
-        _input: Self::Input,
+        input: ToolInput<Self::Input>,
         _event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<String, String>> {
         cx.foreground_executor().spawn(async move {
+            let _input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
             future::pending::<()>().await;
             unreachable!()
         })
@@ -221,11 +296,15 @@ impl AgentTool for CancellationAwareTool {
 
     fn run(
         self: Arc<Self>,
-        _input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<String, String>> {
         cx.foreground_executor().spawn(async move {
+            let _input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
             // Wait for cancellation - this tool does nothing but wait to be cancelled
             event_stream.cancelled_by_user().await;
             self.was_cancelled.store(true, Ordering::SeqCst);
@@ -276,10 +355,16 @@ impl AgentTool for WordListTool {
 
     fn run(
         self: Arc<Self>,
-        _input: Self::Input,
+        input: ToolInput<Self::Input>,
         _event_stream: ToolCallEventStream,
-        _cx: &mut App,
+        cx: &mut App,
     ) -> Task<Result<String, String>> {
-        Task::ready(Ok("ok".to_string()))
+        cx.spawn(async move |_cx| {
+            let _input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            Ok("ok".to_string())
+        })
     }
 }

crates/agent/src/thread.rs 🔗

@@ -1,16 +1,14 @@
 use crate::{
-    AgentGitWorktreeInfo, ContextServerRegistry, CopyPathTool, CreateDirectoryTool,
-    DbLanguageModel, DbThread, DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool,
-    FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot,
-    ReadFileTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool,
+    ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread,
+    DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool,
+    ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool,
+    RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool,
     SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, WebSearchTool,
     decide_permission_from_settings,
 };
 use acp_thread::{MentionUri, UserMessageId};
 use action_log::ActionLog;
-use feature_flags::{
-    FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, SubagentsFeatureFlag,
-};
+use feature_flags::{FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag};
 
 use agent_client_protocol as acp;
 use agent_settings::{
@@ -40,16 +38,19 @@ use language_model::{
     LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
     LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
     LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
-    LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
+    LanguageModelToolUseId, Role, SelectedModel, Speed, StopReason, TokenUsage,
+    ZED_CLOUD_PROVIDER_ID,
 };
 use project::Project;
 use prompt_store::ProjectContext;
 use schemars::{JsonSchema, Schema};
+use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
 use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file};
 use smol::stream::StreamExt;
 use std::{
     collections::BTreeMap,
+    marker::PhantomData,
     ops::RangeInclusive,
     path::Path,
     rc::Rc,
@@ -602,8 +603,13 @@ pub trait TerminalHandle {
 }
 
 pub trait SubagentHandle {
+    /// The session ID of this subagent thread
     fn id(&self) -> acp::SessionId;
-    fn wait_for_output(&self, cx: &AsyncApp) -> Task<Result<String>>;
+    /// The current number of entries in the thread.
+    /// Useful for knowing where the next turn will begin
+    fn num_entries(&self, cx: &App) -> usize;
+    /// Runs a turn for a given message and returns both the response and the index of that output message.
+    fn send(&self, message: String, cx: &AsyncApp) -> Task<Result<String>>;
 }
 
 pub trait ThreadEnvironment {
@@ -615,19 +621,11 @@ pub trait ThreadEnvironment {
         cx: &mut AsyncApp,
     ) -> Task<Result<Rc<dyn TerminalHandle>>>;
 
-    fn create_subagent(
-        &self,
-        parent_thread: Entity<Thread>,
-        label: String,
-        initial_prompt: String,
-        cx: &mut App,
-    ) -> Result<Rc<dyn SubagentHandle>>;
+    fn create_subagent(&self, label: String, cx: &mut App) -> Result<Rc<dyn SubagentHandle>>;
 
     fn resume_subagent(
         &self,
-        _parent_thread: Entity<Thread>,
         _session_id: acp::SessionId,
-        _follow_up_prompt: String,
         _cx: &mut App,
     ) -> Result<Rc<dyn SubagentHandle>> {
         Err(anyhow::anyhow!(
@@ -890,20 +888,20 @@ pub struct Thread {
     summarization_model: Option<Arc<dyn LanguageModel>>,
     thinking_enabled: bool,
     thinking_effort: Option<String>,
+    speed: Option<Speed>,
     prompt_capabilities_tx: watch::Sender<acp::PromptCapabilities>,
     pub(crate) prompt_capabilities_rx: watch::Receiver<acp::PromptCapabilities>,
     pub(crate) project: Entity<Project>,
     pub(crate) action_log: Entity<ActionLog>,
-    /// Tracks the last time files were read by the agent, to detect external modifications
-    pub(crate) file_read_times: HashMap<PathBuf, fs::MTime>,
     /// True if this thread was imported from a shared thread and can be synced.
     imported: bool,
     /// If this is a subagent thread, contains context about the parent
     subagent_context: Option<SubagentContext>,
+    /// The user's unsent prompt text, persisted so it can be restored when reloading the thread.
+    draft_prompt: Option<Vec<acp::ContentBlock>>,
+    ui_scroll_position: Option<gpui::ListOffset>,
     /// Weak references to running subagent threads for cancellation propagation
     running_subagents: Vec<WeakEntity<Thread>>,
-    /// Git worktree info if this thread is running in an agent worktree.
-    git_worktree_info: Option<AgentGitWorktreeInfo>,
 }
 
 impl Thread {
@@ -920,12 +918,16 @@ impl Thread {
         let context_server_registry = parent_thread.read(cx).context_server_registry.clone();
         let templates = parent_thread.read(cx).templates.clone();
         let model = parent_thread.read(cx).model().cloned();
-        let mut thread = Self::new(
+        let parent_action_log = parent_thread.read(cx).action_log().clone();
+        let action_log =
+            cx.new(|_cx| ActionLog::new(project.clone()).with_linked_action_log(parent_action_log));
+        let mut thread = Self::new_internal(
             project,
             project_context,
             context_server_registry,
             templates,
             model,
+            action_log,
             cx,
         );
         thread.subagent_context = Some(SubagentContext {
@@ -942,6 +944,26 @@ impl Thread {
         templates: Arc<Templates>,
         model: Option<Arc<dyn LanguageModel>>,
         cx: &mut Context<Self>,
+    ) -> Self {
+        Self::new_internal(
+            project.clone(),
+            project_context,
+            context_server_registry,
+            templates,
+            model,
+            cx.new(|_cx| ActionLog::new(project)),
+            cx,
+        )
+    }
+
+    fn new_internal(
+        project: Entity<Project>,
+        project_context: Entity<ProjectContext>,
+        context_server_registry: Entity<ContextServerRegistry>,
+        templates: Arc<Templates>,
+        model: Option<Arc<dyn LanguageModel>>,
+        action_log: Entity<ActionLog>,
+        cx: &mut Context<Self>,
     ) -> Self {
         let settings = AgentSettings::get_global(cx);
         let profile_id = settings.default_profile.clone();
@@ -953,7 +975,6 @@ impl Thread {
             .default_model
             .as_ref()
             .and_then(|model| model.effort.clone());
-        let action_log = cx.new(|_cx| ActionLog::new(project.clone()));
         let (prompt_capabilities_tx, prompt_capabilities_rx) =
             watch::channel(Self::prompt_capabilities(model.as_deref()));
         Self {
@@ -985,16 +1006,17 @@ impl Thread {
             model,
             summarization_model: None,
             thinking_enabled: enable_thinking,
+            speed: None,
             thinking_effort,
             prompt_capabilities_tx,
             prompt_capabilities_rx,
             project,
             action_log,
-            file_read_times: HashMap::default(),
             imported: false,
             subagent_context: None,
+            draft_prompt: None,
+            ui_scroll_position: None,
             running_subagents: Vec::new(),
-            git_worktree_info: None,
         }
     }
 
@@ -1143,10 +1165,6 @@ impl Thread {
         let profile_id = db_thread
             .profile
             .unwrap_or_else(|| settings.default_profile.clone());
-        let thinking_effort = settings
-            .default_model
-            .as_ref()
-            .and_then(|model| model.effort.clone());
 
         let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
             db_thread
@@ -1175,12 +1193,6 @@ impl Thread {
             watch::channel(Self::prompt_capabilities(model.as_deref()));
 
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        // TODO: We should serialize the user's configured thinking parameter on `DbThread`
-        // rather than deriving it from the model's capability. A user may have explicitly
-        // toggled thinking off for a model that supports it, and we'd lose that preference here.
-        let enable_thinking = model
-            .as_deref()
-            .is_some_and(|model| model.supports_thinking());
 
         Self {
             id,
@@ -1208,18 +1220,22 @@ impl Thread {
             templates,
             model,
             summarization_model: None,
-            thinking_enabled: enable_thinking,
-            thinking_effort,
+            thinking_enabled: db_thread.thinking_enabled,
+            thinking_effort: db_thread.thinking_effort,
+            speed: db_thread.speed,
             project,
             action_log,
             updated_at: db_thread.updated_at,
             prompt_capabilities_tx,
             prompt_capabilities_rx,
-            file_read_times: HashMap::default(),
             imported: db_thread.imported,
             subagent_context: db_thread.subagent_context,
+            draft_prompt: db_thread.draft_prompt,
+            ui_scroll_position: db_thread.ui_scroll_position.map(|sp| gpui::ListOffset {
+                item_ix: sp.item_ix,
+                offset_in_item: gpui::px(sp.offset_in_item),
+            }),
             running_subagents: Vec::new(),
-            git_worktree_info: db_thread.git_worktree_info,
         }
     }
 
@@ -1240,7 +1256,16 @@ impl Thread {
             profile: Some(self.profile_id.clone()),
             imported: self.imported,
             subagent_context: self.subagent_context.clone(),
-            git_worktree_info: self.git_worktree_info.clone(),
+            speed: self.speed,
+            thinking_enabled: self.thinking_enabled,
+            thinking_effort: self.thinking_effort.clone(),
+            draft_prompt: self.draft_prompt.clone(),
+            ui_scroll_position: self.ui_scroll_position.map(|lo| {
+                crate::db::SerializedScrollPosition {
+                    item_ix: lo.item_ix,
+                    offset_in_item: lo.offset_in_item.as_f32(),
+                }
+            }),
         };
 
         cx.background_spawn(async move {
@@ -1282,19 +1307,42 @@ impl Thread {
         self.messages.is_empty() && self.title.is_none()
     }
 
+    pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> {
+        self.draft_prompt.as_deref()
+    }
+
+    pub fn set_draft_prompt(&mut self, prompt: Option<Vec<acp::ContentBlock>>) {
+        self.draft_prompt = prompt;
+    }
+
+    pub fn ui_scroll_position(&self) -> Option<gpui::ListOffset> {
+        self.ui_scroll_position
+    }
+
+    pub fn set_ui_scroll_position(&mut self, position: Option<gpui::ListOffset>) {
+        self.ui_scroll_position = position;
+    }
+
     pub fn model(&self) -> Option<&Arc<dyn LanguageModel>> {
         self.model.as_ref()
     }
 
     pub fn set_model(&mut self, model: Arc<dyn LanguageModel>, cx: &mut Context<Self>) {
         let old_usage = self.latest_token_usage();
-        self.model = Some(model);
+        self.model = Some(model.clone());
         let new_caps = Self::prompt_capabilities(self.model.as_deref());
         let new_usage = self.latest_token_usage();
         if old_usage != new_usage {
             cx.emit(TokenUsageUpdated(new_usage));
         }
         self.prompt_capabilities_tx.send(new_caps).log_err();
+
+        for subagent in &self.running_subagents {
+            subagent
+                .update(cx, |thread, cx| thread.set_model(model.clone(), cx))
+                .ok();
+        }
+
         cx.notify()
     }
 
@@ -1307,7 +1355,15 @@ impl Thread {
         model: Option<Arc<dyn LanguageModel>>,
         cx: &mut Context<Self>,
     ) {
-        self.summarization_model = model;
+        self.summarization_model = model.clone();
+
+        for subagent in &self.running_subagents {
+            subagent
+                .update(cx, |thread, cx| {
+                    thread.set_summarization_model(model.clone(), cx)
+                })
+                .ok();
+        }
         cx.notify()
     }
 
@@ -1317,6 +1373,12 @@ impl Thread {
 
     pub fn set_thinking_enabled(&mut self, enabled: bool, cx: &mut Context<Self>) {
         self.thinking_enabled = enabled;
+
+        for subagent in &self.running_subagents {
+            subagent
+                .update(cx, |thread, cx| thread.set_thinking_enabled(enabled, cx))
+                .ok();
+        }
         cx.notify();
     }
 
@@ -1325,11 +1387,39 @@ impl Thread {
     }
 
     pub fn set_thinking_effort(&mut self, effort: Option<String>, cx: &mut Context<Self>) {
-        self.thinking_effort = effort;
+        self.thinking_effort = effort.clone();
+
+        for subagent in &self.running_subagents {
+            subagent
+                .update(cx, |thread, cx| {
+                    thread.set_thinking_effort(effort.clone(), cx)
+                })
+                .ok();
+        }
         cx.notify();
     }
 
-    pub fn last_message(&self) -> Option<Message> {
+    pub fn speed(&self) -> Option<Speed> {
+        self.speed
+    }
+
+    pub fn set_speed(&mut self, speed: Speed, cx: &mut Context<Self>) {
+        self.speed = Some(speed);
+
+        for subagent in &self.running_subagents {
+            subagent
+                .update(cx, |thread, cx| thread.set_speed(speed, cx))
+                .ok();
+        }
+        cx.notify();
+    }
+
+    pub fn last_message(&self) -> Option<&Message> {
+        self.messages.last()
+    }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn last_received_or_pending_message(&self) -> Option<Message> {
         if let Some(message) = self.pending_message.clone() {
             Some(Message::Agent(message))
         } else {
@@ -1342,6 +1432,9 @@ impl Thread {
         environment: Rc<dyn ThreadEnvironment>,
         cx: &mut Context<Self>,
     ) {
+        // Only update the agent location for the root thread, not for subagents.
+        let update_agent_location = self.parent_thread_id().is_none();
+
         let language_registry = self.project.read(cx).languages().clone();
         self.add_tool(CopyPathTool::new(self.project.clone()));
         self.add_tool(CreateDirectoryTool::new(self.project.clone()));
@@ -1359,8 +1452,8 @@ impl Thread {
         self.add_tool(StreamingEditFileTool::new(
             self.project.clone(),
             cx.weak_entity(),
+            self.action_log.clone(),
             language_registry,
-            Templates::new(),
         ));
         self.add_tool(FetchTool::new(self.project.read(cx).client().http_client()));
         self.add_tool(FindPathTool::new(self.project.clone()));
@@ -1370,17 +1463,17 @@ impl Thread {
         self.add_tool(NowTool);
         self.add_tool(OpenTool::new(self.project.clone()));
         self.add_tool(ReadFileTool::new(
-            cx.weak_entity(),
             self.project.clone(),
             self.action_log.clone(),
+            update_agent_location,
         ));
         self.add_tool(SaveFileTool::new(self.project.clone()));
         self.add_tool(RestoreFileFromDiskTool::new(self.project.clone()));
         self.add_tool(TerminalTool::new(self.project.clone(), environment.clone()));
         self.add_tool(WebSearchTool);
 
-        if cx.has_flag::<SubagentsFeatureFlag>() && self.depth() < MAX_SUBAGENT_DEPTH {
-            self.add_tool(SpawnAgentTool::new(cx.weak_entity(), environment));
+        if self.depth() < MAX_SUBAGENT_DEPTH {
+            self.add_tool(SpawnAgentTool::new(environment));
         }
     }
 
@@ -1393,6 +1486,7 @@ impl Thread {
         self.tools.insert(T::NAME.into(), tool.erase());
     }
 
+    #[cfg(any(test, feature = "test-support"))]
     pub fn remove_tool(&mut self, name: &str) -> bool {
         self.tools.remove(name).is_some()
     }
@@ -1406,12 +1500,18 @@ impl Thread {
             return;
         }
 
-        self.profile_id = profile_id;
+        self.profile_id = profile_id.clone();
 
         // Swap to the profile's preferred model when available.
         if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
             self.set_model(model, cx);
         }
+
+        for subagent in &self.running_subagents {
+            subagent
+                .update(cx, |thread, cx| thread.set_profile(profile_id.clone(), cx))
+                .ok();
+        }
     }
 
     pub fn cancel(&mut self, cx: &mut Context<Self>) -> Task<()> {
@@ -1664,6 +1764,7 @@ impl Thread {
             event_stream: event_stream.clone(),
             tools: self.enabled_tools(profile, &model, cx),
             cancellation_tx,
+            streaming_tool_inputs: HashMap::default(),
             _task: cx.spawn(async move |this, cx| {
                 log::debug!("Starting agent turn execution");
 
@@ -1730,6 +1831,9 @@ impl Thread {
             telemetry::event!(
                 "Agent Thread Completion",
                 thread_id = this.read_with(cx, |this, _| this.id.to_string())?,
+                parent_thread_id = this.read_with(cx, |this, _| this
+                    .parent_thread_id()
+                    .map(|id| id.to_string()))?,
                 prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?,
                 model = model.telemetry_id(),
                 model_provider = model.provider_id().to_string(),
@@ -1814,6 +1918,19 @@ impl Thread {
             // that need their own permits.
             drop(events);
 
+            // Drop streaming tool input senders that never received their final input.
+            // This prevents deadlock when the LLM stream ends (e.g. because of an error)
+            // before sending a tool use with `is_input_complete: true`.
+            this.update(cx, |this, _cx| {
+                if let Some(running_turn) = this.running_turn.as_mut() {
+                    if running_turn.streaming_tool_inputs.is_empty() {
+                        return;
+                    }
+                    log::warn!("Dropping partial tool inputs because the stream ended");
+                    running_turn.streaming_tool_inputs.drain();
+                }
+            })?;
+
             let end_turn = tool_results.is_empty();
             while let Some(tool_result) = tool_results.next().await {
                 log::debug!("Tool finished {:?}", tool_result);
@@ -1856,7 +1973,15 @@ impl Thread {
                 })??;
                 let timer = cx.background_executor().timer(retry.duration);
                 event_stream.send_retry(retry);
-                timer.await;
+                futures::select! {
+                    _ = timer.fuse() => {}
+                    _ = cancellation_rx.changed().fuse() => {
+                        if *cancellation_rx.borrow() {
+                            log::debug!("Turn cancelled during retry delay, exiting");
+                            return Ok(());
+                        }
+                    }
+                }
                 this.update(cx, |this, _cx| {
                     if let Some(Message::Agent(message)) = this.messages.last() {
                         if message.tool_results.is_empty() {
@@ -1988,6 +2113,7 @@ impl Thread {
                 telemetry::event!(
                     "Agent Thread Completion Usage Updated",
                     thread_id = self.id.to_string(),
+                    parent_thread_id = self.parent_thread_id().map(|id| id.to_string()),
                     prompt_id = self.prompt_id.to_string(),
                     model = self.model.as_ref().map(|m| m.telemetry_id()),
                     model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()),
@@ -2068,10 +2194,6 @@ impl Thread {
 
         self.send_or_update_tool_use(&tool_use, title, kind, event_stream);
 
-        if !tool_use.is_input_complete {
-            return None;
-        }
-
         let Some(tool) = tool else {
             let content = format!("No tool named {} exists", tool_use.name);
             return Some(Task::ready(LanguageModelToolResult {
@@ -2083,9 +2205,72 @@ impl Thread {
             }));
         };
 
+        if !tool_use.is_input_complete {
+            if tool.supports_input_streaming() {
+                let running_turn = self.running_turn.as_mut()?;
+                if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) {
+                    sender.send_partial(tool_use.input);
+                    return None;
+                }
+
+                let (sender, tool_input) = ToolInputSender::channel();
+                sender.send_partial(tool_use.input);
+                running_turn
+                    .streaming_tool_inputs
+                    .insert(tool_use.id.clone(), sender);
+
+                let tool = tool.clone();
+                log::debug!("Running streaming tool {}", tool_use.name);
+                return Some(self.run_tool(
+                    tool,
+                    tool_input,
+                    tool_use.id,
+                    tool_use.name,
+                    event_stream,
+                    cancellation_rx,
+                    cx,
+                ));
+            } else {
+                return None;
+            }
+        }
+
+        if let Some(sender) = self
+            .running_turn
+            .as_mut()?
+            .streaming_tool_inputs
+            .remove(&tool_use.id)
+        {
+            sender.send_final(tool_use.input);
+            return None;
+        }
+
+        log::debug!("Running tool {}", tool_use.name);
+        let tool_input = ToolInput::ready(tool_use.input);
+        Some(self.run_tool(
+            tool,
+            tool_input,
+            tool_use.id,
+            tool_use.name,
+            event_stream,
+            cancellation_rx,
+            cx,
+        ))
+    }
+
+    fn run_tool(
+        &self,
+        tool: Arc<dyn AnyAgentTool>,
+        tool_input: ToolInput<serde_json::Value>,
+        tool_use_id: LanguageModelToolUseId,
+        tool_name: Arc<str>,
+        event_stream: &ThreadEventStream,
+        cancellation_rx: watch::Receiver<bool>,
+        cx: &mut Context<Self>,
+    ) -> Task<LanguageModelToolResult> {
         let fs = self.project.read(cx).fs().clone();
         let tool_event_stream = ToolCallEventStream::new(
-            tool_use.id.clone(),
+            tool_use_id.clone(),
             event_stream.clone(),
             Some(fs),
             cancellation_rx,
@@ -2094,9 +2279,8 @@ impl Thread {
             acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress),
         );
         let supports_images = self.model().is_some_and(|model| model.supports_images());
-        let tool_result = tool.run(tool_use.input, tool_event_stream, cx);
-        log::debug!("Running tool {}", tool_use.name);
-        Some(cx.foreground_executor().spawn(async move {
+        let tool_result = tool.run(tool_input, tool_event_stream, cx);
+        cx.foreground_executor().spawn(async move {
             let (is_error, output) = match tool_result.await {
                 Ok(mut output) => {
                     if let LanguageModelToolResultContent::Image(_) = &output.llm_output
@@ -2114,13 +2298,13 @@ impl Thread {
             };
 
             LanguageModelToolResult {
-                tool_use_id: tool_use.id,
-                tool_name: tool_use.name,
+                tool_use_id,
+                tool_name,
                 is_error,
                 content: output.llm_output,
                 output: Some(output.raw_output),
             }
-        }))
+        })
     }
 
     fn handle_tool_use_json_parse_error_event(
@@ -2165,20 +2349,18 @@ impl Thread {
     ) {
         // Ensure the last message ends in the current tool use
         let last_message = self.pending_message();
-        let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| {
+
+        let has_tool_use = last_message.content.iter_mut().rev().any(|content| {
             if let AgentMessageContent::ToolUse(last_tool_use) = content {
                 if last_tool_use.id == tool_use.id {
                     *last_tool_use = tool_use.clone();
-                    false
-                } else {
-                    true
+                    return true;
                 }
-            } else {
-                true
             }
+            false
         });
 
-        if push_new_tool_use {
+        if !has_tool_use {
             event_stream.send_tool_call(
                 &tool_use.id,
                 &tool_use.name,
@@ -2321,7 +2503,12 @@ impl Thread {
                 anyhow::Ok(())
             };
 
-            if generate.await.context("failed to generate title").is_ok() {
+            if generate
+                .await
+                .context("failed to generate thread title")
+                .log_err()
+                .is_some()
+            {
                 _ = this.update(cx, |this, cx| this.set_title(title.into(), cx));
             }
             _ = this.update(cx, |this, _| this.pending_title_generation = None);
@@ -2406,6 +2593,7 @@ impl Thread {
                         name: tool_name.to_string(),
                         description: tool.description().to_string(),
                         input_schema: tool.input_schema(model.tool_input_format()).log_err()?,
+                        use_input_streaming: tool.supports_input_streaming(),
                     })
                 })
                 .collect::<Vec<_>>()
@@ -2437,6 +2625,7 @@ impl Thread {
             temperature: AgentSettings::temperature_for_model(model, cx),
             thinking_allowed: self.thinking_enabled,
             thinking_effort: self.thinking_effort.clone(),
+            speed: self.speed(),
         };
 
         log::debug!("Completion request built successfully");
@@ -2459,7 +2648,8 @@ impl Thread {
             }
         }
 
-        let use_streaming_edit_tool = cx.has_flag::<StreamingEditFileToolFeatureFlag>();
+        let use_streaming_edit_tool =
+            cx.has_flag::<StreamingEditFileToolFeatureFlag>() && model.supports_streaming_tools();
 
         let mut tools = self
             .tools
@@ -2776,6 +2966,9 @@ struct RunningTurn {
     /// Sender to signal tool cancellation. When cancel is called, this is
     /// set to true so all tools can detect user-initiated cancellation.
     cancellation_tx: watch::Sender<bool>,
+    /// Senders for tools that support input streaming and have already been
+    /// started but are still receiving input from the LLM.
+    streaming_tool_inputs: HashMap<LanguageModelToolUseId, ToolInputSender>,
 }
 
 impl RunningTurn {
@@ -2795,6 +2988,103 @@ pub struct TitleUpdated;
 
 impl EventEmitter<TitleUpdated> for Thread {}
 
+/// A channel-based wrapper that delivers tool input to a running tool.
+///
+/// For non-streaming tools, created via `ToolInput::ready()` so `.recv()` resolves immediately.
+/// For streaming tools, partial JSON snapshots arrive via `.recv_partial()` as the LLM streams
+/// them, followed by the final complete input available through `.recv()`.
+pub struct ToolInput<T> {
+    partial_rx: mpsc::UnboundedReceiver<serde_json::Value>,
+    final_rx: oneshot::Receiver<serde_json::Value>,
+    _phantom: PhantomData<T>,
+}
+
+impl<T: DeserializeOwned> ToolInput<T> {
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn resolved(input: impl Serialize) -> Self {
+        let value = serde_json::to_value(input).expect("failed to serialize tool input");
+        Self::ready(value)
+    }
+
+    pub fn ready(value: serde_json::Value) -> Self {
+        let (partial_tx, partial_rx) = mpsc::unbounded();
+        drop(partial_tx);
+        let (final_tx, final_rx) = oneshot::channel();
+        final_tx.send(value).ok();
+        Self {
+            partial_rx,
+            final_rx,
+            _phantom: PhantomData,
+        }
+    }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn test() -> (ToolInputSender, Self) {
+        let (sender, input) = ToolInputSender::channel();
+        (sender, input.cast())
+    }
+
+    /// Wait for the final deserialized input, ignoring all partial updates.
+    /// Non-streaming tools can use this to wait until the whole input is available.
+    pub async fn recv(mut self) -> Result<T> {
+        // Drain any remaining partials
+        while self.partial_rx.next().await.is_some() {}
+        let value = self
+            .final_rx
+            .await
+            .map_err(|_| anyhow!("tool input was not fully received"))?;
+        serde_json::from_value(value).map_err(Into::into)
+    }
+
+    /// Returns the next partial JSON snapshot, or `None` when input is complete.
+    /// Once this returns `None`, call `recv()` to get the final input.
+    pub async fn recv_partial(&mut self) -> Option<serde_json::Value> {
+        self.partial_rx.next().await
+    }
+
+    fn cast<U: DeserializeOwned>(self) -> ToolInput<U> {
+        ToolInput {
+            partial_rx: self.partial_rx,
+            final_rx: self.final_rx,
+            _phantom: PhantomData,
+        }
+    }
+}
+
+pub struct ToolInputSender {
+    partial_tx: mpsc::UnboundedSender<serde_json::Value>,
+    final_tx: Option<oneshot::Sender<serde_json::Value>>,
+}
+
+impl ToolInputSender {
+    pub(crate) fn channel() -> (Self, ToolInput<serde_json::Value>) {
+        let (partial_tx, partial_rx) = mpsc::unbounded();
+        let (final_tx, final_rx) = oneshot::channel();
+        let sender = Self {
+            partial_tx,
+            final_tx: Some(final_tx),
+        };
+        let input = ToolInput {
+            partial_rx,
+            final_rx,
+            _phantom: PhantomData,
+        };
+        (sender, input)
+    }
+
+    pub(crate) fn send_partial(&self, value: serde_json::Value) {
+        self.partial_tx.unbounded_send(value).ok();
+    }
+
+    pub(crate) fn send_final(mut self, value: serde_json::Value) {
+        // Close the partial channel so recv_partial() returns None
+        self.partial_tx.close_channel();
+        if let Some(final_tx) = self.final_tx.take() {
+            final_tx.send(value).ok();
+        }
+    }
+}
+
 pub trait AgentTool
 where
     Self: 'static + Sized,
@@ -2828,6 +3118,11 @@ where
         language_model::tool_schema::root_schema_for::<Self::Input>(format)
     }
 
+    /// Returns whether the tool supports streaming of tool use parameters.
+    fn supports_input_streaming() -> bool {
+        false
+    }
+
     /// Some tools rely on a provider for the underlying billing or other reasons.
     /// Allow the tool to check if they are compatible, or should be filtered out.
     fn supports_provider(_provider: &LanguageModelProviderId) -> bool {
@@ -2843,7 +3138,7 @@ where
     /// still signaling whether the invocation succeeded or failed.
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>>;
@@ -2888,13 +3183,16 @@ pub trait AnyAgentTool {
     fn kind(&self) -> acp::ToolKind;
     fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString;
     fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value>;
+    fn supports_input_streaming(&self) -> bool {
+        false
+    }
     fn supports_provider(&self, _provider: &LanguageModelProviderId) -> bool {
         true
     }
     /// See [`AgentTool::run`] for why this returns `Result<AgentToolOutput, AgentToolOutput>`.
     fn run(
         self: Arc<Self>,
-        input: serde_json::Value,
+        input: ToolInput<serde_json::Value>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<AgentToolOutput, AgentToolOutput>>;
@@ -2923,6 +3221,10 @@ where
         T::kind()
     }
 
+    fn supports_input_streaming(&self) -> bool {
+        T::supports_input_streaming()
+    }
+
     fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString {
         let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input);
         self.0.initial_title(parsed_input, _cx)
@@ -2940,35 +3242,31 @@ where
 
     fn run(
         self: Arc<Self>,
-        input: serde_json::Value,
+        input: ToolInput<serde_json::Value>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<AgentToolOutput, AgentToolOutput>> {
-        cx.spawn(async move |cx| {
-            let input: T::Input = serde_json::from_value(input).map_err(|e| {
-                AgentToolOutput::from_error(format!("Failed to parse tool input: {e}"))
-            })?;
-            let task = cx.update(|cx| self.0.clone().run(input, event_stream, cx));
-            match task.await {
-                Ok(output) => {
-                    let raw_output = serde_json::to_value(&output).map_err(|e| {
-                        AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}"))
-                    })?;
-                    Ok(AgentToolOutput {
-                        llm_output: output.into(),
-                        raw_output,
-                    })
-                }
-                Err(error_output) => {
-                    let raw_output = serde_json::to_value(&error_output).unwrap_or_else(|e| {
-                        log::error!("Failed to serialize tool error output: {e}");
-                        serde_json::Value::Null
-                    });
-                    Err(AgentToolOutput {
-                        llm_output: error_output.into(),
-                        raw_output,
-                    })
-                }
+        let tool_input: ToolInput<T::Input> = input.cast();
+        let task = self.0.clone().run(tool_input, event_stream, cx);
+        cx.spawn(async move |_cx| match task.await {
+            Ok(output) => {
+                let raw_output = serde_json::to_value(&output).map_err(|e| {
+                    AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}"))
+                })?;
+                Ok(AgentToolOutput {
+                    llm_output: output.into(),
+                    raw_output,
+                })
+            }
+            Err(error_output) => {
+                let raw_output = serde_json::to_value(&error_output).unwrap_or_else(|e| {
+                    log::error!("Failed to serialize tool error output: {e}");
+                    serde_json::Value::Null
+                });
+                Err(AgentToolOutput {
+                    llm_output: error_output.into(),
+                    raw_output,
+                })
             }
         })
     }
@@ -3592,6 +3890,7 @@ mod tests {
     use super::*;
     use gpui::TestAppContext;
     use language_model::LanguageModelToolUseId;
+    use language_model::fake_provider::FakeLanguageModel;
     use serde_json::json;
     use std::sync::Arc;
 
@@ -3629,6 +3928,181 @@ mod tests {
         })
     }
 
+    fn setup_parent_with_subagents(
+        cx: &mut TestAppContext,
+        parent: &Entity<Thread>,
+        count: usize,
+    ) -> Vec<Entity<Thread>> {
+        cx.update(|cx| {
+            let mut subagents = Vec::new();
+            for _ in 0..count {
+                let subagent = cx.new(|cx| Thread::new_subagent(parent, cx));
+                parent.update(cx, |thread, _cx| {
+                    thread.register_running_subagent(subagent.downgrade());
+                });
+                subagents.push(subagent);
+            }
+            subagents
+        })
+    }
+
+    #[gpui::test]
+    async fn test_set_model_propagates_to_subagents(cx: &mut TestAppContext) {
+        let (parent, _event_stream) = setup_thread_for_test(cx).await;
+        let subagents = setup_parent_with_subagents(cx, &parent, 2);
+
+        let new_model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel::with_id_and_thinking(
+            "test-provider",
+            "new-model",
+            "New Model",
+            false,
+        ));
+
+        cx.update(|cx| {
+            parent.update(cx, |thread, cx| {
+                thread.set_model(new_model, cx);
+            });
+
+            for subagent in &subagents {
+                let subagent_model_id = subagent.read(cx).model().unwrap().id();
+                assert_eq!(
+                    subagent_model_id.0.as_ref(),
+                    "new-model",
+                    "Subagent model should match parent model after set_model"
+                );
+            }
+        });
+    }
+
+    #[gpui::test]
+    async fn test_set_summarization_model_propagates_to_subagents(cx: &mut TestAppContext) {
+        let (parent, _event_stream) = setup_thread_for_test(cx).await;
+        let subagents = setup_parent_with_subagents(cx, &parent, 2);
+
+        let summary_model: Arc<dyn LanguageModel> =
+            Arc::new(FakeLanguageModel::with_id_and_thinking(
+                "test-provider",
+                "summary-model",
+                "Summary Model",
+                false,
+            ));
+
+        cx.update(|cx| {
+            parent.update(cx, |thread, cx| {
+                thread.set_summarization_model(Some(summary_model), cx);
+            });
+
+            for subagent in &subagents {
+                let subagent_summary_id = subagent.read(cx).summarization_model().unwrap().id();
+                assert_eq!(
+                    subagent_summary_id.0.as_ref(),
+                    "summary-model",
+                    "Subagent summarization model should match parent after set_summarization_model"
+                );
+            }
+        });
+    }
+
+    #[gpui::test]
+    async fn test_set_thinking_enabled_propagates_to_subagents(cx: &mut TestAppContext) {
+        let (parent, _event_stream) = setup_thread_for_test(cx).await;
+        let subagents = setup_parent_with_subagents(cx, &parent, 2);
+
+        cx.update(|cx| {
+            parent.update(cx, |thread, cx| {
+                thread.set_thinking_enabled(true, cx);
+            });
+
+            for subagent in &subagents {
+                assert!(
+                    subagent.read(cx).thinking_enabled(),
+                    "Subagent thinking should be enabled after parent enables it"
+                );
+            }
+
+            parent.update(cx, |thread, cx| {
+                thread.set_thinking_enabled(false, cx);
+            });
+
+            for subagent in &subagents {
+                assert!(
+                    !subagent.read(cx).thinking_enabled(),
+                    "Subagent thinking should be disabled after parent disables it"
+                );
+            }
+        });
+    }
+
+    #[gpui::test]
+    async fn test_set_thinking_effort_propagates_to_subagents(cx: &mut TestAppContext) {
+        let (parent, _event_stream) = setup_thread_for_test(cx).await;
+        let subagents = setup_parent_with_subagents(cx, &parent, 2);
+
+        cx.update(|cx| {
+            parent.update(cx, |thread, cx| {
+                thread.set_thinking_effort(Some("high".to_string()), cx);
+            });
+
+            for subagent in &subagents {
+                assert_eq!(
+                    subagent.read(cx).thinking_effort().map(|s| s.as_str()),
+                    Some("high"),
+                    "Subagent thinking effort should match parent"
+                );
+            }
+
+            parent.update(cx, |thread, cx| {
+                thread.set_thinking_effort(None, cx);
+            });
+
+            for subagent in &subagents {
+                assert_eq!(
+                    subagent.read(cx).thinking_effort(),
+                    None,
+                    "Subagent thinking effort should be None after parent clears it"
+                );
+            }
+        });
+    }
+
+    #[gpui::test]
+    async fn test_set_speed_propagates_to_subagents(cx: &mut TestAppContext) {
+        let (parent, _event_stream) = setup_thread_for_test(cx).await;
+        let subagents = setup_parent_with_subagents(cx, &parent, 2);
+
+        cx.update(|cx| {
+            parent.update(cx, |thread, cx| {
+                thread.set_speed(Speed::Fast, cx);
+            });
+
+            for subagent in &subagents {
+                assert_eq!(
+                    subagent.read(cx).speed(),
+                    Some(Speed::Fast),
+                    "Subagent speed should match parent after set_speed"
+                );
+            }
+        });
+    }
+
+    #[gpui::test]
+    async fn test_dropped_subagent_does_not_panic(cx: &mut TestAppContext) {
+        let (parent, _event_stream) = setup_thread_for_test(cx).await;
+        let subagents = setup_parent_with_subagents(cx, &parent, 1);
+
+        // Drop the subagent so the WeakEntity can no longer be upgraded
+        drop(subagents);
+
+        // Should not panic even though the subagent was dropped
+        cx.update(|cx| {
+            parent.update(cx, |thread, cx| {
+                thread.set_thinking_enabled(true, cx);
+                thread.set_speed(Speed::Fast, cx);
+                thread.set_thinking_effort(Some("high".to_string()), cx);
+            });
+        });
+    }
+
     #[gpui::test]
     async fn test_handle_tool_use_json_parse_error_adds_tool_use_to_content(
         cx: &mut TestAppContext,

crates/agent/src/thread_store.rs 🔗

@@ -2,40 +2,12 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase};
 use agent_client_protocol as acp;
 use anyhow::{Result, anyhow};
 use gpui::{App, Context, Entity, Global, Task, prelude::*};
-use project::Project;
-use std::rc::Rc;
+use util::path_list::PathList;
 
 struct GlobalThreadStore(Entity<ThreadStore>);
 
 impl Global for GlobalThreadStore {}
 
-// TODO: Remove once ACP thread loading is fully handled elsewhere.
-pub fn load_agent_thread(
-    session_id: acp::SessionId,
-    thread_store: Entity<ThreadStore>,
-    project: Entity<Project>,
-    cx: &mut App,
-) -> Task<Result<Entity<crate::Thread>>> {
-    use agent_servers::{AgentServer, AgentServerDelegate};
-
-    let server = Rc::new(crate::NativeAgentServer::new(
-        project.read(cx).fs().clone(),
-        thread_store,
-    ));
-    let delegate = AgentServerDelegate::new(
-        project.read(cx).agent_server_store().clone(),
-        project.clone(),
-        None,
-        None,
-    );
-    let connection = server.connect(None, delegate, cx);
-    cx.spawn(async move |cx| {
-        let (agent, _) = connection.await?;
-        let agent = agent.downcast::<crate::NativeAgentConnection>().unwrap();
-        cx.update(|cx| agent.load_thread(session_id, cx)).await
-    })
-}
-
 pub struct ThreadStore {
     threads: Vec<DbThreadMetadata>,
 }
@@ -50,6 +22,10 @@ impl ThreadStore {
         cx.global::<GlobalThreadStore>().0.clone()
     }
 
+    pub fn try_global(cx: &App) -> Option<Entity<Self>> {
+        cx.try_global::<GlobalThreadStore>().map(|g| g.0.clone())
+    }
+
     pub fn new(cx: &mut Context<Self>) -> Self {
         let this = Self {
             threads: Vec::new(),
@@ -78,12 +54,13 @@ impl ThreadStore {
         &mut self,
         id: acp::SessionId,
         thread: crate::DbThread,
+        folder_paths: PathList,
         cx: &mut Context<Self>,
     ) -> Task<Result<()>> {
         let database_future = ThreadsDatabase::connect(cx);
         cx.spawn(async move |this, cx| {
             let database = database_future.await.map_err(|err| anyhow!(err))?;
-            database.save_thread(id, thread).await?;
+            database.save_thread(id, thread, folder_paths).await?;
             this.update(cx, |this, cx| this.reload(cx))
         })
     }
@@ -135,6 +112,13 @@ impl ThreadStore {
     pub fn entries(&self) -> impl Iterator<Item = DbThreadMetadata> + '_ {
         self.threads.iter().cloned()
     }
+
+    /// Returns threads whose folder_paths match the given paths exactly.
+    pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator<Item = &DbThreadMetadata> {
+        self.threads
+            .iter()
+            .filter(move |thread| &thread.folder_paths == paths)
+    }
 }
 
 #[cfg(test)]
@@ -162,7 +146,11 @@ mod tests {
             profile: None,
             imported: false,
             subagent_context: None,
-            git_worktree_info: None,
+            speed: None,
+            thinking_enabled: false,
+            thinking_effort: None,
+            draft_prompt: None,
+            ui_scroll_position: None,
         }
     }
 
@@ -184,12 +172,12 @@ mod tests {
         );
 
         let save_older = thread_store.update(cx, |store, cx| {
-            store.save_thread(older_id.clone(), older_thread, cx)
+            store.save_thread(older_id.clone(), older_thread, PathList::default(), cx)
         });
         save_older.await.unwrap();
 
         let save_newer = thread_store.update(cx, |store, cx| {
-            store.save_thread(newer_id.clone(), newer_thread, cx)
+            store.save_thread(newer_id.clone(), newer_thread, PathList::default(), cx)
         });
         save_newer.await.unwrap();
 
@@ -212,8 +200,9 @@ mod tests {
             Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(),
         );
 
-        let save_task =
-            thread_store.update(cx, |store, cx| store.save_thread(thread_id, thread, cx));
+        let save_task = thread_store.update(cx, |store, cx| {
+            store.save_thread(thread_id, thread, PathList::default(), cx)
+        });
         save_task.await.unwrap();
 
         cx.run_until_parked();
@@ -244,11 +233,11 @@ mod tests {
         );
 
         let save_first = thread_store.update(cx, |store, cx| {
-            store.save_thread(first_id.clone(), first_thread, cx)
+            store.save_thread(first_id.clone(), first_thread, PathList::default(), cx)
         });
         save_first.await.unwrap();
         let save_second = thread_store.update(cx, |store, cx| {
-            store.save_thread(second_id.clone(), second_thread, cx)
+            store.save_thread(second_id.clone(), second_thread, PathList::default(), cx)
         });
         save_second.await.unwrap();
         cx.run_until_parked();
@@ -281,11 +270,11 @@ mod tests {
         );
 
         let save_first = thread_store.update(cx, |store, cx| {
-            store.save_thread(first_id.clone(), first_thread, cx)
+            store.save_thread(first_id.clone(), first_thread, PathList::default(), cx)
         });
         save_first.await.unwrap();
         let save_second = thread_store.update(cx, |store, cx| {
-            store.save_thread(second_id.clone(), second_thread, cx)
+            store.save_thread(second_id.clone(), second_thread, PathList::default(), cx)
         });
         save_second.await.unwrap();
         cx.run_until_parked();
@@ -295,7 +284,7 @@ mod tests {
             Utc.with_ymd_and_hms(2024, 1, 3, 0, 0, 0).unwrap(),
         );
         let update_task = thread_store.update(cx, |store, cx| {
-            store.save_thread(first_id.clone(), updated_first, cx)
+            store.save_thread(first_id.clone(), updated_first, PathList::default(), cx)
         });
         update_task.await.unwrap();
         cx.run_until_parked();
@@ -305,4 +294,50 @@ mod tests {
         assert_eq!(entries[0].id, first_id);
         assert_eq!(entries[1].id, second_id);
     }
+
+    #[gpui::test]
+    async fn test_threads_for_paths_filters_correctly(cx: &mut TestAppContext) {
+        let thread_store = cx.new(|cx| ThreadStore::new(cx));
+        cx.run_until_parked();
+
+        let project_a_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-a")]);
+        let project_b_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-b")]);
+
+        let thread_a = make_thread(
+            "Thread in A",
+            Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(),
+        );
+        let thread_b = make_thread(
+            "Thread in B",
+            Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap(),
+        );
+        let thread_a_id = session_id("thread-a");
+        let thread_b_id = session_id("thread-b");
+
+        let save_a = thread_store.update(cx, |store, cx| {
+            store.save_thread(thread_a_id.clone(), thread_a, project_a_paths.clone(), cx)
+        });
+        save_a.await.unwrap();
+
+        let save_b = thread_store.update(cx, |store, cx| {
+            store.save_thread(thread_b_id.clone(), thread_b, project_b_paths.clone(), cx)
+        });
+        save_b.await.unwrap();
+
+        cx.run_until_parked();
+
+        thread_store.read_with(cx, |store, _cx| {
+            let a_threads: Vec<_> = store.threads_for_paths(&project_a_paths).collect();
+            assert_eq!(a_threads.len(), 1);
+            assert_eq!(a_threads[0].id, thread_a_id);
+
+            let b_threads: Vec<_> = store.threads_for_paths(&project_b_paths).collect();
+            assert_eq!(b_threads.len(), 1);
+            assert_eq!(b_threads[0].id, thread_b_id);
+
+            let nonexistent = PathList::new(&[std::path::PathBuf::from("/nonexistent")]);
+            let no_threads: Vec<_> = store.threads_for_paths(&nonexistent).collect();
+            assert!(no_threads.is_empty());
+        });
+    }
 }

crates/agent/src/tools.rs 🔗

@@ -17,6 +17,7 @@ mod save_file_tool;
 mod spawn_agent_tool;
 mod streaming_edit_file_tool;
 mod terminal_tool;
+mod tool_edit_parser;
 mod tool_permissions;
 mod web_search_tool;
 
@@ -100,6 +101,7 @@ macro_rules! tools {
                     name: T::NAME.to_string(),
                     description: T::description().to_string(),
                     input_schema: T::input_schema(LanguageModelToolSchemaFormat::JsonSchema).to_value(),
+                    use_input_streaming: T::supports_input_streaming(),
                 }
             }
             [

crates/agent/src/tools/context_server_registry.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream};
+use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream, ToolInput};
 use agent_client_protocol::ToolKind;
 use anyhow::Result;
 use collections::{BTreeMap, HashMap};
@@ -329,7 +329,7 @@ impl AnyAgentTool for ContextServerTool {
 
     fn run(
         self: Arc<Self>,
-        input: serde_json::Value,
+        input: ToolInput<serde_json::Value>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<AgentToolOutput, AgentToolOutput>> {
@@ -339,14 +339,15 @@ impl AnyAgentTool for ContextServerTool {
         let tool_name = self.tool.name.clone();
         let tool_id = mcp_tool_id(&self.server_id.0, &self.tool.name);
         let display_name = self.tool.name.clone();
-        let authorize = event_stream.authorize_third_party_tool(
-            self.initial_title(input.clone(), cx),
-            tool_id,
-            display_name,
-            cx,
-        );
+        let initial_title = self.initial_title(serde_json::Value::Null, cx);
+        let authorize =
+            event_stream.authorize_third_party_tool(initial_title, tool_id, display_name, cx);
 
         cx.spawn(async move |_cx| {
+            let input = input.recv().await.map_err(|e| {
+                AgentToolOutput::from_error(format!("Failed to receive tool input: {e}"))
+            })?;
+
             authorize.await.map_err(|e| AgentToolOutput::from_error(e.to_string()))?;
 
             let Some(protocol) = server.client() else {

crates/agent/src/tools/copy_path_tool.rs 🔗

@@ -2,7 +2,9 @@ use super::tool_permissions::{
     SensitiveSettingsKind, authorize_symlink_escapes, canonicalize_worktree_roots,
     collect_symlink_escapes, sensitive_settings_kind,
 };
-use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_paths};
+use crate::{
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_paths,
+};
 use agent_client_protocol::ToolKind;
 use agent_settings::AgentSettings;
 use futures::FutureExt as _;
@@ -79,19 +81,24 @@ impl AgentTool for CopyPathTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let settings = AgentSettings::get_global(cx);
-        let paths = vec![input.source_path.clone(), input.destination_path.clone()];
-        let decision = decide_permission_for_paths(Self::NAME, &paths, settings);
-        if let ToolPermissionDecision::Deny(reason) = decision {
-            return Task::ready(Err(reason));
-        }
-
         let project = self.project.clone();
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            let paths = vec![input.source_path.clone(), input.destination_path.clone()];
+            let decision = cx.update(|cx| {
+                decide_permission_for_paths(Self::NAME, &paths, &AgentSettings::get_global(cx))
+            });
+            if let ToolPermissionDecision::Deny(reason) = decision {
+                return Err(reason);
+            }
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -248,7 +255,7 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let task = cx.update(|cx| tool.run(input, event_stream, cx));
+        let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
         let auth = event_rx.expect_authorization().await;
         let title = auth.tool_call.fields.title.as_deref().unwrap_or("");
@@ -302,7 +309,7 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let task = cx.update(|cx| tool.run(input, event_stream, cx));
+        let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
         let auth = event_rx.expect_authorization().await;
         drop(auth);
@@ -354,7 +361,7 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let task = cx.update(|cx| tool.run(input, event_stream, cx));
+        let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
         let auth = event_rx.expect_authorization().await;
         let title = auth.tool_call.fields.title.as_deref().unwrap_or("");
@@ -430,7 +437,9 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let result = cx.update(|cx| tool.run(input, event_stream, cx)).await;
+        let result = cx
+            .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx))
+            .await;
 
         assert!(result.is_err(), "Tool should fail when policy denies");
         assert!(

crates/agent/src/tools/create_directory_tool.rs 🔗

@@ -13,7 +13,9 @@ use settings::Settings;
 use std::sync::Arc;
 use util::markdown::MarkdownInlineCode;
 
-use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path};
+use crate::{
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path,
+};
 use std::path::Path;
 
 /// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created.
@@ -68,21 +70,26 @@ impl AgentTool for CreateDirectoryTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let settings = AgentSettings::get_global(cx);
-        let decision = decide_permission_for_path(Self::NAME, &input.path, settings);
+        let project = self.project.clone();
+        cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            let decision = cx.update(|cx| {
+                decide_permission_for_path(Self::NAME, &input.path, AgentSettings::get_global(cx))
+            });
 
-        if let ToolPermissionDecision::Deny(reason) = decision {
-            return Task::ready(Err(reason));
-        }
+            if let ToolPermissionDecision::Deny(reason) = decision {
+                return Err(reason);
+            }
 
-        let destination_path: Arc<str> = input.path.as_str().into();
+            let destination_path: Arc<str> = input.path.as_str().into();
 
-        let project = self.project.clone();
-        cx.spawn(async move |cx| {
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -218,9 +225,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.run(
-                CreateDirectoryToolInput {
+                ToolInput::resolved(CreateDirectoryToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -277,9 +284,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.run(
-                CreateDirectoryToolInput {
+                ToolInput::resolved(CreateDirectoryToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -336,9 +343,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.run(
-                CreateDirectoryToolInput {
+                ToolInput::resolved(CreateDirectoryToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -415,9 +422,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.run(
-                    CreateDirectoryToolInput {
+                    ToolInput::resolved(CreateDirectoryToolInput {
                         path: "project/link_to_external".into(),
-                    },
+                    }),
                     event_stream,
                     cx,
                 )

crates/agent/src/tools/delete_path_tool.rs 🔗

@@ -2,7 +2,9 @@ use super::tool_permissions::{
     SensitiveSettingsKind, authorize_symlink_access, canonicalize_worktree_roots,
     detect_symlink_escape, sensitive_settings_kind,
 };
-use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path};
+use crate::{
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path,
+};
 use action_log::ActionLog;
 use agent_client_protocol::ToolKind;
 use agent_settings::AgentSettings;
@@ -71,22 +73,27 @@ impl AgentTool for DeletePathTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let path = input.path;
-
-        let settings = AgentSettings::get_global(cx);
-        let decision = decide_permission_for_path(Self::NAME, &path, settings);
-
-        if let ToolPermissionDecision::Deny(reason) = decision {
-            return Task::ready(Err(reason));
-        }
-
         let project = self.project.clone();
         let action_log = self.action_log.clone();
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            let path = input.path;
+
+            let decision = cx.update(|cx| {
+                decide_permission_for_path(Self::NAME, &path, AgentSettings::get_global(cx))
+            });
+
+            if let ToolPermissionDecision::Deny(reason) = decision {
+                return Err(reason);
+            }
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -278,9 +285,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.run(
-                DeletePathToolInput {
+                ToolInput::resolved(DeletePathToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -345,9 +352,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.run(
-                DeletePathToolInput {
+                ToolInput::resolved(DeletePathToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -405,9 +412,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.run(
-                DeletePathToolInput {
+                ToolInput::resolved(DeletePathToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -488,9 +495,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.run(
-                    DeletePathToolInput {
+                    ToolInput::resolved(DeletePathToolInput {
                         path: "project/link_to_external".into(),
-                    },
+                    }),
                     event_stream,
                     cx,
                 )

crates/agent/src/tools/diagnostics_tool.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{AgentTool, ToolCallEventStream};
+use crate::{AgentTool, ToolCallEventStream, ToolInput};
 use agent_client_protocol as acp;
 use anyhow::Result;
 use futures::FutureExt as _;
@@ -87,21 +87,27 @@ impl AgentTool for DiagnosticsTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        match input.path {
-            Some(path) if !path.is_empty() => {
-                let Some(project_path) = self.project.read(cx).find_project_path(&path, cx) else {
-                    return Task::ready(Err(format!("Could not find path {path} in project")));
-                };
-
-                let open_buffer_task = self
-                    .project
-                    .update(cx, |project, cx| project.open_buffer(project_path, cx));
+        let project = self.project.clone();
+        cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            match input.path {
+                Some(path) if !path.is_empty() => {
+                    let (_project_path, open_buffer_task) = project.update(cx, |project, cx| {
+                        let Some(project_path) = project.find_project_path(&path, cx) else {
+                            return Err(format!("Could not find path {path} in project"));
+                        };
+                        let task = project.open_buffer(project_path.clone(), cx);
+                        Ok((project_path, task))
+                    })?;
 
-                cx.spawn(async move |cx| {
                     let buffer = futures::select! {
                         result = open_buffer_task.fuse() => result.map_err(|e| e.to_string())?,
                         _ = event_stream.cancelled_by_user().fuse() => {
@@ -135,36 +141,40 @@ impl AgentTool for DiagnosticsTool {
                     } else {
                         Ok(output)
                     }
-                })
-            }
-            _ => {
-                let project = self.project.read(cx);
-                let mut output = String::new();
-                let mut has_diagnostics = false;
-
-                for (project_path, _, summary) in project.diagnostic_summaries(true, cx) {
-                    if summary.error_count > 0 || summary.warning_count > 0 {
-                        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx)
-                        else {
-                            continue;
-                        };
-
-                        has_diagnostics = true;
-                        output.push_str(&format!(
-                            "{}: {} error(s), {} warning(s)\n",
-                            worktree.read(cx).absolutize(&project_path.path).display(),
-                            summary.error_count,
-                            summary.warning_count
-                        ));
-                    }
                 }
+                _ => {
+                    let (output, has_diagnostics) = project.read_with(cx, |project, cx| {
+                        let mut output = String::new();
+                        let mut has_diagnostics = false;
+
+                        for (project_path, _, summary) in project.diagnostic_summaries(true, cx) {
+                            if summary.error_count > 0 || summary.warning_count > 0 {
+                                let Some(worktree) =
+                                    project.worktree_for_id(project_path.worktree_id, cx)
+                                else {
+                                    continue;
+                                };
+
+                                has_diagnostics = true;
+                                output.push_str(&format!(
+                                    "{}: {} error(s), {} warning(s)\n",
+                                    worktree.read(cx).absolutize(&project_path.path).display(),
+                                    summary.error_count,
+                                    summary.warning_count
+                                ));
+                            }
+                        }
+
+                        (output, has_diagnostics)
+                    });
 
-                if has_diagnostics {
-                    Task::ready(Ok(output))
-                } else {
-                    Task::ready(Ok("No errors or warnings found in the project.".into()))
+                    if has_diagnostics {
+                        Ok(output)
+                    } else {
+                        Ok("No errors or warnings found in the project.".into())
+                    }
                 }
             }
-        }
+        })
     }
 }

crates/agent/src/tools/edit_file_tool.rs 🔗

@@ -2,8 +2,8 @@ use super::restore_file_from_disk_tool::RestoreFileFromDiskTool;
 use super::save_file_tool::SaveFileTool;
 use super::tool_permissions::authorize_file_edit;
 use crate::{
-    AgentTool, Templates, Thread, ToolCallEventStream,
-    edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat},
+    AgentTool, Templates, Thread, ToolCallEventStream, ToolInput,
+    edit_agent::{EditAgent, EditAgentOutputEvent, EditFormat},
 };
 use acp_thread::Diff;
 use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields};
@@ -104,8 +104,6 @@ pub enum EditFileToolOutput {
         old_text: Arc<String>,
         #[serde(default)]
         diff: String,
-        #[serde(alias = "raw_output")]
-        edit_agent_output: EditAgentOutput,
     },
     Error {
         error: String,
@@ -237,39 +235,47 @@ impl AgentTool for EditFileTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let Ok(project) = self
-            .thread
-            .read_with(cx, |thread, _cx| thread.project().clone())
-        else {
-            return Task::ready(Err(EditFileToolOutput::Error {
-                error: "thread was dropped".to_string(),
-            }));
-        };
-        let project_path = match resolve_path(&input, project.clone(), cx) {
-            Ok(path) => path,
-            Err(err) => {
-                return Task::ready(Err(EditFileToolOutput::Error {
-                    error: err.to_string(),
-                }));
-            }
-        };
-        let abs_path = project.read(cx).absolute_path(&project_path, cx);
-        if let Some(abs_path) = abs_path.clone() {
-            event_stream.update_fields(
-                ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]),
-            );
-        }
-        let allow_thinking = self
-            .thread
-            .read_with(cx, |thread, _cx| thread.thinking_enabled())
-            .unwrap_or(true);
-
-        let authorize = self.authorize(&input, &event_stream, cx);
         cx.spawn(async move |cx: &mut AsyncApp| {
+            let input = input.recv().await.map_err(|e| EditFileToolOutput::Error {
+                error: format!("Failed to receive tool input: {e}"),
+            })?;
+
+            let project = self
+                .thread
+                .read_with(cx, |thread, _cx| thread.project().clone())
+                .map_err(|_| EditFileToolOutput::Error {
+                    error: "thread was dropped".to_string(),
+                })?;
+
+            let (project_path, abs_path, allow_thinking, update_agent_location, authorize) =
+                cx.update(|cx| {
+                    let project_path = resolve_path(&input, project.clone(), cx).map_err(|err| {
+                        EditFileToolOutput::Error {
+                            error: err.to_string(),
+                        }
+                    })?;
+                    let abs_path = project.read(cx).absolute_path(&project_path, cx);
+                    if let Some(abs_path) = abs_path.clone() {
+                        event_stream.update_fields(
+                            ToolCallUpdateFields::new()
+                                .locations(vec![acp::ToolCallLocation::new(abs_path)]),
+                        );
+                    }
+                    let allow_thinking = self
+                        .thread
+                        .read_with(cx, |thread, _cx| thread.thinking_enabled())
+                        .unwrap_or(true);
+
+                    let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default();
+
+                    let authorize = self.authorize(&input, &event_stream, cx);
+                    Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, update_agent_location, authorize))
+                })?;
+
             let result: anyhow::Result<EditFileToolOutput> = async {
                 authorize.await?;
 
@@ -288,6 +294,7 @@ impl AgentTool for EditFileTool {
                     self.templates.clone(),
                     edit_format,
                     allow_thinking,
+                    update_agent_location,
                 );
 
                 let buffer = project
@@ -298,13 +305,13 @@ impl AgentTool for EditFileTool {
 
                 // Check if the file has been modified since the agent last read it
                 if let Some(abs_path) = abs_path.as_ref() {
-                    let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.update(cx, |thread, cx| {
-                        let last_read = thread.file_read_times.get(abs_path).copied();
+                    let last_read_mtime = action_log.read_with(cx, |log, _| log.file_read_time(abs_path));
+                    let (current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.read_with(cx, |thread, cx| {
                         let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime());
                         let dirty = buffer.read(cx).is_dirty();
                         let has_save = thread.has_tool(SaveFileTool::NAME);
                         let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME);
-                        (last_read, current, dirty, has_save, has_restore)
+                        (current, dirty, has_save, has_restore)
                     })?;
 
                     // Check for unsaved changes first - these indicate modifications we don't know about
@@ -427,7 +434,7 @@ impl AgentTool for EditFileTool {
                     }
                 }
 
-                let edit_agent_output = output.await?;
+                output.await?;
 
                 let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| {
                     let settings = language_settings::language_settings(
@@ -463,17 +470,6 @@ impl AgentTool for EditFileTool {
                     log.buffer_edited(buffer.clone(), cx);
                 });
 
-                // Update the recorded read time after a successful edit so consecutive edits work
-                if let Some(abs_path) = abs_path.as_ref() {
-                    if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
-                        buffer.file().and_then(|file| file.disk_state().mtime())
-                    }) {
-                        self.thread.update(cx, |thread, _| {
-                            thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime);
-                        })?;
-                    }
-                }
-
                 let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
                 let (new_text, unified_diff) = cx
                     .background_spawn({
@@ -519,7 +515,6 @@ impl AgentTool for EditFileTool {
                     new_text,
                     old_text,
                     diff: unified_diff,
-                    edit_agent_output,
                 })
             }.await;
             result
@@ -672,7 +667,11 @@ mod tests {
                     language_registry,
                     Templates::new(),
                 ))
-                .run(input, ToolCallEventStream::test().0, cx)
+                .run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert_eq!(
@@ -881,7 +880,11 @@ mod tests {
                     language_registry.clone(),
                     Templates::new(),
                 ))
-                .run(input, ToolCallEventStream::test().0, cx)
+                .run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             });
 
             // Stream the unformatted content
@@ -940,7 +943,11 @@ mod tests {
                     language_registry,
                     Templates::new(),
                 ))
-                .run(input, ToolCallEventStream::test().0, cx)
+                .run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             });
 
             // Stream the unformatted content
@@ -1027,7 +1034,11 @@ mod tests {
                     language_registry.clone(),
                     Templates::new(),
                 ))
-                .run(input, ToolCallEventStream::test().0, cx)
+                .run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             });
 
             // Stream the content with trailing whitespace
@@ -1082,7 +1093,11 @@ mod tests {
                     language_registry,
                     Templates::new(),
                 ))
-                .run(input, ToolCallEventStream::test().0, cx)
+                .run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             });
 
             // Stream the content with trailing whitespace
@@ -2081,11 +2096,11 @@ mod tests {
             let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
             let edit = cx.update(|cx| {
                 tool.run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "Edit file".into(),
                         path: path!("/main.rs").into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     stream_tx,
                     cx,
                 )
@@ -2111,11 +2126,11 @@ mod tests {
             let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
             let edit = cx.update(|cx| {
                 tool.run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "Edit file".into(),
                         path: path!("/main.rs").into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     stream_tx,
                     cx,
                 )
@@ -2139,11 +2154,11 @@ mod tests {
             let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
             let edit = cx.update(|cx| {
                 tool.run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "Edit file".into(),
                         path: path!("/main.rs").into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     stream_tx,
                     cx,
                 )
@@ -2186,24 +2201,28 @@ mod tests {
         let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
 
         // Initially, file_read_times should be empty
-        let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty());
+        let is_empty = action_log.read_with(cx, |action_log, _| {
+            action_log
+                .file_read_time(path!("/root/test.txt").as_ref())
+                .is_none()
+        });
         assert!(is_empty, "file_read_times should start empty");
 
         // Create read tool
         let read_tool = Arc::new(crate::ReadFileTool::new(
-            thread.downgrade(),
             project.clone(),
-            action_log,
+            action_log.clone(),
+            true,
         ));
 
         // Read the file to record the read time
         cx.update(|cx| {
             read_tool.clone().run(
-                crate::ReadFileToolInput {
+                ToolInput::resolved(crate::ReadFileToolInput {
                     path: "root/test.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 ToolCallEventStream::test().0,
                 cx,
             )
@@ -2212,12 +2231,9 @@ mod tests {
         .unwrap();
 
         // Verify that file_read_times now contains an entry for the file
-        let has_entry = thread.read_with(cx, |thread, _| {
-            thread.file_read_times.len() == 1
-                && thread
-                    .file_read_times
-                    .keys()
-                    .any(|path| path.ends_with("test.txt"))
+        let has_entry = action_log.read_with(cx, |log, _| {
+            log.file_read_time(path!("/root/test.txt").as_ref())
+                .is_some()
         });
         assert!(
             has_entry,
@@ -2227,11 +2243,11 @@ mod tests {
         // Read the file again - should update the entry
         cx.update(|cx| {
             read_tool.clone().run(
-                crate::ReadFileToolInput {
+                ToolInput::resolved(crate::ReadFileToolInput {
                     path: "root/test.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 ToolCallEventStream::test().0,
                 cx,
             )
@@ -2239,11 +2255,14 @@ mod tests {
         .await
         .unwrap();
 
-        // Should still have exactly one entry
-        let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1);
+        // Should still have an entry after re-reading
+        let has_entry = action_log.read_with(cx, |log, _| {
+            log.file_read_time(path!("/root/test.txt").as_ref())
+                .is_some()
+        });
         assert!(
-            has_one_entry,
-            "file_read_times should still have one entry after re-reading"
+            has_entry,
+            "file_read_times should still have an entry after re-reading"
         );
     }
 
@@ -2283,11 +2302,7 @@ mod tests {
         let languages = project.read_with(cx, |project, _| project.languages().clone());
         let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
 
-        let read_tool = Arc::new(crate::ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log,
-        ));
+        let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true));
         let edit_tool = Arc::new(EditFileTool::new(
             project.clone(),
             thread.downgrade(),
@@ -2298,11 +2313,11 @@ mod tests {
         // Read the file first
         cx.update(|cx| {
             read_tool.clone().run(
-                crate::ReadFileToolInput {
+                ToolInput::resolved(crate::ReadFileToolInput {
                     path: "root/test.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 ToolCallEventStream::test().0,
                 cx,
             )
@@ -2314,11 +2329,11 @@ mod tests {
         let edit_result = {
             let edit_task = cx.update(|cx| {
                 edit_tool.clone().run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "First edit".into(),
                         path: "root/test.txt".into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -2343,11 +2358,11 @@ mod tests {
         let edit_result = {
             let edit_task = cx.update(|cx| {
                 edit_tool.clone().run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "Second edit".into(),
                         path: "root/test.txt".into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -2397,11 +2412,7 @@ mod tests {
         let languages = project.read_with(cx, |project, _| project.languages().clone());
         let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
 
-        let read_tool = Arc::new(crate::ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log,
-        ));
+        let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true));
         let edit_tool = Arc::new(EditFileTool::new(
             project.clone(),
             thread.downgrade(),
@@ -2412,11 +2423,11 @@ mod tests {
         // Read the file first
         cx.update(|cx| {
             read_tool.clone().run(
-                crate::ReadFileToolInput {
+                ToolInput::resolved(crate::ReadFileToolInput {
                     path: "root/test.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 ToolCallEventStream::test().0,
                 cx,
             )
@@ -2456,11 +2467,11 @@ mod tests {
         let result = cx
             .update(|cx| {
                 edit_tool.clone().run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "Edit after external change".into(),
                         path: "root/test.txt".into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -2508,11 +2519,7 @@ mod tests {
         let languages = project.read_with(cx, |project, _| project.languages().clone());
         let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
 
-        let read_tool = Arc::new(crate::ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log,
-        ));
+        let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true));
         let edit_tool = Arc::new(EditFileTool::new(
             project.clone(),
             thread.downgrade(),
@@ -2523,11 +2530,11 @@ mod tests {
         // Read the file first
         cx.update(|cx| {
             read_tool.clone().run(
-                crate::ReadFileToolInput {
+                ToolInput::resolved(crate::ReadFileToolInput {
                     path: "root/test.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 ToolCallEventStream::test().0,
                 cx,
             )
@@ -2560,11 +2567,11 @@ mod tests {
         let result = cx
             .update(|cx| {
                 edit_tool.clone().run(
-                    EditFileToolInput {
+                    ToolInput::resolved(EditFileToolInput {
                         display_description: "Edit with dirty buffer".into(),
                         path: "root/test.txt".into(),
                         mode: EditFileMode::Edit,
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )

crates/agent/src/tools/fetch_tool.rs 🔗

@@ -16,7 +16,8 @@ use ui::SharedString;
 use util::markdown::{MarkdownEscaped, MarkdownInlineCode};
 
 use crate::{
-    AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_from_settings,
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision,
+    decide_permission_from_settings,
 };
 
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
@@ -141,41 +142,52 @@ impl AgentTool for FetchTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let settings = AgentSettings::get_global(cx);
-        let decision =
-            decide_permission_from_settings(Self::NAME, std::slice::from_ref(&input.url), settings);
-
-        let authorize = match decision {
-            ToolPermissionDecision::Allow => None,
-            ToolPermissionDecision::Deny(reason) => {
-                return Task::ready(Err(reason));
-            }
-            ToolPermissionDecision::Confirm => {
-                let context =
-                    crate::ToolPermissionContext::new(Self::NAME, vec![input.url.clone()]);
-                Some(event_stream.authorize(
-                    format!("Fetch {}", MarkdownInlineCode(&input.url)),
-                    context,
-                    cx,
-                ))
-            }
-        };
+        let http_client = self.http_client.clone();
+        cx.spawn(async move |cx| {
+            let input: FetchToolInput = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            let decision = cx.update(|cx| {
+                decide_permission_from_settings(
+                    Self::NAME,
+                    std::slice::from_ref(&input.url),
+                    AgentSettings::get_global(cx),
+                )
+            });
+
+            let authorize = match decision {
+                ToolPermissionDecision::Allow => None,
+                ToolPermissionDecision::Deny(reason) => {
+                    return Err(reason);
+                }
+                ToolPermissionDecision::Confirm => Some(cx.update(|cx| {
+                    let context =
+                        crate::ToolPermissionContext::new(Self::NAME, vec![input.url.clone()]);
+                    event_stream.authorize(
+                        format!("Fetch {}", MarkdownInlineCode(&input.url)),
+                        context,
+                        cx,
+                    )
+                })),
+            };
 
-        let fetch_task = cx.background_spawn({
-            let http_client = self.http_client.clone();
-            async move {
-                if let Some(authorize) = authorize {
-                    authorize.await?;
+            let fetch_task = cx.background_spawn({
+                let http_client = http_client.clone();
+                let url = input.url.clone();
+                async move {
+                    if let Some(authorize) = authorize {
+                        authorize.await?;
+                    }
+                    Self::build_message(http_client, &url).await
                 }
-                Self::build_message(http_client, &input.url).await
-            }
-        });
+            });
 
-        cx.foreground_executor().spawn(async move {
             let text = futures::select! {
                 result = fetch_task.fuse() => result.map_err(|e| e.to_string())?,
                 _ = event_stream.cancelled_by_user().fuse() => {

crates/agent/src/tools/find_path_tool.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{AgentTool, ToolCallEventStream};
+use crate::{AgentTool, ToolCallEventStream, ToolInput};
 use agent_client_protocol as acp;
 use anyhow::{Result, anyhow};
 use futures::FutureExt as _;
@@ -121,13 +121,18 @@ impl AgentTool for FindPathTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let search_paths_task = search_paths(&input.glob, self.project.clone(), cx);
+        let project = self.project.clone();
+        cx.spawn(async move |cx| {
+            let input = input.recv().await.map_err(|e| FindPathToolOutput::Error {
+                error: format!("Failed to receive tool input: {e}"),
+            })?;
+
+            let search_paths_task = cx.update(|cx| search_paths(&input.glob, project, cx));
 
-        cx.background_spawn(async move {
             let matches = futures::select! {
                 result = search_paths_task.fuse() => result.map_err(|e| FindPathToolOutput::Error { error: e.to_string() })?,
                 _ = event_stream.cancelled_by_user().fuse() => {

crates/agent/src/tools/grep_tool.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{AgentTool, ToolCallEventStream};
+use crate::{AgentTool, ToolCallEventStream, ToolInput};
 use agent_client_protocol as acp;
 use anyhow::Result;
 use futures::{FutureExt as _, StreamExt};
@@ -114,66 +114,64 @@ impl AgentTool for GrepTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
         const CONTEXT_LINES: u32 = 2;
         const MAX_ANCESTOR_LINES: u32 = 10;
 
-        let path_style = self.project.read(cx).path_style(cx);
-
-        let include_matcher = match PathMatcher::new(
-            input
-                .include_pattern
-                .as_ref()
-                .into_iter()
-                .collect::<Vec<_>>(),
-            path_style,
-        ) {
-            Ok(matcher) => matcher,
-            Err(error) => {
-                return Task::ready(Err(format!("invalid include glob pattern: {error}")));
-            }
-        };
-
-        // Exclude global file_scan_exclusions and private_files settings
-        let exclude_matcher = {
-            let global_settings = WorktreeSettings::get_global(cx);
-            let exclude_patterns = global_settings
-                .file_scan_exclusions
-                .sources()
-                .chain(global_settings.private_files.sources());
-
-            match PathMatcher::new(exclude_patterns, path_style) {
-                Ok(matcher) => matcher,
-                Err(error) => {
-                    return Task::ready(Err(format!("invalid exclude pattern: {error}")));
-                }
-            }
-        };
-
-        let query = match SearchQuery::regex(
-            &input.regex,
-            false,
-            input.case_sensitive,
-            false,
-            false,
-            include_matcher,
-            exclude_matcher,
-            true, // Always match file include pattern against *full project paths* that start with a project root.
-            None,
-        ) {
-            Ok(query) => query,
-            Err(error) => return Task::ready(Err(error.to_string())),
-        };
-
-        let results = self
-            .project
-            .update(cx, |project, cx| project.search(query, cx));
-
-        let project = self.project.downgrade();
+        let project = self.project.clone();
         cx.spawn(async move |cx|  {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            let results = cx.update(|cx| {
+                let path_style = project.read(cx).path_style(cx);
+
+                let include_matcher = PathMatcher::new(
+                    input
+                        .include_pattern
+                        .as_ref()
+                        .into_iter()
+                        .collect::<Vec<_>>(),
+                    path_style,
+                )
+                .map_err(|error| format!("invalid include glob pattern: {error}"))?;
+
+                // Exclude global file_scan_exclusions and private_files settings
+                let exclude_matcher = {
+                    let global_settings = WorktreeSettings::get_global(cx);
+                    let exclude_patterns = global_settings
+                        .file_scan_exclusions
+                        .sources()
+                        .chain(global_settings.private_files.sources());
+
+                    PathMatcher::new(exclude_patterns, path_style)
+                        .map_err(|error| format!("invalid exclude pattern: {error}"))?
+                };
+
+                let query = SearchQuery::regex(
+                    &input.regex,
+                    false,
+                    input.case_sensitive,
+                    false,
+                    false,
+                    include_matcher,
+                    exclude_matcher,
+                    true, // Always match file include pattern against *full project paths* that start with a project root.
+                    None,
+                )
+                .map_err(|error| error.to_string())?;
+
+                Ok::<_, String>(
+                    project.update(cx, |project, cx| project.search(query, cx)),
+                )
+            })?;
+
+            let project = project.downgrade();
             // Keep the search alive for the duration of result iteration. Dropping this task is the
             // cancellation mechanism; we intentionally do not detach it.
             let SearchResults {rx, _task_handle}  = results;
@@ -787,7 +785,13 @@ mod tests {
         cx: &mut TestAppContext,
     ) -> String {
         let tool = Arc::new(GrepTool { project });
-        let task = cx.update(|cx| tool.run(input, ToolCallEventStream::test().0, cx));
+        let task = cx.update(|cx| {
+            tool.run(
+                ToolInput::resolved(input),
+                ToolCallEventStream::test().0,
+                cx,
+            )
+        });
 
         match task.await {
             Ok(result) => {

crates/agent/src/tools/list_directory_tool.rs 🔗

@@ -2,7 +2,7 @@ use super::tool_permissions::{
     ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots,
     resolve_project_path,
 };
-use crate::{AgentTool, ToolCallEventStream};
+use crate::{AgentTool, ToolCallEventStream, ToolInput};
 use agent_client_protocol::ToolKind;
 use anyhow::{Context as _, Result, anyhow};
 use gpui::{App, Entity, SharedString, Task};
@@ -146,34 +146,39 @@ impl AgentTool for ListDirectoryTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        // Sometimes models will return these even though we tell it to give a path and not a glob.
-        // When this happens, just list the root worktree directories.
-        if matches!(input.path.as_str(), "." | "" | "./" | "*") {
-            let output = self
-                .project
-                .read(cx)
-                .worktrees(cx)
-                .filter_map(|worktree| {
-                    let worktree = worktree.read(cx);
-                    let root_entry = worktree.root_entry()?;
-                    if root_entry.is_dir() {
-                        Some(root_entry.path.display(worktree.path_style()))
-                    } else {
-                        None
-                    }
-                })
-                .collect::<Vec<_>>()
-                .join("\n");
-
-            return Task::ready(Ok(output));
-        }
-
         let project = self.project.clone();
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            // Sometimes models will return these even though we tell it to give a path and not a glob.
+            // When this happens, just list the root worktree directories.
+            if matches!(input.path.as_str(), "." | "" | "./" | "*") {
+                let output = project.read_with(cx, |project, cx| {
+                    project
+                        .worktrees(cx)
+                        .filter_map(|worktree| {
+                            let worktree = worktree.read(cx);
+                            let root_entry = worktree.root_entry()?;
+                            if root_entry.is_dir() {
+                                Some(root_entry.path.display(worktree.path_style()))
+                            } else {
+                                None
+                            }
+                        })
+                        .collect::<Vec<_>>()
+                        .join("\n")
+                });
+
+                return Ok(output);
+            }
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -323,7 +328,13 @@ mod tests {
             path: "project".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert_eq!(
@@ -344,7 +355,13 @@ mod tests {
             path: "project/src".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert_eq!(
@@ -365,7 +382,13 @@ mod tests {
             path: "project/tests".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert!(!output.contains("# Folders:"));
@@ -393,7 +416,13 @@ mod tests {
             path: "project/empty_dir".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert_eq!(output, "project/empty_dir is empty.\n");
@@ -420,7 +449,13 @@ mod tests {
             path: "project/nonexistent".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await;
         assert!(output.unwrap_err().contains("Path not found"));
 
@@ -429,7 +464,13 @@ mod tests {
             path: "project/file.txt".into(),
         };
         let output = cx
-            .update(|cx| tool.run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await;
         assert!(output.unwrap_err().contains("is not a directory"));
     }
@@ -493,7 +534,13 @@ mod tests {
             path: "project".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
 
@@ -520,7 +567,13 @@ mod tests {
             path: "project/.secretdir".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await;
         assert!(
             output.unwrap_err().contains("file_scan_exclusions"),
@@ -532,7 +585,13 @@ mod tests {
             path: "project/visible_dir".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
 
@@ -637,7 +696,13 @@ mod tests {
             path: "worktree1/src".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert!(output.contains("main.rs"), "Should list main.rs");
@@ -655,7 +720,13 @@ mod tests {
             path: "worktree1/tests".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert!(output.contains("test.rs"), "Should list test.rs");
@@ -669,7 +740,13 @@ mod tests {
             path: "worktree2/lib".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert!(output.contains("public.js"), "Should list public.js");
@@ -687,7 +764,13 @@ mod tests {
             path: "worktree2/docs".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await
             .unwrap();
         assert!(output.contains("README.md"), "Should list README.md");
@@ -701,7 +784,13 @@ mod tests {
             path: "worktree1/src/secret.rs".into(),
         };
         let output = cx
-            .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx))
+            .update(|cx| {
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
+            })
             .await;
         assert!(output.unwrap_err().contains("Cannot list directory"),);
     }
@@ -743,9 +832,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                ListDirectoryToolInput {
+                ToolInput::resolved(ListDirectoryToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -804,9 +893,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                ListDirectoryToolInput {
+                ToolInput::resolved(ListDirectoryToolInput {
                     path: "project/link_to_external".into(),
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -871,9 +960,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.clone().run(
-                    ListDirectoryToolInput {
+                    ToolInput::resolved(ListDirectoryToolInput {
                         path: "project/link_to_external".into(),
-                    },
+                    }),
                     event_stream,
                     cx,
                 )
@@ -924,9 +1013,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.clone().run(
-                    ListDirectoryToolInput {
+                    ToolInput::resolved(ListDirectoryToolInput {
                         path: "project/src".into(),
-                    },
+                    }),
                     event_stream,
                     cx,
                 )
@@ -981,9 +1070,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.clone().run(
-                    ListDirectoryToolInput {
+                    ToolInput::resolved(ListDirectoryToolInput {
                         path: "project/link_dir".into(),
-                    },
+                    }),
                     event_stream,
                     cx,
                 )

crates/agent/src/tools/move_path_tool.rs 🔗

@@ -2,7 +2,9 @@ use super::tool_permissions::{
     SensitiveSettingsKind, authorize_symlink_escapes, canonicalize_worktree_roots,
     collect_symlink_escapes, sensitive_settings_kind,
 };
-use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_paths};
+use crate::{
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_paths,
+};
 use agent_client_protocol::ToolKind;
 use agent_settings::AgentSettings;
 use futures::FutureExt as _;
@@ -92,19 +94,24 @@ impl AgentTool for MovePathTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let settings = AgentSettings::get_global(cx);
-        let paths = vec![input.source_path.clone(), input.destination_path.clone()];
-        let decision = decide_permission_for_paths(Self::NAME, &paths, settings);
-        if let ToolPermissionDecision::Deny(reason) = decision {
-            return Task::ready(Err(reason));
-        }
-
         let project = self.project.clone();
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            let paths = vec![input.source_path.clone(), input.destination_path.clone()];
+            let decision = cx.update(|cx| {
+                decide_permission_for_paths(Self::NAME, &paths, AgentSettings::get_global(cx))
+            });
+            if let ToolPermissionDecision::Deny(reason) = decision {
+                return Err(reason);
+            }
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -255,7 +262,7 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let task = cx.update(|cx| tool.run(input, event_stream, cx));
+        let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
         let auth = event_rx.expect_authorization().await;
         let title = auth.tool_call.fields.title.as_deref().unwrap_or("");
@@ -309,7 +316,7 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let task = cx.update(|cx| tool.run(input, event_stream, cx));
+        let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
         let auth = event_rx.expect_authorization().await;
         drop(auth);
@@ -361,7 +368,7 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let task = cx.update(|cx| tool.run(input, event_stream, cx));
+        let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx));
 
         let auth = event_rx.expect_authorization().await;
         let title = auth.tool_call.fields.title.as_deref().unwrap_or("");
@@ -437,7 +444,9 @@ mod tests {
         };
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
-        let result = cx.update(|cx| tool.run(input, event_stream, cx)).await;
+        let result = cx
+            .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx))
+            .await;
 
         assert!(result.is_err(), "Tool should fail when policy denies");
         assert!(

crates/agent/src/tools/now_tool.rs 🔗

@@ -6,7 +6,7 @@ use gpui::{App, SharedString, Task};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
-use crate::{AgentTool, ToolCallEventStream};
+use crate::{AgentTool, ToolCallEventStream, ToolInput};
 
 #[derive(Debug, Serialize, Deserialize, JsonSchema)]
 #[serde(rename_all = "snake_case")]
@@ -48,14 +48,20 @@ impl AgentTool for NowTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         _event_stream: ToolCallEventStream,
-        _cx: &mut App,
+        cx: &mut App,
     ) -> Task<Result<String, String>> {
-        let now = match input.timezone {
-            Timezone::Utc => Utc::now().to_rfc3339(),
-            Timezone::Local => Local::now().to_rfc3339(),
-        };
-        Task::ready(Ok(format!("The current datetime is {now}.")))
+        cx.spawn(async move |_cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+            let now = match input.timezone {
+                Timezone::Utc => Utc::now().to_rfc3339(),
+                Timezone::Local => Local::now().to_rfc3339(),
+            };
+            Ok(format!("The current datetime is {now}."))
+        })
     }
 }

crates/agent/src/tools/open_tool.rs 🔗

@@ -2,7 +2,7 @@ use super::tool_permissions::{
     ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots,
     resolve_project_path,
 };
-use crate::AgentTool;
+use crate::{AgentTool, ToolInput};
 use agent_client_protocol::ToolKind;
 use futures::FutureExt as _;
 use gpui::{App, AppContext as _, Entity, SharedString, Task};
@@ -61,16 +61,24 @@ impl AgentTool for OpenTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: crate::ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        // If path_or_url turns out to be a path in the project, make it absolute.
-        let abs_path = to_absolute_path(&input.path_or_url, self.project.clone(), cx);
-        let initial_title = self.initial_title(Ok(input.clone()), cx);
-
         let project = self.project.clone();
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            // If path_or_url turns out to be a path in the project, make it absolute.
+            let (abs_path, initial_title) = cx.update(|cx| {
+                let abs_path = to_absolute_path(&input.path_or_url, project.clone(), cx);
+                let initial_title = self.initial_title(Ok(input.clone()), cx);
+                (abs_path, initial_title)
+            });
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 

crates/agent/src/tools/read_file_tool.rs 🔗

@@ -2,7 +2,7 @@ use action_log::ActionLog;
 use agent_client_protocol::{self as acp, ToolCallUpdateFields};
 use anyhow::{Context as _, Result, anyhow};
 use futures::FutureExt as _;
-use gpui::{App, Entity, SharedString, Task, WeakEntity};
+use gpui::{App, Entity, SharedString, Task};
 use indoc::formatdoc;
 use language::Point;
 use language_model::{LanguageModelImage, LanguageModelToolResultContent};
@@ -21,7 +21,7 @@ use super::tool_permissions::{
     ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots,
     resolve_project_path,
 };
-use crate::{AgentTool, Thread, ToolCallEventStream, outline};
+use crate::{AgentTool, ToolCallEventStream, ToolInput, outline};
 
 /// Reads the content of the given file in the project.
 ///
@@ -56,21 +56,21 @@ pub struct ReadFileToolInput {
 }
 
 pub struct ReadFileTool {
-    thread: WeakEntity<Thread>,
     project: Entity<Project>,
     action_log: Entity<ActionLog>,
+    update_agent_location: bool,
 }
 
 impl ReadFileTool {
     pub fn new(
-        thread: WeakEntity<Thread>,
         project: Entity<Project>,
         action_log: Entity<ActionLog>,
+        update_agent_location: bool,
     ) -> Self {
         Self {
-            thread,
             project,
             action_log,
+            update_agent_location,
         }
     }
 }
@@ -114,14 +114,17 @@ impl AgentTool for ReadFileTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<LanguageModelToolResultContent, LanguageModelToolResultContent>> {
         let project = self.project.clone();
-        let thread = self.thread.clone();
         let action_log = self.action_log.clone();
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(tool_content_err)?;
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -208,7 +211,6 @@ impl AgentTool for ReadFileTool {
             });
 
             if is_image {
-
                 let image_entity: Entity<ImageItem> = cx
                     .update(|cx| {
                         self.project.update(cx, |project, cx| {
@@ -254,17 +256,6 @@ impl AgentTool for ReadFileTool {
                 return Err(tool_content_err(format!("{file_path} not found")));
             }
 
-            // Record the file read time and mtime
-            if let Some(mtime) = buffer.read_with(cx, |buffer, _| {
-                buffer.file().and_then(|file| file.disk_state().mtime())
-            }) {
-                thread
-                    .update(cx, |thread, _| {
-                        thread.file_read_times.insert(abs_path.to_path_buf(), mtime);
-                    })
-                    .ok();
-            }
-
             let mut anchor = None;
 
             // Check if specific line ranges are provided
@@ -324,15 +315,17 @@ impl AgentTool for ReadFileTool {
             };
 
             project.update(cx, |project, cx| {
-                project.set_agent_location(
-                    Some(AgentLocation {
-                        buffer: buffer.downgrade(),
-                        position: anchor.unwrap_or_else(|| {
-                            text::Anchor::min_for_buffer(buffer.read(cx).remote_id())
+                if self.update_agent_location {
+                    project.set_agent_location(
+                        Some(AgentLocation {
+                            buffer: buffer.downgrade(),
+                            position: anchor.unwrap_or_else(|| {
+                                text::Anchor::min_for_buffer(buffer.read(cx).remote_id())
+                            }),
                         }),
-                    }),
-                    cx,
-                );
+                        cx,
+                    );
+                }
                 if let Ok(LanguageModelToolResultContent::Text(text)) = &result {
                     let text: &str = text;
                     let markdown = MarkdownCodeBlock {
@@ -354,13 +347,10 @@ impl AgentTool for ReadFileTool {
 #[cfg(test)]
 mod test {
     use super::*;
-    use crate::{ContextServerRegistry, Templates, Thread};
     use agent_client_protocol as acp;
     use fs::Fs as _;
     use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
-    use language_model::fake_provider::FakeLanguageModel;
     use project::{FakeFs, Project};
-    use prompt_store::ProjectContext;
     use serde_json::json;
     use settings::SettingsStore;
     use std::path::PathBuf;
@@ -375,20 +365,7 @@ mod test {
         fs.insert_tree(path!("/root"), json!({})).await;
         let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
         let (event_stream, _) = ToolCallEventStream::test();
 
         let result = cx
@@ -398,7 +375,7 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.run(input, event_stream, cx)
+                tool.run(ToolInput::resolved(input), event_stream, cx)
             })
             .await;
         assert_eq!(
@@ -421,20 +398,7 @@ mod test {
         .await;
         let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
         let result = cx
             .update(|cx| {
                 let input = ReadFileToolInput {
@@ -442,7 +406,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.run(input, ToolCallEventStream::test().0, cx)
+                tool.run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert_eq!(result.unwrap(), "This is a small file content".into());
@@ -464,20 +432,7 @@ mod test {
         let language_registry = project.read_with(cx, |project, _| project.languages().clone());
         language_registry.add(language::rust_lang());
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
         let result = cx
             .update(|cx| {
                 let input = ReadFileToolInput {
@@ -485,7 +440,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await
             .unwrap();
@@ -510,7 +469,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.run(input, ToolCallEventStream::test().0, cx)
+                tool.run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await
             .unwrap();
@@ -549,20 +512,7 @@ mod test {
         let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
 
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
         let result = cx
             .update(|cx| {
                 let input = ReadFileToolInput {
@@ -570,7 +520,11 @@ mod test {
                     start_line: Some(2),
                     end_line: Some(4),
                 };
-                tool.run(input, ToolCallEventStream::test().0, cx)
+                tool.run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert_eq!(result.unwrap(), "Line 2\nLine 3\nLine 4\n".into());
@@ -590,20 +544,7 @@ mod test {
         .await;
         let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
 
         // start_line of 0 should be treated as 1
         let result = cx
@@ -613,7 +554,11 @@ mod test {
                     start_line: Some(0),
                     end_line: Some(2),
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert_eq!(result.unwrap(), "Line 1\nLine 2\n".into());
@@ -626,7 +571,11 @@ mod test {
                     start_line: Some(1),
                     end_line: Some(0),
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert_eq!(result.unwrap(), "Line 1\n".into());
@@ -639,7 +588,11 @@ mod test {
                     start_line: Some(3),
                     end_line: Some(2),
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert_eq!(result.unwrap(), "Line 3\n".into());
@@ -721,20 +674,7 @@ mod test {
 
         let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
 
         // Reading a file outside the project worktree should fail
         let result = cx
@@ -744,7 +684,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -760,7 +704,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -776,7 +724,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -791,7 +743,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -807,7 +763,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -822,7 +782,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -837,7 +801,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -853,7 +821,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(result.is_ok(), "Should be able to read normal files");
@@ -867,7 +839,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.run(input, ToolCallEventStream::test().0, cx)
+                tool.run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
         assert!(
@@ -893,29 +869,16 @@ mod test {
 
         let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+        let tool = Arc::new(ReadFileTool::new(project, action_log, true));
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let read_task = cx.update(|cx| {
             tool.run(
-                ReadFileToolInput {
+                ToolInput::resolved(ReadFileToolInput {
                     path: "root/secret.png".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -1012,24 +975,7 @@ mod test {
         .await;
 
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log.clone(),
-        ));
+        let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone(), true));
 
         // Test reading allowed files in worktree1
         let result = cx
@@ -1039,7 +985,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await
             .unwrap();
@@ -1057,7 +1007,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
 
@@ -1075,7 +1029,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
 
@@ -1093,7 +1051,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await
             .unwrap();
@@ -1111,7 +1073,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
 
@@ -1129,7 +1095,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
 
@@ -1148,7 +1118,11 @@ mod test {
                     start_line: None,
                     end_line: None,
                 };
-                tool.clone().run(input, ToolCallEventStream::test().0, cx)
+                tool.clone().run(
+                    ToolInput::resolved(input),
+                    ToolCallEventStream::test().0,
+                    cx,
+                )
             })
             .await;
 
@@ -1188,33 +1162,16 @@ mod test {
         cx.executor().run_until_parked();
 
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log,
-        ));
+        let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true));
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                ReadFileToolInput {
+                ToolInput::resolved(ReadFileToolInput {
                     path: "project/secret_link.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -1264,33 +1221,16 @@ mod test {
         cx.executor().run_until_parked();
 
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log,
-        ));
+        let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true));
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                ReadFileToolInput {
+                ToolInput::resolved(ReadFileToolInput {
                     path: "project/secret_link.txt".to_string(),
                     start_line: None,
                     end_line: None,
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -1344,34 +1284,17 @@ mod test {
         cx.executor().run_until_parked();
 
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
-        let context_server_registry =
-            cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-        let model = Arc::new(FakeLanguageModel::default());
-        let thread = cx.new(|cx| {
-            Thread::new(
-                project.clone(),
-                cx.new(|_cx| ProjectContext::default()),
-                context_server_registry,
-                Templates::new(),
-                Some(model),
-                cx,
-            )
-        });
-        let tool = Arc::new(ReadFileTool::new(
-            thread.downgrade(),
-            project.clone(),
-            action_log,
-        ));
+        let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true));
 
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let result = cx
             .update(|cx| {
                 tool.clone().run(
-                    ReadFileToolInput {
+                    ToolInput::resolved(ReadFileToolInput {
                         path: "project/secret_link.txt".to_string(),
                         start_line: None,
                         end_line: None,
-                    },
+                    }),
                     event_stream,
                     cx,
                 )

crates/agent/src/tools/restore_file_from_disk_tool.rs 🔗

@@ -17,7 +17,9 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use util::markdown::MarkdownInlineCode;
 
-use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path};
+use crate::{
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path,
+};
 
 /// Discards unsaved changes in open buffers by reloading file contents from disk.
 ///
@@ -66,25 +68,31 @@ impl AgentTool for RestoreFileFromDiskTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<String, String>> {
-        let settings = AgentSettings::get_global(cx).clone();
-
-        // Check for any immediate deny before spawning async work.
-        for path in &input.paths {
-            let path_str = path.to_string_lossy();
-            let decision = decide_permission_for_path(Self::NAME, &path_str, &settings);
-            if let ToolPermissionDecision::Deny(reason) = decision {
-                return Task::ready(Err(reason));
-            }
-        }
-
         let project = self.project.clone();
-        let input_paths = input.paths;
 
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            // Check for any immediate deny before doing async work.
+            for path in &input.paths {
+                let path_str = path.to_string_lossy();
+                let decision = cx.update(|cx| {
+                    decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx))
+                });
+                if let ToolPermissionDecision::Deny(reason) = decision {
+                    return Err(reason);
+                }
+            }
+
+            let input_paths = input.paths;
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -92,7 +100,9 @@ impl AgentTool for RestoreFileFromDiskTool {
 
             for path in &input_paths {
                 let path_str = path.to_string_lossy();
-                let decision = decide_permission_for_path(Self::NAME, &path_str, &settings);
+                let decision = cx.update(|cx| {
+                    decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx))
+                });
                 let symlink_escape = project.read_with(cx, |project, cx| {
                     path_has_symlink_escape(project, path, &canonical_roots, cx)
                 });
@@ -378,12 +388,12 @@ mod tests {
         let output = cx
             .update(|cx| {
                 tool.clone().run(
-                    RestoreFileFromDiskToolInput {
+                    ToolInput::resolved(RestoreFileFromDiskToolInput {
                         paths: vec![
                             PathBuf::from("root/dirty.txt"),
                             PathBuf::from("root/clean.txt"),
                         ],
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -428,7 +438,7 @@ mod tests {
         let output = cx
             .update(|cx| {
                 tool.clone().run(
-                    RestoreFileFromDiskToolInput { paths: vec![] },
+                    ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![] }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -441,9 +451,9 @@ mod tests {
         let output = cx
             .update(|cx| {
                 tool.clone().run(
-                    RestoreFileFromDiskToolInput {
+                    ToolInput::resolved(RestoreFileFromDiskToolInput {
                         paths: vec![PathBuf::from("nonexistent/path.txt")],
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -495,9 +505,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                RestoreFileFromDiskToolInput {
+                ToolInput::resolved(RestoreFileFromDiskToolInput {
                     paths: vec![PathBuf::from("project/link.txt")],
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -564,9 +574,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.clone().run(
-                    RestoreFileFromDiskToolInput {
+                    ToolInput::resolved(RestoreFileFromDiskToolInput {
                         paths: vec![PathBuf::from("project/link.txt")],
-                    },
+                    }),
                     event_stream,
                     cx,
                 )
@@ -623,9 +633,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                RestoreFileFromDiskToolInput {
+                ToolInput::resolved(RestoreFileFromDiskToolInput {
                     paths: vec![PathBuf::from("project/link.txt")],
-                },
+                }),
                 event_stream,
                 cx,
             )

crates/agent/src/tools/save_file_tool.rs 🔗

@@ -17,7 +17,9 @@ use super::tool_permissions::{
     canonicalize_worktree_roots, path_has_symlink_escape, resolve_project_path,
     sensitive_settings_kind,
 };
-use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path};
+use crate::{
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path,
+};
 
 /// Saves files that have unsaved changes.
 ///
@@ -63,25 +65,31 @@ impl AgentTool for SaveFileTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<String, String>> {
-        let settings = AgentSettings::get_global(cx).clone();
-
-        // Check for any immediate deny before spawning async work.
-        for path in &input.paths {
-            let path_str = path.to_string_lossy();
-            let decision = decide_permission_for_path(Self::NAME, &path_str, &settings);
-            if let ToolPermissionDecision::Deny(reason) = decision {
-                return Task::ready(Err(reason));
-            }
-        }
-
         let project = self.project.clone();
-        let input_paths = input.paths;
 
         cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
+
+            // Check for any immediate deny before doing async work.
+            for path in &input.paths {
+                let path_str = path.to_string_lossy();
+                let decision = cx.update(|cx| {
+                    decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx))
+                });
+                if let ToolPermissionDecision::Deny(reason) = decision {
+                    return Err(reason);
+                }
+            }
+
+            let input_paths = input.paths;
+
             let fs = project.read_with(cx, |project, _cx| project.fs().clone());
             let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await;
 
@@ -89,7 +97,9 @@ impl AgentTool for SaveFileTool {
 
             for path in &input_paths {
                 let path_str = path.to_string_lossy();
-                let decision = decide_permission_for_path(Self::NAME, &path_str, &settings);
+                let decision = cx.update(|cx| {
+                    decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx))
+                });
                 let symlink_escape = project.read_with(cx, |project, cx| {
                     path_has_symlink_escape(project, path, &canonical_roots, cx)
                 });
@@ -382,12 +392,12 @@ mod tests {
         let output = cx
             .update(|cx| {
                 tool.clone().run(
-                    SaveFileToolInput {
+                    ToolInput::resolved(SaveFileToolInput {
                         paths: vec![
                             PathBuf::from("root/dirty.txt"),
                             PathBuf::from("root/clean.txt"),
                         ],
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -425,7 +435,7 @@ mod tests {
         let output = cx
             .update(|cx| {
                 tool.clone().run(
-                    SaveFileToolInput { paths: vec![] },
+                    ToolInput::resolved(SaveFileToolInput { paths: vec![] }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -438,9 +448,9 @@ mod tests {
         let output = cx
             .update(|cx| {
                 tool.clone().run(
-                    SaveFileToolInput {
+                    ToolInput::resolved(SaveFileToolInput {
                         paths: vec![PathBuf::from("nonexistent/path.txt")],
-                    },
+                    }),
                     ToolCallEventStream::test().0,
                     cx,
                 )
@@ -490,9 +500,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                SaveFileToolInput {
+                ToolInput::resolved(SaveFileToolInput {
                     paths: vec![PathBuf::from("project/link.txt")],
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -559,9 +569,9 @@ mod tests {
         let result = cx
             .update(|cx| {
                 tool.clone().run(
-                    SaveFileToolInput {
+                    ToolInput::resolved(SaveFileToolInput {
                         paths: vec![PathBuf::from("project/link.txt")],
-                    },
+                    }),
                     event_stream,
                     cx,
                 )
@@ -618,9 +628,9 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                SaveFileToolInput {
+                ToolInput::resolved(SaveFileToolInput {
                     paths: vec![PathBuf::from("project/link.txt")],
-                },
+                }),
                 event_stream,
                 cx,
             )
@@ -702,12 +712,12 @@ mod tests {
         let (event_stream, mut event_rx) = ToolCallEventStream::test();
         let task = cx.update(|cx| {
             tool.clone().run(
-                SaveFileToolInput {
+                ToolInput::resolved(SaveFileToolInput {
                     paths: vec![
                         PathBuf::from("project/dirty.txt"),
                         PathBuf::from("project/link.txt"),
                     ],
-                },
+                }),
                 event_stream,
                 cx,
             )

crates/agent/src/tools/spawn_agent_tool.rs 🔗

@@ -1,77 +1,93 @@
-use acp_thread::SUBAGENT_SESSION_ID_META_KEY;
+use acp_thread::{SUBAGENT_SESSION_INFO_META_KEY, SubagentSessionInfo};
 use agent_client_protocol as acp;
 use anyhow::Result;
-use gpui::{App, SharedString, Task, WeakEntity};
+use gpui::{App, SharedString, Task};
 use language_model::LanguageModelToolResultContent;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use std::rc::Rc;
 use std::sync::Arc;
 
-use crate::{AgentTool, Thread, ThreadEnvironment, ToolCallEventStream};
+use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput};
 
 /// Spawns an agent to perform a delegated task.
 ///
-/// Use this tool when you want to do any of the following:
-/// - Run multiple tasks in parallel that would take significantly longer to run sequentially.
-/// - Complete a self-contained task where you need to know if it succeeded or failed (and how), but none of its intermediate output.
-/// - Perform an investigation where all you need to know is the outcome, not the research that led to that outcome.
+/// Use this tool when you want to:
+/// - Run multiple tasks in parallel.
+/// - Delegate a self-contained task where you only need the final outcome.
 ///
-/// You control what the agent does by providing a prompt describing what the agent should do. The agent has access to the same tools you do, but does NOT see your conversation history or any context the user attached. You must include all relevant context (file paths, requirements, constraints) in the prompt.
+/// Do NOT use this tool for tasks you could accomplish directly with one or two tool calls (e.g. reading a file, running a single command).
 ///
 /// You will receive only the agent's final message as output.
 ///
-/// If a response (success or error) includes a session_id, you can send a follow-up message to that session by passing the session_id back. This is useful for multi-turn conversations with an agent, asking clarifying questions about its output, or retrying after timeouts or transient failures.
+/// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message.
+///
+/// **Follow-up** (with session_id): Sends a follow-up to an existing agent session. The agent already has full context, so send only a short, direct message — do NOT repeat the original task or context. Examples: "Also update the tests", "Fix the compile error in foo.rs", "Retry".
 ///
-/// Note:
-/// - Agents cannot use tools you don't have access to.
 /// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. assign each to different directories).
 #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
 pub struct SpawnAgentToolInput {
     /// Short label displayed in the UI while the agent runs (e.g., "Researching alternatives")
     pub label: String,
-    /// Describe the task for the agent to perform. Be specific about what you want accomplished. Include all necessary context (file paths, requirements, constraints) since the agent cannot see your conversation.
+    /// The prompt for the agent. For new sessions, include full context needed for the task. For follow-ups (with session_id), you can rely on the agent already having the previous message.
     pub message: String,
-    /// Optional session ID of an existing agent session to continue a conversation with. When provided, the message is sent as a follow-up to that session instead of creating a new one. Use this to ask clarifying questions, request changes based on previous output, or retry after errors.
+    /// Session ID of an existing agent session to continue instead of creating a new one.
     #[serde(default)]
     pub session_id: Option<acp::SessionId>,
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 #[serde(untagged)]
+#[serde(rename_all = "snake_case")]
 pub enum SpawnAgentToolOutput {
     Success {
         session_id: acp::SessionId,
         output: String,
+        session_info: SubagentSessionInfo,
     },
     Error {
         #[serde(skip_serializing_if = "Option::is_none")]
         #[serde(default)]
         session_id: Option<acp::SessionId>,
         error: String,
+        session_info: Option<SubagentSessionInfo>,
     },
 }
 
 impl From<SpawnAgentToolOutput> for LanguageModelToolResultContent {
     fn from(output: SpawnAgentToolOutput) -> Self {
-        serde_json::to_string(&output)
+        match output {
+            SpawnAgentToolOutput::Success {
+                session_id,
+                output,
+                session_info: _, // Don't show this to the model
+            } => serde_json::to_string(
+                &serde_json::json!({ "session_id": session_id, "output": output }),
+            )
             .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}"))
-            .into()
+            .into(),
+            SpawnAgentToolOutput::Error {
+                session_id,
+                error,
+                session_info: _, // Don't show this to the model
+            } => serde_json::to_string(
+                &serde_json::json!({ "session_id": session_id, "error": error }),
+            )
+            .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}"))
+            .into(),
+        }
     }
 }
 
 /// Tool that spawns an agent thread to work on a task.
 pub struct SpawnAgentTool {
-    parent_thread: WeakEntity<Thread>,
     environment: Rc<dyn ThreadEnvironment>,
 }
 
 impl SpawnAgentTool {
-    pub fn new(parent_thread: WeakEntity<Thread>, environment: Rc<dyn ThreadEnvironment>) -> Self {
-        Self {
-            parent_thread,
-            environment,
-        }
+    pub fn new(environment: Rc<dyn ThreadEnvironment>) -> Self {
+        Self { environment }
     }
 }
 
@@ -90,69 +106,108 @@ impl AgentTool for SpawnAgentTool {
         input: Result<Self::Input, serde_json::Value>,
         _cx: &mut App,
     ) -> SharedString {
-        input
-            .map(|i| i.label.into())
-            .unwrap_or_else(|_| "Spawning agent".into())
+        match input {
+            Ok(i) => i.label.into(),
+            Err(value) => value
+                .get("label")
+                .and_then(|v| v.as_str())
+                .map(|s| SharedString::from(s.to_owned()))
+                .unwrap_or_else(|| "Spawning agent".into()),
+        }
     }
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let Some(parent_thread_entity) = self.parent_thread.upgrade() else {
-            return Task::ready(Err(SpawnAgentToolOutput::Error {
-                session_id: None,
-                error: "Parent thread no longer exists".to_string(),
-            }));
-        };
-
-        let subagent = if let Some(session_id) = input.session_id {
-            self.environment
-                .resume_subagent(parent_thread_entity, session_id, input.message, cx)
-        } else {
-            self.environment
-                .create_subagent(parent_thread_entity, input.label, input.message, cx)
-        };
-        let subagent = match subagent {
-            Ok(subagent) => subagent,
-            Err(err) => {
-                return Task::ready(Err(SpawnAgentToolOutput::Error {
+        cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| SpawnAgentToolOutput::Error {
+                    session_id: None,
+                    error: format!("Failed to receive tool input: {e}"),
+                    session_info: None,
+                })?;
+
+            let (subagent, mut session_info) = cx.update(|cx| {
+                let subagent = if let Some(session_id) = input.session_id {
+                    self.environment.resume_subagent(session_id, cx)
+                } else {
+                    self.environment.create_subagent(input.label, cx)
+                };
+                let subagent = subagent.map_err(|err| SpawnAgentToolOutput::Error {
                     session_id: None,
                     error: err.to_string(),
-                }));
-            }
-        };
-        let subagent_session_id = subagent.id();
-
-        event_stream.subagent_spawned(subagent_session_id.clone());
-        let meta = acp::Meta::from_iter([(
-            SUBAGENT_SESSION_ID_META_KEY.into(),
-            subagent_session_id.to_string().into(),
-        )]);
-        event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta));
-
-        cx.spawn(async move |cx| match subagent.wait_for_output(cx).await {
-            Ok(output) => {
-                event_stream.update_fields(
-                    acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]),
-                );
-                Ok(SpawnAgentToolOutput::Success {
-                    session_id: subagent_session_id,
-                    output,
-                })
-            }
-            Err(e) => {
-                let error = e.to_string();
-                event_stream.update_fields(
-                    acp::ToolCallUpdateFields::new().content(vec![error.clone().into()]),
+                    session_info: None,
+                })?;
+                let session_info = SubagentSessionInfo {
+                    session_id: subagent.id(),
+                    message_start_index: subagent.num_entries(cx),
+                    message_end_index: None,
+                };
+
+                event_stream.subagent_spawned(subagent.id());
+                event_stream.update_fields_with_meta(
+                    acp::ToolCallUpdateFields::new(),
+                    Some(acp::Meta::from_iter([(
+                        SUBAGENT_SESSION_INFO_META_KEY.into(),
+                        serde_json::json!(&session_info),
+                    )])),
                 );
-                Err(SpawnAgentToolOutput::Error {
-                    session_id: Some(subagent_session_id),
-                    error,
-                })
-            }
+
+                Ok((subagent, session_info))
+            })?;
+
+            let send_result = subagent.send(input.message, cx).await;
+
+            let status = if send_result.is_ok() {
+                "completed"
+            } else {
+                "error"
+            };
+            telemetry::event!(
+                "Subagent Completed",
+                subagent_session = session_info.session_id.to_string(),
+                status,
+            );
+
+            session_info.message_end_index =
+                cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1)));
+
+            let meta = Some(acp::Meta::from_iter([(
+                SUBAGENT_SESSION_INFO_META_KEY.into(),
+                serde_json::json!(&session_info),
+            )]));
+
+            let (output, result) = match send_result {
+                Ok(output) => (
+                    output.clone(),
+                    Ok(SpawnAgentToolOutput::Success {
+                        session_id: session_info.session_id.clone(),
+                        session_info,
+                        output,
+                    }),
+                ),
+                Err(e) => {
+                    let error = e.to_string();
+                    (
+                        error.clone(),
+                        Err(SpawnAgentToolOutput::Error {
+                            session_id: Some(session_info.session_id.clone()),
+                            error,
+                            session_info: Some(session_info),
+                        }),
+                    )
+                }
+            };
+            event_stream.update_fields_with_meta(
+                acp::ToolCallUpdateFields::new().content(vec![output.into()]),
+                meta,
+            );
+            result
         })
     }
 
@@ -163,25 +218,29 @@ impl AgentTool for SpawnAgentTool {
         event_stream: ToolCallEventStream,
         _cx: &mut App,
     ) -> Result<()> {
-        let session_id = match &output {
-            SpawnAgentToolOutput::Success { session_id, .. } => Some(session_id),
-            SpawnAgentToolOutput::Error { session_id, .. } => session_id.as_ref(),
+        let (content, session_info) = match output {
+            SpawnAgentToolOutput::Success {
+                output,
+                session_info,
+                ..
+            } => (output.into(), Some(session_info)),
+            SpawnAgentToolOutput::Error {
+                error,
+                session_info,
+                ..
+            } => (error.into(), session_info),
         };
 
-        if let Some(session_id) = session_id {
-            event_stream.subagent_spawned(session_id.clone());
-            let meta = acp::Meta::from_iter([(
-                SUBAGENT_SESSION_ID_META_KEY.into(),
-                session_id.to_string().into(),
-            )]);
-            event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta));
-        }
-
-        let content = match &output {
-            SpawnAgentToolOutput::Success { output, .. } => output.into(),
-            SpawnAgentToolOutput::Error { error, .. } => error.into(),
-        };
-        event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![content]));
+        let meta = session_info.map(|session_info| {
+            acp::Meta::from_iter([(
+                SUBAGENT_SESSION_INFO_META_KEY.into(),
+                serde_json::json!(&session_info),
+            )])
+        });
+        event_stream.update_fields_with_meta(
+            acp::ToolCallUpdateFields::new().content(vec![content]),
+            meta,
+        );
 
         Ok(())
     }

crates/agent/src/tools/streaming_edit_file_tool.rs 🔗

@@ -1,30 +1,36 @@
 use super::edit_file_tool::EditFileTool;
 use super::restore_file_from_disk_tool::RestoreFileFromDiskTool;
 use super::save_file_tool::SaveFileTool;
+use super::tool_edit_parser::{ToolEditEvent, ToolEditParser};
 use crate::{
-    AgentTool, Templates, Thread, ToolCallEventStream,
-    edit_agent::streaming_fuzzy_matcher::StreamingFuzzyMatcher,
+    AgentTool, Thread, ToolCallEventStream, ToolInput,
+    edit_agent::{
+        reindent::{Reindenter, compute_indent_delta},
+        streaming_fuzzy_matcher::StreamingFuzzyMatcher,
+    },
 };
 use acp_thread::Diff;
+use action_log::ActionLog;
 use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
 use collections::HashSet;
 use futures::FutureExt as _;
 use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity};
-use language::LanguageRegistry;
 use language::language_settings::{self, FormatOnSave};
+use language::{Buffer, LanguageRegistry};
 use language_model::LanguageModelToolResultContent;
 use project::lsp_store::{FormatTrigger, LspFormatTarget};
-use project::{Project, ProjectPath};
+use project::{AgentLocation, Project, ProjectPath};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use std::ops::Range;
 use std::path::PathBuf;
 use std::sync::Arc;
-use text::BufferSnapshot;
+use streaming_diff::{CharOperation, StreamingDiff};
+use text::ToOffset;
 use ui::SharedString;
-use util::ResultExt;
 use util::rel_path::RelPath;
+use util::{Deferred, ResultExt};
 
 const DEFAULT_UI_TEXT: &str = "Editing file";
 
@@ -70,14 +76,13 @@ pub struct StreamingEditFileToolInput {
     pub path: PathBuf,
 
     /// The mode of operation on the file. Possible values:
-    /// - 'create': Create a new file if it doesn't exist. Requires 'content' field.
-    /// - 'overwrite': Replace the entire contents of an existing file. Requires 'content' field.
+    /// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field.
     /// - 'edit': Make granular edits to an existing file. Requires 'edits' field.
     ///
     /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch.
     pub mode: StreamingEditFileMode,
 
-    /// The complete content for the new file (required for 'create' and 'overwrite' modes).
+    /// The complete content for the new file (required for 'write' mode).
     /// This field should contain the entire file content.
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub content: Option<String>,
@@ -85,23 +90,22 @@ pub struct StreamingEditFileToolInput {
     /// List of edit operations to apply sequentially (required for 'edit' mode).
     /// Each edit finds `old_text` in the file and replaces it with `new_text`.
     #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub edits: Option<Vec<EditOperation>>,
+    pub edits: Option<Vec<Edit>>,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)]
 #[serde(rename_all = "snake_case")]
 pub enum StreamingEditFileMode {
-    /// Create a new file if it doesn't exist
-    Create,
-    /// Replace the entire contents of an existing file
-    Overwrite,
+    /// Overwrite the file with new content (replacing any existing content).
+    /// If the file does not exist, it will be created.
+    Write,
     /// Make granular edits to an existing file
     Edit,
 }
 
 /// A single edit operation that replaces old text with new text
 #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
-pub struct EditOperation {
+pub struct Edit {
     /// The exact text to find in the file. This will be matched using fuzzy matching
     /// to handle minor differences in whitespace or formatting.
     pub old_text: String,
@@ -109,12 +113,26 @@ pub struct EditOperation {
     pub new_text: String,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Default, Debug, Deserialize)]
 struct StreamingEditFileToolPartialInput {
     #[serde(default)]
-    path: String,
+    display_description: Option<String>,
+    #[serde(default)]
+    path: Option<String>,
+    #[serde(default)]
+    mode: Option<StreamingEditFileMode>,
+    #[serde(default)]
+    content: Option<String>,
+    #[serde(default)]
+    edits: Option<Vec<PartialEdit>>,
+}
+
+#[derive(Default, Debug, Deserialize)]
+pub struct PartialEdit {
+    #[serde(default)]
+    pub old_text: Option<String>,
     #[serde(default)]
-    display_description: String,
+    pub new_text: Option<String>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -133,6 +151,14 @@ pub enum StreamingEditFileToolOutput {
     },
 }
 
+impl StreamingEditFileToolOutput {
+    pub fn error(error: impl Into<String>) -> Self {
+        Self::Error {
+            error: error.into(),
+        }
+    }
+}
+
 impl std::fmt::Display for StreamingEditFileToolOutput {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -161,52 +187,55 @@ impl From<StreamingEditFileToolOutput> for LanguageModelToolResultContent {
 }
 
 pub struct StreamingEditFileTool {
+    project: Entity<Project>,
     thread: WeakEntity<Thread>,
+    action_log: Entity<ActionLog>,
     language_registry: Arc<LanguageRegistry>,
-    project: Entity<Project>,
-    #[allow(dead_code)]
-    templates: Arc<Templates>,
 }
 
 impl StreamingEditFileTool {
     pub fn new(
         project: Entity<Project>,
         thread: WeakEntity<Thread>,
+        action_log: Entity<ActionLog>,
         language_registry: Arc<LanguageRegistry>,
-        templates: Arc<Templates>,
     ) -> Self {
         Self {
             project,
             thread,
+            action_log,
             language_registry,
-            templates,
-        }
-    }
-
-    pub fn with_thread(&self, new_thread: WeakEntity<Thread>) -> Self {
-        Self {
-            project: self.project.clone(),
-            thread: new_thread,
-            language_registry: self.language_registry.clone(),
-            templates: self.templates.clone(),
         }
     }
 
     fn authorize(
         &self,
-        input: &StreamingEditFileToolInput,
+        path: &PathBuf,
+        description: &str,
         event_stream: &ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<()>> {
         super::tool_permissions::authorize_file_edit(
             EditFileTool::NAME,
-            &input.path,
-            &input.display_description,
+            path,
+            description,
             &self.thread,
             event_stream,
             cx,
         )
     }
+
+    fn set_agent_location(&self, buffer: WeakEntity<Buffer>, position: text::Anchor, cx: &mut App) {
+        let should_update_agent_location = self
+            .thread
+            .read_with(cx, |thread, _cx| !thread.is_subagent())
+            .unwrap_or_default();
+        if should_update_agent_location {
+            self.project.update(cx, |project, cx| {
+                project.set_agent_location(Some(AgentLocation { buffer, position }), cx);
+            });
+        }
+    }
 }
 
 impl AgentTool for StreamingEditFileTool {
@@ -215,6 +244,10 @@ impl AgentTool for StreamingEditFileTool {
 
     const NAME: &'static str = "streaming_edit_file";
 
+    fn supports_input_streaming() -> bool {
+        true
+    }
+
     fn kind() -> acp::ToolKind {
         acp::ToolKind::Edit
     }
@@ -237,25 +270,27 @@ impl AgentTool for StreamingEditFileTool {
                 .unwrap_or(input.path.to_string_lossy().into_owned())
                 .into(),
             Err(raw_input) => {
-                if let Some(input) =
-                    serde_json::from_value::<StreamingEditFileToolPartialInput>(raw_input).ok()
+                if let Ok(input) =
+                    serde_json::from_value::<StreamingEditFileToolPartialInput>(raw_input)
                 {
-                    let path = input.path.trim();
+                    let path = input.path.unwrap_or_default();
+                    let path = path.trim();
                     if !path.is_empty() {
                         return self
                             .project
                             .read(cx)
-                            .find_project_path(&input.path, cx)
+                            .find_project_path(&path, cx)
                             .and_then(|project_path| {
                                 self.project
                                     .read(cx)
                                     .short_full_path_for_project_path(&project_path, cx)
                             })
-                            .unwrap_or(input.path)
+                            .unwrap_or_else(|| path.to_string())
                             .into();
                     }
 
-                    let description = input.display_description.trim();
+                    let description = input.display_description.unwrap_or_default();
+                    let description = description.trim();
                     if !description.is_empty() {
                         return description.to_string().into();
                     }
@@ -268,227 +303,93 @@ impl AgentTool for StreamingEditFileTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        mut input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let Ok(project) = self
-            .thread
-            .read_with(cx, |thread, _cx| thread.project().clone())
-        else {
-            return Task::ready(Err(StreamingEditFileToolOutput::Error {
-                error: "thread was dropped".to_string(),
-            }));
-        };
-
-        let project_path = match resolve_path(&input, project.clone(), cx) {
-            Ok(path) => path,
-            Err(err) => {
-                return Task::ready(Err(StreamingEditFileToolOutput::Error {
-                    error: err.to_string(),
-                }));
-            }
-        };
-
-        let abs_path = project.read(cx).absolute_path(&project_path, cx);
-        if let Some(abs_path) = abs_path.clone() {
-            event_stream.update_fields(
-                ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]),
-            );
-        }
-
-        let authorize = self.authorize(&input, &event_stream, cx);
-
         cx.spawn(async move |cx: &mut AsyncApp| {
-            let result: anyhow::Result<StreamingEditFileToolOutput> = async {
-                authorize.await?;
-
-                let buffer = project
-                    .update(cx, |project, cx| {
-                        project.open_buffer(project_path.clone(), cx)
-                    })
-                    .await?;
-
-                if let Some(abs_path) = abs_path.as_ref() {
-                    let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) =
-                        self.thread.update(cx, |thread, cx| {
-                            let last_read = thread.file_read_times.get(abs_path).copied();
-                            let current = buffer
-                                .read(cx)
-                                .file()
-                                .and_then(|file| file.disk_state().mtime());
-                            let dirty = buffer.read(cx).is_dirty();
-                            let has_save = thread.has_tool(SaveFileTool::NAME);
-                            let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME);
-                            (last_read, current, dirty, has_save, has_restore)
-                        })?;
-
-                    if is_dirty {
-                        let message = match (has_save_tool, has_restore_tool) {
-                            (true, true) => {
-                                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \
-                                 If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \
-                                 If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit."
-                            }
-                            (true, false) => {
-                                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \
-                                 If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \
-                                 If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed."
-                            }
-                            (false, true) => {
-                                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \
-                                 If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \
-                                 If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit."
-                            }
-                            (false, false) => {
-                                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \
-                                 then ask them to save or revert the file manually and inform you when it's ok to proceed."
+            let mut state: Option<EditSession> = None;
+            loop {
+                futures::select! {
+                    partial = input.recv_partial().fuse() => {
+                        let Some(partial_value) = partial else { break };
+                        if let Ok(parsed) = serde_json::from_value::<StreamingEditFileToolPartialInput>(partial_value) {
+                            if state.is_none()
+                                && let StreamingEditFileToolPartialInput {
+                                    path: Some(path),
+                                    display_description: Some(display_description),
+                                    mode: Some(mode),
+                                    ..
+                                } = &parsed
+                            {
+                                match EditSession::new(
+                                    &PathBuf::from(path),
+                                    display_description,
+                                    *mode,
+                                    &self,
+                                    &event_stream,
+                                    cx,
+                                )
+                                .await
+                                {
+                                    Ok(session) => state = Some(session),
+                                    Err(e) => {
+                                        log::error!("Failed to create edit session: {}", e);
+                                        return Err(e);
+                                    }
+                                }
                             }
-                        };
-                        anyhow::bail!("{}", message);
-                    }
 
-                    if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) {
-                        if current != last_read {
-                            anyhow::bail!(
-                                "The file {} has been modified since you last read it. \
-                                 Please read the file again to get the current state before editing it.",
-                                input.path.display()
-                            );
+                            if let Some(state) = &mut state {
+                                if let Err(e) = state.process(parsed, &self, &event_stream, cx) {
+                                    log::error!("Failed to process edit: {}", e);
+                                    return Err(e);
+                                }
+                            }
                         }
                     }
-                }
-
-                let diff = cx.new(|cx| Diff::new(buffer.clone(), cx));
-                event_stream.update_diff(diff.clone());
-                let _finalize_diff = util::defer({
-                    let diff = diff.downgrade();
-                    let mut cx = cx.clone();
-                    move || {
-                        diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok();
-                    }
-                });
-
-                let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
-                let old_text = cx
-                    .background_spawn({
-                        let old_snapshot = old_snapshot.clone();
-                        async move { Arc::new(old_snapshot.text()) }
-                    })
-                    .await;
-
-                let action_log = self.thread.read_with(cx, |thread, _cx| thread.action_log().clone())?;
-
-                // Edit the buffer and report edits to the action log as part of the
-                // same effect cycle, otherwise the edit will be reported as if the
-                // user made it (due to the buffer subscription in action_log).
-                match input.mode {
-                    StreamingEditFileMode::Create | StreamingEditFileMode::Overwrite => {
-                        action_log.update(cx, |log, cx| {
-                            log.buffer_created(buffer.clone(), cx);
-                        });
-                        let content = input.content.ok_or_else(|| {
-                            anyhow!("'content' field is required for create and overwrite modes")
-                        })?;
-                        cx.update(|cx| {
-                            buffer.update(cx, |buffer, cx| {
-                                buffer.edit([(0..buffer.len(), content.as_str())], None, cx);
-                            });
-                            action_log.update(cx, |log, cx| {
-                                log.buffer_edited(buffer.clone(), cx);
-                            });
-                        });
-                    }
-                    StreamingEditFileMode::Edit => {
-                        action_log.update(cx, |log, cx| {
-                            log.buffer_read(buffer.clone(), cx);
-                        });
-                        let edits = input.edits.ok_or_else(|| {
-                            anyhow!("'edits' field is required for edit mode")
-                        })?;
-                        // apply_edits now handles buffer_edited internally in the same effect cycle
-                        apply_edits(&buffer, &action_log, &edits, &diff, &event_stream, &abs_path, cx)?;
-                    }
-                }
-
-                let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| {
-                    let settings = language_settings::language_settings(
-                        buffer.language().map(|l| l.name()),
-                        buffer.file(),
-                        cx,
-                    );
-                    settings.format_on_save != FormatOnSave::Off
-                });
-
-                if format_on_save_enabled {
-                    action_log.update(cx, |log, cx| {
-                        log.buffer_edited(buffer.clone(), cx);
-                    });
-
-                    let format_task = project.update(cx, |project, cx| {
-                        project.format(
-                            HashSet::from_iter([buffer.clone()]),
-                            LspFormatTarget::Buffers,
-                            false,
-                            FormatTrigger::Save,
-                            cx,
-                        )
-                    });
-                    futures::select! {
-                        result = format_task.fuse() => { result.log_err(); },
-                        _ = event_stream.cancelled_by_user().fuse() => {
-                            anyhow::bail!("Edit cancelled by user");
-                        }
-                    };
-                }
-
-                let save_task = project
-                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
-                futures::select! {
-                    result = save_task.fuse() => { result?; },
                     _ = event_stream.cancelled_by_user().fuse() => {
-                        anyhow::bail!("Edit cancelled by user");
+                        return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
                     }
-                };
-
-                action_log.update(cx, |log, cx| {
-                    log.buffer_edited(buffer.clone(), cx);
-                });
-
-                if let Some(abs_path) = abs_path.as_ref() {
-                    if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
-                        buffer.file().and_then(|file| file.disk_state().mtime())
-                    }) {
-                        self.thread.update(cx, |thread, _| {
-                            thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime);
-                        })?;
+                }
+            }
+            let full_input =
+                input
+                    .recv()
+                    .await
+                    .map_err(|e| {
+                        let err = StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}"));
+                        log::error!("Failed to receive tool input: {e}");
+                        err
+                    })?;
+
+            let mut state = if let Some(state) = state {
+                state
+            } else {
+                match EditSession::new(
+                    &full_input.path,
+                    &full_input.display_description,
+                    full_input.mode,
+                    &self,
+                    &event_stream,
+                    cx,
+                )
+                .await
+                {
+                    Ok(session) => session,
+                    Err(e) => {
+                        log::error!("Failed to create edit session: {}", e);
+                        return Err(e);
                     }
                 }
-
-                let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
-                let (new_text, unified_diff) = cx
-                    .background_spawn({
-                        let new_snapshot = new_snapshot.clone();
-                        let old_text = old_text.clone();
-                        async move {
-                            let new_text = new_snapshot.text();
-                            let diff = language::unified_diff(&old_text, &new_text);
-                            (new_text, diff)
-                        }
-                    })
-                    .await;
-
-                let output = StreamingEditFileToolOutput::Success {
-                    input_path: input.path,
-                    new_text,
-                    old_text,
-                    diff: unified_diff,
-                };
-
-                Ok(output)
-            }.await;
-            result
-                .map_err(|e| StreamingEditFileToolOutput::Error { error: e.to_string() })
+            };
+            match state.finalize(full_input, &self, &event_stream, cx).await {
+                Ok(output) => Ok(output),
+                Err(e) => {
+                    log::error!("Failed to finalize edit: {}", e);
+                    Err(e)
+                }
+            }
         })
     }
 
@@ -522,173 +423,632 @@ impl AgentTool for StreamingEditFileTool {
     }
 }
 
-fn apply_edits(
-    buffer: &Entity<language::Buffer>,
-    action_log: &Entity<action_log::ActionLog>,
-    edits: &[EditOperation],
-    diff: &Entity<Diff>,
-    event_stream: &ToolCallEventStream,
-    abs_path: &Option<PathBuf>,
-    cx: &mut AsyncApp,
-) -> Result<()> {
-    let mut failed_edits = Vec::new();
-    let mut ambiguous_edits = Vec::new();
-    let mut resolved_edits: Vec<(Range<usize>, String)> = Vec::new();
-
-    // First pass: resolve all edits without applying them
-    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
-    for (index, edit) in edits.iter().enumerate() {
-        let result = resolve_edit(&snapshot, edit);
-
-        match result {
-            Ok(Some((range, new_text))) => {
-                // Reveal the range in the diff view
-                let (start_anchor, end_anchor) = buffer.read_with(cx, |buffer, _cx| {
-                    (
-                        buffer.anchor_before(range.start),
-                        buffer.anchor_after(range.end),
-                    )
-                });
-                diff.update(cx, |card, cx| {
-                    card.reveal_range(start_anchor..end_anchor, cx)
-                });
-                resolved_edits.push((range, new_text));
-            }
-            Ok(None) => {
-                failed_edits.push(index);
-            }
-            Err(ranges) => {
-                ambiguous_edits.push((index, ranges));
-            }
+pub struct EditSession {
+    abs_path: PathBuf,
+    buffer: Entity<Buffer>,
+    old_text: Arc<String>,
+    diff: Entity<Diff>,
+    mode: StreamingEditFileMode,
+    parser: ToolEditParser,
+    pipeline: EditPipeline,
+    _finalize_diff_guard: Deferred<Box<dyn FnOnce()>>,
+}
+
+struct EditPipeline {
+    current_edit: Option<EditPipelineEntry>,
+    content_written: bool,
+}
+
+enum EditPipelineEntry {
+    ResolvingOldText {
+        matcher: StreamingFuzzyMatcher,
+    },
+    StreamingNewText {
+        streaming_diff: StreamingDiff,
+        edit_cursor: usize,
+        reindenter: Reindenter,
+        original_snapshot: text::BufferSnapshot,
+    },
+}
+
+impl EditPipeline {
+    fn new() -> Self {
+        Self {
+            current_edit: None,
+            content_written: false,
         }
     }
 
-    // Check for errors before applying any edits
-    if !failed_edits.is_empty() {
-        let indices = failed_edits
-            .iter()
-            .map(|i| i.to_string())
-            .collect::<Vec<_>>()
-            .join(", ");
-        anyhow::bail!(
-            "Could not find matching text for edit(s) at index(es): {}. \
-             The old_text did not match any content in the file. \
-             Please read the file again to get the current content.",
-            indices
-        );
+    fn ensure_resolving_old_text(&mut self, buffer: &Entity<Buffer>, cx: &mut AsyncApp) {
+        if self.current_edit.is_none() {
+            let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot());
+            self.current_edit = Some(EditPipelineEntry::ResolvingOldText {
+                matcher: StreamingFuzzyMatcher::new(snapshot),
+            });
+        }
     }
+}
 
-    if !ambiguous_edits.is_empty() {
-        let details: Vec<String> = ambiguous_edits
-            .iter()
-            .map(|(index, ranges)| {
-                let lines = ranges
-                    .iter()
-                    .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string())
-                    .collect::<Vec<_>>()
-                    .join(", ");
-                format!("edit {}: matches at lines {}", index, lines)
-            })
-            .collect();
-        anyhow::bail!(
-            "Some edits matched multiple locations in the file:\n{}. \
-             Please provide more context in old_text to uniquely identify the location.",
-            details.join("\n")
+impl EditSession {
+    async fn new(
+        path: &PathBuf,
+        display_description: &str,
+        mode: StreamingEditFileMode,
+        tool: &StreamingEditFileTool,
+        event_stream: &ToolCallEventStream,
+        cx: &mut AsyncApp,
+    ) -> Result<Self, StreamingEditFileToolOutput> {
+        let project_path = cx
+            .update(|cx| resolve_path(mode, &path, &tool.project, cx))
+            .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+
+        let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx))
+        else {
+            return Err(StreamingEditFileToolOutput::error(format!(
+                "Worktree at '{}' does not exist",
+                path.to_string_lossy()
+            )));
+        };
+
+        event_stream.update_fields(
+            ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]),
         );
+
+        cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx))
+            .await
+            .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+
+        let buffer = tool
+            .project
+            .update(cx, |project, cx| project.open_buffer(project_path, cx))
+            .await
+            .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+
+        ensure_buffer_saved(&buffer, &abs_path, tool, cx)?;
+
+        let diff = cx.new(|cx| Diff::new(buffer.clone(), cx));
+        event_stream.update_diff(diff.clone());
+        let finalize_diff_guard = util::defer(Box::new({
+            let diff = diff.downgrade();
+            let mut cx = cx.clone();
+            move || {
+                diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok();
+            }
+        }) as Box<dyn FnOnce()>);
+
+        tool.action_log
+            .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+
+        let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+        let old_text = cx
+            .background_spawn({
+                let old_snapshot = old_snapshot.clone();
+                async move { Arc::new(old_snapshot.text()) }
+            })
+            .await;
+
+        Ok(Self {
+            abs_path,
+            buffer,
+            old_text,
+            diff,
+            mode,
+            parser: ToolEditParser::default(),
+            pipeline: EditPipeline::new(),
+            _finalize_diff_guard: finalize_diff_guard,
+        })
     }
 
-    // Sort edits by position so buffer.edit() can handle offset translation
-    let mut edits_sorted = resolved_edits;
-    edits_sorted.sort_by(|a, b| a.0.start.cmp(&b.0.start));
+    async fn finalize(
+        &mut self,
+        input: StreamingEditFileToolInput,
+        tool: &StreamingEditFileTool,
+        event_stream: &ToolCallEventStream,
+        cx: &mut AsyncApp,
+    ) -> Result<StreamingEditFileToolOutput, StreamingEditFileToolOutput> {
+        let old_text = self.old_text.clone();
 
-    // Emit location for the earliest edit in the file
-    if let Some((first_range, _)) = edits_sorted.first() {
-        if let Some(abs_path) = abs_path.clone() {
-            let line = snapshot.offset_to_point(first_range.start).row;
-            event_stream.update_fields(
-                ToolCallUpdateFields::new()
-                    .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]),
-            );
+        match input.mode {
+            StreamingEditFileMode::Write => {
+                let content = input.content.ok_or_else(|| {
+                    StreamingEditFileToolOutput::error("'content' field is required for write mode")
+                })?;
+
+                let events = self.parser.finalize_content(&content);
+                self.process_events(&events, tool, event_stream, cx)?;
+
+                tool.action_log.update(cx, |log, cx| {
+                    log.buffer_created(self.buffer.clone(), cx);
+                });
+            }
+            StreamingEditFileMode::Edit => {
+                let edits = input.edits.ok_or_else(|| {
+                    StreamingEditFileToolOutput::error("'edits' field is required for edit mode")
+                })?;
+                let events = self.parser.finalize_edits(&edits);
+                self.process_events(&events, tool, event_stream, cx)?;
+
+                if log::log_enabled!(log::Level::Debug) {
+                    log::debug!("Got edits:");
+                    for edit in &edits {
+                        log::debug!(
+                            "  old_text: '{}', new_text: '{}'",
+                            edit.old_text.replace('\n', "\\n"),
+                            edit.new_text.replace('\n', "\\n")
+                        );
+                    }
+                }
+            }
         }
-    }
 
-    // Validate no overlaps (sorted ascending by start)
-    for window in edits_sorted.windows(2) {
-        if let [(earlier_range, _), (later_range, _)] = window
-            && (earlier_range.end > later_range.start || earlier_range.start == later_range.start)
-        {
-            let earlier_start_line = snapshot.offset_to_point(earlier_range.start).row + 1;
-            let earlier_end_line = snapshot.offset_to_point(earlier_range.end).row + 1;
-            let later_start_line = snapshot.offset_to_point(later_range.start).row + 1;
-            let later_end_line = snapshot.offset_to_point(later_range.end).row + 1;
-            anyhow::bail!(
-                "Conflicting edit ranges detected: lines {}-{} conflicts with lines {}-{}. \
-                 Conflicting edit ranges are not allowed, as they would overwrite each other.",
-                earlier_start_line,
-                earlier_end_line,
-                later_start_line,
-                later_end_line,
+        let format_on_save_enabled = self.buffer.read_with(cx, |buffer, cx| {
+            let settings = language_settings::language_settings(
+                buffer.language().map(|l| l.name()),
+                buffer.file(),
+                cx,
             );
-        }
-    }
+            settings.format_on_save != FormatOnSave::Off
+        });
 
-    // Apply all edits in a single batch and report to action_log in the same
-    // effect cycle. This prevents the buffer subscription from treating these
-    // as user edits.
-    if !edits_sorted.is_empty() {
-        cx.update(|cx| {
-            buffer.update(cx, |buffer, cx| {
-                buffer.edit(
-                    edits_sorted
-                        .iter()
-                        .map(|(range, new_text)| (range.clone(), new_text.as_str())),
-                    None,
-                    cx,
-                );
+        if format_on_save_enabled {
+            tool.action_log.update(cx, |log, cx| {
+                log.buffer_edited(self.buffer.clone(), cx);
             });
-            action_log.update(cx, |log, cx| {
-                log.buffer_edited(buffer.clone(), cx);
+
+            let format_task = tool.project.update(cx, |project, cx| {
+                project.format(
+                    HashSet::from_iter([self.buffer.clone()]),
+                    LspFormatTarget::Buffers,
+                    false,
+                    FormatTrigger::Save,
+                    cx,
+                )
             });
+            futures::select! {
+                result = format_task.fuse() => { result.log_err(); },
+                _ = event_stream.cancelled_by_user().fuse() => {
+                    return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
+                }
+            };
+        }
+
+        let save_task = tool.project.update(cx, |project, cx| {
+            project.save_buffer(self.buffer.clone(), cx)
+        });
+        futures::select! {
+            result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; },
+            _ = event_stream.cancelled_by_user().fuse() => {
+                return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
+            }
+        };
+
+        tool.action_log.update(cx, |log, cx| {
+            log.buffer_edited(self.buffer.clone(), cx);
         });
+
+        let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+        let (new_text, unified_diff) = cx
+            .background_spawn({
+                let new_snapshot = new_snapshot.clone();
+                let old_text = old_text.clone();
+                async move {
+                    let new_text = new_snapshot.text();
+                    let diff = language::unified_diff(&old_text, &new_text);
+                    (new_text, diff)
+                }
+            })
+            .await;
+
+        let output = StreamingEditFileToolOutput::Success {
+            input_path: input.path,
+            new_text,
+            old_text: old_text.clone(),
+            diff: unified_diff,
+        };
+        Ok(output)
     }
 
-    Ok(())
+    fn process(
+        &mut self,
+        partial: StreamingEditFileToolPartialInput,
+        tool: &StreamingEditFileTool,
+        event_stream: &ToolCallEventStream,
+        cx: &mut AsyncApp,
+    ) -> Result<(), StreamingEditFileToolOutput> {
+        match &self.mode {
+            StreamingEditFileMode::Write => {
+                if let Some(content) = &partial.content {
+                    let events = self.parser.push_content(content);
+                    self.process_events(&events, tool, event_stream, cx)?;
+                }
+            }
+            StreamingEditFileMode::Edit => {
+                if let Some(edits) = partial.edits {
+                    let events = self.parser.push_edits(&edits);
+                    self.process_events(&events, tool, event_stream, cx)?;
+                }
+            }
+        }
+        Ok(())
+    }
+
+    fn process_events(
+        &mut self,
+        events: &[ToolEditEvent],
+        tool: &StreamingEditFileTool,
+        event_stream: &ToolCallEventStream,
+        cx: &mut AsyncApp,
+    ) -> Result<(), StreamingEditFileToolOutput> {
+        for event in events {
+            match event {
+                ToolEditEvent::ContentChunk { chunk } => {
+                    let (buffer_id, buffer_len) = self
+                        .buffer
+                        .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len()));
+                    let edit_range = if self.pipeline.content_written {
+                        buffer_len..buffer_len
+                    } else {
+                        0..buffer_len
+                    };
+
+                    agent_edit_buffer(
+                        &self.buffer,
+                        [(edit_range, chunk.as_str())],
+                        &tool.action_log,
+                        cx,
+                    );
+                    cx.update(|cx| {
+                        tool.set_agent_location(
+                            self.buffer.downgrade(),
+                            text::Anchor::max_for_buffer(buffer_id),
+                            cx,
+                        );
+                    });
+                    self.pipeline.content_written = true;
+                }
+
+                ToolEditEvent::OldTextChunk {
+                    chunk, done: false, ..
+                } => {
+                    log::debug!("old_text_chunk: done=false, chunk='{}'", chunk);
+                    self.pipeline.ensure_resolving_old_text(&self.buffer, cx);
+
+                    if let Some(EditPipelineEntry::ResolvingOldText { matcher }) =
+                        &mut self.pipeline.current_edit
+                        && !chunk.is_empty()
+                    {
+                        if let Some(match_range) = matcher.push(chunk, None) {
+                            let anchor_range = self.buffer.read_with(cx, |buffer, _cx| {
+                                buffer.anchor_range_between(match_range.clone())
+                            });
+                            self.diff
+                                .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
+
+                            cx.update(|cx| {
+                                let position = self.buffer.read(cx).anchor_before(match_range.end);
+                                tool.set_agent_location(self.buffer.downgrade(), position, cx);
+                            });
+                        }
+                    }
+                }
+
+                ToolEditEvent::OldTextChunk {
+                    edit_index,
+                    chunk,
+                    done: true,
+                } => {
+                    log::debug!("old_text_chunk: done=true, chunk='{}'", chunk);
+
+                    self.pipeline.ensure_resolving_old_text(&self.buffer, cx);
+
+                    let Some(EditPipelineEntry::ResolvingOldText { matcher }) =
+                        &mut self.pipeline.current_edit
+                    else {
+                        continue;
+                    };
+
+                    if !chunk.is_empty() {
+                        matcher.push(chunk, None);
+                    }
+                    let range = extract_match(matcher.finish(), &self.buffer, edit_index, cx)?;
+
+                    let anchor_range = self
+                        .buffer
+                        .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone()));
+                    self.diff
+                        .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
+
+                    let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+
+                    let line = snapshot.offset_to_point(range.start).row;
+                    event_stream.update_fields(
+                        ToolCallUpdateFields::new().locations(vec![
+                            ToolCallLocation::new(&self.abs_path).line(Some(line)),
+                        ]),
+                    );
+
+                    let buffer_indent = snapshot.line_indent_for_row(line);
+                    let query_indent = text::LineIndent::from_iter(
+                        matcher
+                            .query_lines()
+                            .first()
+                            .map(|s| s.as_str())
+                            .unwrap_or("")
+                            .chars(),
+                    );
+                    let indent_delta = compute_indent_delta(buffer_indent, query_indent);
+
+                    let old_text_in_buffer =
+                        snapshot.text_for_range(range.clone()).collect::<String>();
+
+                    log::debug!(
+                        "edit[{}] old_text matched at {}..{}: {:?}",
+                        edit_index,
+                        range.start,
+                        range.end,
+                        old_text_in_buffer,
+                    );
+
+                    let text_snapshot = self
+                        .buffer
+                        .read_with(cx, |buffer, _cx| buffer.text_snapshot());
+                    self.pipeline.current_edit = Some(EditPipelineEntry::StreamingNewText {
+                        streaming_diff: StreamingDiff::new(old_text_in_buffer),
+                        edit_cursor: range.start,
+                        reindenter: Reindenter::new(indent_delta),
+                        original_snapshot: text_snapshot,
+                    });
+
+                    cx.update(|cx| {
+                        let position = self.buffer.read(cx).anchor_before(range.end);
+                        tool.set_agent_location(self.buffer.downgrade(), position, cx);
+                    });
+                }
+
+                ToolEditEvent::NewTextChunk {
+                    chunk, done: false, ..
+                } => {
+                    log::debug!("new_text_chunk: done=false, chunk='{}'", chunk);
+
+                    let Some(EditPipelineEntry::StreamingNewText {
+                        streaming_diff,
+                        edit_cursor,
+                        reindenter,
+                        original_snapshot,
+                        ..
+                    }) = &mut self.pipeline.current_edit
+                    else {
+                        continue;
+                    };
+
+                    let reindented = reindenter.push(chunk);
+                    if reindented.is_empty() {
+                        continue;
+                    }
+
+                    let char_ops = streaming_diff.push_new(&reindented);
+                    apply_char_operations(
+                        &char_ops,
+                        &self.buffer,
+                        original_snapshot,
+                        edit_cursor,
+                        &tool.action_log,
+                        cx,
+                    );
+
+                    let position = original_snapshot.anchor_before(*edit_cursor);
+                    cx.update(|cx| {
+                        tool.set_agent_location(self.buffer.downgrade(), position, cx);
+                    });
+                }
+
+                ToolEditEvent::NewTextChunk {
+                    chunk, done: true, ..
+                } => {
+                    log::debug!("new_text_chunk: done=true, chunk='{}'", chunk);
+
+                    let Some(EditPipelineEntry::StreamingNewText {
+                        mut streaming_diff,
+                        mut edit_cursor,
+                        mut reindenter,
+                        original_snapshot,
+                    }) = self.pipeline.current_edit.take()
+                    else {
+                        continue;
+                    };
+
+                    // Flush any remaining reindent buffer + final chunk.
+                    let mut final_text = reindenter.push(chunk);
+                    final_text.push_str(&reindenter.finish());
+
+                    log::debug!("new_text_chunk: done=true, final_text='{}'", final_text);
+
+                    if !final_text.is_empty() {
+                        let char_ops = streaming_diff.push_new(&final_text);
+                        apply_char_operations(
+                            &char_ops,
+                            &self.buffer,
+                            &original_snapshot,
+                            &mut edit_cursor,
+                            &tool.action_log,
+                            cx,
+                        );
+                    }
+
+                    let remaining_ops = streaming_diff.finish();
+                    apply_char_operations(
+                        &remaining_ops,
+                        &self.buffer,
+                        &original_snapshot,
+                        &mut edit_cursor,
+                        &tool.action_log,
+                        cx,
+                    );
+
+                    let position = original_snapshot.anchor_before(edit_cursor);
+                    cx.update(|cx| {
+                        tool.set_agent_location(self.buffer.downgrade(), position, cx);
+                    });
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+fn apply_char_operations(
+    ops: &[CharOperation],
+    buffer: &Entity<Buffer>,
+    snapshot: &text::BufferSnapshot,
+    edit_cursor: &mut usize,
+    action_log: &Entity<ActionLog>,
+    cx: &mut AsyncApp,
+) {
+    for op in ops {
+        match op {
+            CharOperation::Insert { text } => {
+                let anchor = snapshot.anchor_after(*edit_cursor);
+                agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx);
+            }
+            CharOperation::Delete { bytes } => {
+                let delete_end = *edit_cursor + bytes;
+                let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end);
+                agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx);
+                *edit_cursor = delete_end;
+            }
+            CharOperation::Keep { bytes } => {
+                *edit_cursor += bytes;
+            }
+        }
+    }
+}
+
+fn extract_match(
+    matches: Vec<Range<usize>>,
+    buffer: &Entity<Buffer>,
+    edit_index: &usize,
+    cx: &mut AsyncApp,
+) -> Result<Range<usize>, StreamingEditFileToolOutput> {
+    match matches.len() {
+        0 => Err(StreamingEditFileToolOutput::error(format!(
+            "Could not find matching text for edit at index {}. \
+                The old_text did not match any content in the file. \
+                Please read the file again to get the current content.",
+            edit_index,
+        ))),
+        1 => Ok(matches.into_iter().next().unwrap()),
+        _ => {
+            let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+            let lines = matches
+                .iter()
+                .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string())
+                .collect::<Vec<_>>()
+                .join(", ");
+            Err(StreamingEditFileToolOutput::error(format!(
+                "Edit {} matched multiple locations in the file at lines: {}. \
+                    Please provide more context in old_text to uniquely \
+                    identify the location.",
+                edit_index, lines
+            )))
+        }
+    }
 }
 
-/// Resolves an edit operation by finding the matching text in the buffer.
-/// Returns Ok(Some((range, new_text))) if a unique match is found,
-/// Ok(None) if no match is found, or Err(ranges) if multiple matches are found.
-fn resolve_edit(
-    snapshot: &BufferSnapshot,
-    edit: &EditOperation,
-) -> std::result::Result<Option<(Range<usize>, String)>, Vec<Range<usize>>> {
-    let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
-    matcher.push(&edit.old_text, None);
-    let matches = matcher.finish();
+/// Edits a buffer and reports the edit to the action log in the same effect
+/// cycle. This ensures the action log's subscription handler sees the version
+/// already updated by `buffer_edited`, so it does not misattribute the agent's
+/// edit as a user edit.
+fn agent_edit_buffer<I, S, T>(
+    buffer: &Entity<Buffer>,
+    edits: I,
+    action_log: &Entity<ActionLog>,
+    cx: &mut AsyncApp,
+) where
+    I: IntoIterator<Item = (Range<S>, T)>,
+    S: ToOffset,
+    T: Into<Arc<str>>,
+{
+    cx.update(|cx| {
+        buffer.update(cx, |buffer, cx| {
+            buffer.edit(edits, None, cx);
+        });
+        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+    });
+}
 
-    if matches.is_empty() {
-        return Ok(None);
+fn ensure_buffer_saved(
+    buffer: &Entity<Buffer>,
+    abs_path: &PathBuf,
+    tool: &StreamingEditFileTool,
+    cx: &mut AsyncApp,
+) -> Result<(), StreamingEditFileToolOutput> {
+    let last_read_mtime = tool
+        .action_log
+        .read_with(cx, |log, _| log.file_read_time(abs_path));
+    let check_result = tool.thread.read_with(cx, |thread, cx| {
+        let current = buffer
+            .read(cx)
+            .file()
+            .and_then(|file| file.disk_state().mtime());
+        let dirty = buffer.read(cx).is_dirty();
+        let has_save = thread.has_tool(SaveFileTool::NAME);
+        let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME);
+        (current, dirty, has_save, has_restore)
+    });
+
+    let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else {
+        return Ok(());
+    };
+
+    if is_dirty {
+        let message = match (has_save_tool, has_restore_tool) {
+            (true, true) => {
+                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \
+                         If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \
+                         If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit."
+            }
+            (true, false) => {
+                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \
+                         If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \
+                         If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed."
+            }
+            (false, true) => {
+                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \
+                         If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \
+                         If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit."
+            }
+            (false, false) => {
+                "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \
+                         then ask them to save or revert the file manually and inform you when it's ok to proceed."
+            }
+        };
+        return Err(StreamingEditFileToolOutput::error(message));
     }
 
-    if matches.len() > 1 {
-        return Err(matches);
+    if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) {
+        if current != last_read {
+            return Err(StreamingEditFileToolOutput::error(
+                "The file has been modified since you last read it. \
+                             Please read the file again to get the current state before editing it.",
+            ));
+        }
     }
 
-    let match_range = matches.into_iter().next().expect("checked len above");
-    Ok(Some((match_range, edit.new_text.clone())))
+    Ok(())
 }
 
 fn resolve_path(
-    input: &StreamingEditFileToolInput,
-    project: Entity<Project>,
+    mode: StreamingEditFileMode,
+    path: &PathBuf,
+    project: &Entity<Project>,
     cx: &mut App,
 ) -> Result<ProjectPath> {
     let project = project.read(cx);
 
-    match input.mode {
-        StreamingEditFileMode::Edit | StreamingEditFileMode::Overwrite => {
+    match mode {
+        StreamingEditFileMode::Edit => {
             let path = project
-                .find_project_path(&input.path, cx)
+                .find_project_path(&path, cx)
                 .context("Can't edit file: path not found")?;
 
             let entry = project

crates/agent/src/tools/terminal_tool.rs 🔗

@@ -15,7 +15,7 @@ use std::{
 };
 
 use crate::{
-    AgentTool, ThreadEnvironment, ToolCallEventStream, ToolPermissionDecision,
+    AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput, ToolPermissionDecision,
     decide_permission_from_settings,
 };
 
@@ -85,34 +85,45 @@ impl AgentTool for TerminalTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let working_dir = match working_dir(&input, &self.project, cx) {
-            Ok(dir) => dir,
-            Err(err) => return Task::ready(Err(err.to_string())),
-        };
+        cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| format!("Failed to receive tool input: {e}"))?;
 
-        let settings = AgentSettings::get_global(cx);
-        let decision = decide_permission_from_settings(
-            Self::NAME,
-            std::slice::from_ref(&input.command),
-            settings,
-        );
+            let (working_dir, authorize) = cx.update(|cx| {
+                let working_dir =
+                    working_dir(&input, &self.project, cx).map_err(|err| err.to_string())?;
 
-        let authorize = match decision {
-            ToolPermissionDecision::Allow => None,
-            ToolPermissionDecision::Deny(reason) => {
-                return Task::ready(Err(reason));
-            }
-            ToolPermissionDecision::Confirm => {
-                let context =
-                    crate::ToolPermissionContext::new(Self::NAME, vec![input.command.clone()]);
-                Some(event_stream.authorize(self.initial_title(Ok(input.clone()), cx), context, cx))
-            }
-        };
-        cx.spawn(async move |cx| {
+                let decision = decide_permission_from_settings(
+                    Self::NAME,
+                    std::slice::from_ref(&input.command),
+                    AgentSettings::get_global(cx),
+                );
+
+                let authorize = match decision {
+                    ToolPermissionDecision::Allow => None,
+                    ToolPermissionDecision::Deny(reason) => {
+                        return Err(reason);
+                    }
+                    ToolPermissionDecision::Confirm => {
+                        let context = crate::ToolPermissionContext::new(
+                            Self::NAME,
+                            vec![input.command.clone()],
+                        );
+                        Some(event_stream.authorize(
+                            self.initial_title(Ok(input.clone()), cx),
+                            context,
+                            cx,
+                        ))
+                    }
+                };
+                Ok((working_dir, authorize))
+            })?;
             if let Some(authorize) = authorize {
                 authorize.await.map_err(|e| e.to_string())?;
             }

crates/agent/src/tools/tool_edit_parser.rs 🔗

@@ -0,0 +1,941 @@
+use smallvec::SmallVec;
+
+use crate::{Edit, PartialEdit};
+
+/// Events emitted by `ToolEditParser` as tool call input streams in.
+#[derive(Debug, PartialEq, Eq)]
+pub enum ToolEditEvent {
+    /// A chunk of `old_text` for an edit operation.
+    OldTextChunk {
+        edit_index: usize,
+        chunk: String,
+        done: bool,
+    },
+    /// A chunk of `new_text` for an edit operation.
+    NewTextChunk {
+        edit_index: usize,
+        chunk: String,
+        done: bool,
+    },
+    /// A chunk of content for write/overwrite mode.
+    ContentChunk { chunk: String },
+}
+
+/// Tracks the streaming state of a single edit to detect deltas.
+#[derive(Default, Debug)]
+struct EditStreamState {
+    old_text_emitted_len: usize,
+    old_text_done: bool,
+    new_text_emitted_len: usize,
+    new_text_done: bool,
+}
+
+/// Converts incrementally-growing tool call JSON into a stream of chunk events.
+///
+/// The tool call streaming infrastructure delivers partial JSON objects where
+/// string fields grow over time. This parser compares consecutive partials,
+/// computes the deltas, and emits `ToolEditEvent`s that downstream pipeline
+/// stages (`StreamingFuzzyMatcher` for old_text, `StreamingDiff` for new_text)
+/// can consume incrementally.
+///
+/// Because partial JSON comes through a fixer (`partial-json-fixer`) that
+/// closes incomplete escape sequences, a string can temporarily contain wrong
+/// trailing characters (e.g. a literal `\` instead of `\n`).  We handle this
+/// by holding back trailing backslash characters in non-finalized chunks: if
+/// a partial string ends with `\` (0x5C), that byte is not emitted until the
+/// next partial confirms or corrects it.  This avoids feeding corrupted bytes
+/// to downstream consumers.
+#[derive(Default, Debug)]
+pub struct ToolEditParser {
+    edit_states: Vec<EditStreamState>,
+    content_emitted_len: usize,
+}
+
+impl ToolEditParser {
+    /// Push a new set of partial edits (from edit mode) and return any events.
+    ///
+    /// Each call should pass the *entire current* edits array as seen in the
+    /// latest partial input. The parser will diff it against its internal state
+    /// to produce only the new events.
+    pub fn push_edits(&mut self, edits: &[PartialEdit]) -> SmallVec<[ToolEditEvent; 4]> {
+        let mut events = SmallVec::new();
+
+        for (index, partial) in edits.iter().enumerate() {
+            if index >= self.edit_states.len() {
+                // A new edit appeared — finalize the previous one if there was one.
+                if let Some(previous) = self.finalize_previous_edit(index) {
+                    events.extend(previous);
+                }
+                self.edit_states.push(EditStreamState::default());
+            }
+
+            let state = &mut self.edit_states[index];
+
+            // Process old_text changes.
+            if let Some(old_text) = &partial.old_text
+                && !state.old_text_done
+            {
+                if partial.new_text.is_some() {
+                    // new_text appeared, so old_text is done — emit everything.
+                    let start = state.old_text_emitted_len.min(old_text.len());
+                    let chunk = old_text[start..].to_string();
+                    state.old_text_done = true;
+                    state.old_text_emitted_len = old_text.len();
+                    events.push(ToolEditEvent::OldTextChunk {
+                        edit_index: index,
+                        chunk,
+                        done: true,
+                    });
+                } else {
+                    let safe_end = safe_emit_end(old_text);
+                    if safe_end > state.old_text_emitted_len {
+                        let chunk = old_text[state.old_text_emitted_len..safe_end].to_string();
+                        state.old_text_emitted_len = safe_end;
+                        events.push(ToolEditEvent::OldTextChunk {
+                            edit_index: index,
+                            chunk,
+                            done: false,
+                        });
+                    }
+                }
+            }
+
+            // Process new_text changes.
+            if let Some(new_text) = &partial.new_text
+                && !state.new_text_done
+            {
+                let safe_end = safe_emit_end(new_text);
+                if safe_end > state.new_text_emitted_len {
+                    let chunk = new_text[state.new_text_emitted_len..safe_end].to_string();
+                    state.new_text_emitted_len = safe_end;
+                    events.push(ToolEditEvent::NewTextChunk {
+                        edit_index: index,
+                        chunk,
+                        done: false,
+                    });
+                }
+            }
+        }
+
+        events
+    }
+
+    /// Push new content and return any events.
+    ///
+    /// Each call should pass the *entire current* content string. The parser
+    /// will diff it against its internal state to emit only the new chunk.
+    pub fn push_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> {
+        let mut events = SmallVec::new();
+
+        let safe_end = safe_emit_end(content);
+        if safe_end > self.content_emitted_len {
+            let chunk = content[self.content_emitted_len..safe_end].to_string();
+            self.content_emitted_len = safe_end;
+            events.push(ToolEditEvent::ContentChunk { chunk });
+        }
+
+        events
+    }
+
+    /// Finalize all edits with the complete input. This emits `done: true`
+    /// events for any in-progress old_text or new_text that hasn't been
+    /// finalized yet.
+    ///
+    /// `final_edits` should be the fully deserialized final edits array. The
+    /// parser compares against its tracked state and emits any remaining deltas
+    /// with `done: true`.
+    pub fn finalize_edits(&mut self, edits: &[Edit]) -> SmallVec<[ToolEditEvent; 4]> {
+        let mut events = SmallVec::new();
+
+        for (index, edit) in edits.iter().enumerate() {
+            if index >= self.edit_states.len() {
+                // This edit was never seen in partials — finalize the previous
+                // edit here; this edit's own text is emitted in full by the
+                // `old_text_done` / `new_text_done` blocks below.
+                if let Some(previous) = self.finalize_previous_edit(index) {
+                    events.extend(previous);
+                }
+                self.edit_states.push(EditStreamState::default());
+            }
+
+            let state = &mut self.edit_states[index];
+
+            if !state.old_text_done {
+                let start = state.old_text_emitted_len.min(edit.old_text.len());
+                let chunk = edit.old_text[start..].to_string();
+                state.old_text_done = true;
+                state.old_text_emitted_len = edit.old_text.len();
+                events.push(ToolEditEvent::OldTextChunk {
+                    edit_index: index,
+                    chunk,
+                    done: true,
+                });
+            }
+
+            if !state.new_text_done {
+                let start = state.new_text_emitted_len.min(edit.new_text.len());
+                let chunk = edit.new_text[start..].to_string();
+                state.new_text_done = true;
+                state.new_text_emitted_len = edit.new_text.len();
+                events.push(ToolEditEvent::NewTextChunk {
+                    edit_index: index,
+                    chunk,
+                    done: true,
+                });
+            }
+        }
+
+        events
+    }
+
+    /// Finalize content with the complete input.
+    pub fn finalize_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> {
+        let mut events = SmallVec::new();
+
+        let start = self.content_emitted_len.min(content.len());
+        if content.len() > start {
+            let chunk = content[start..].to_string();
+            self.content_emitted_len = content.len();
+            events.push(ToolEditEvent::ContentChunk { chunk });
+        }
+
+        events
+    }
+
+    /// When a new edit appears at `new_index`, finalize the edit at
+    /// `new_index - 1` by emitting `OldTextChunk { done: true }` and/or
+    /// `NewTextChunk { done: true }` for whichever of its parts have not
+    /// been finalized yet.
+    fn finalize_previous_edit(&mut self, new_index: usize) -> Option<SmallVec<[ToolEditEvent; 2]>> {
+        if new_index == 0 || self.edit_states.is_empty() {
+            return None;
+        }
+
+        let previous_index = new_index - 1;
+        if previous_index >= self.edit_states.len() {
+            return None;
+        }
+
+        let state = &mut self.edit_states[previous_index];
+        let mut events = SmallVec::new();
+
+        // If old_text was never finalized, finalize it now with an empty done chunk.
+        if !state.old_text_done {
+            state.old_text_done = true;
+            events.push(ToolEditEvent::OldTextChunk {
+                edit_index: previous_index,
+                chunk: String::new(),
+                done: true,
+            });
+        }
+
+        // Emit a done event for new_text if not already finalized.
+        if !state.new_text_done {
+            state.new_text_done = true;
+            events.push(ToolEditEvent::NewTextChunk {
+                edit_index: previous_index,
+                chunk: String::new(),
+                done: true,
+            });
+        }
+
+        Some(events)
+    }
+}
+
+/// Returns the byte position up to which it is safe to emit from a partial
+/// string.  If the string ends with a backslash (`\`, 0x5C), that byte is
+/// held back because it may be an artifact of the partial JSON fixer closing
+/// an incomplete escape sequence (e.g. turning a half-received `\n` into `\\`).
+/// The next partial will reveal the correct character.
+fn safe_emit_end(text: &str) -> usize {
+    if text.as_bytes().last() == Some(&b'\\') {
+        text.len() - 1
+    } else {
+        text.len()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_single_edit_streamed_incrementally() {
+        let mut parser = ToolEditParser::default();
+
+        // old_text arrives in chunks: "hell" → "hello w" → "hello world"
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hell".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "hell".into(),
+                done: false,
+            }]
+        );
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hello w".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "o w".into(),
+                done: false,
+            }]
+        );
+
+        // new_text appears → old_text finalizes
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hello world".into()),
+            new_text: Some("good".into()),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: "orld".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "good".into(),
+                    done: false,
+                },
+            ]
+        );
+
+        // new_text grows
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hello world".into()),
+            new_text: Some("goodbye world".into()),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::NewTextChunk {
+                edit_index: 0,
+                chunk: "bye world".into(),
+                done: false,
+            }]
+        );
+
+        // Finalize
+        let events = parser.finalize_edits(&[Edit {
+            old_text: "hello world".into(),
+            new_text: "goodbye world".into(),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::NewTextChunk {
+                edit_index: 0,
+                chunk: "".into(),
+                done: true,
+            }]
+        );
+    }
+
+    #[test]
+    fn test_multiple_edits_sequential() {
+        let mut parser = ToolEditParser::default();
+
+        // First edit streams in
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("first old".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "first old".into(),
+                done: false,
+            }]
+        );
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("first old".into()),
+            new_text: Some("first new".into()),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: "".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "first new".into(),
+                    done: false,
+                },
+            ]
+        );
+
+        // Second edit appears → first edit's new_text is finalized
+        let events = parser.push_edits(&[
+            PartialEdit {
+                old_text: Some("first old".into()),
+                new_text: Some("first new".into()),
+            },
+            PartialEdit {
+                old_text: Some("second".into()),
+                new_text: None,
+            },
+        ]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "".into(),
+                    done: true,
+                },
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 1,
+                    chunk: "second".into(),
+                    done: false,
+                },
+            ]
+        );
+
+        // Finalize everything
+        let events = parser.finalize_edits(&[
+            Edit {
+                old_text: "first old".into(),
+                new_text: "first new".into(),
+            },
+            Edit {
+                old_text: "second old".into(),
+                new_text: "second new".into(),
+            },
+        ]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 1,
+                    chunk: " old".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 1,
+                    chunk: "second new".into(),
+                    done: true,
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn test_content_streamed_incrementally() {
+        let mut parser = ToolEditParser::default();
+
+        let events = parser.push_content("hello");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk {
+                chunk: "hello".into(),
+            }]
+        );
+
+        let events = parser.push_content("hello world");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk {
+                chunk: " world".into(),
+            }]
+        );
+
+        // No change
+        let events = parser.push_content("hello world");
+        assert!(events.is_empty());
+
+        let events = parser.push_content("hello world!");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk { chunk: "!".into() }]
+        );
+
+        // Finalize with no additional content
+        let events = parser.finalize_content("hello world!");
+        assert!(events.is_empty());
+    }
+
+    #[test]
+    fn test_finalize_content_with_remaining() {
+        let mut parser = ToolEditParser::default();
+
+        parser.push_content("partial");
+        let events = parser.finalize_content("partial content here");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk {
+                chunk: " content here".into(),
+            }]
+        );
+    }
+
+    #[test]
+    fn test_content_trailing_backslash_held_back() {
+        let mut parser = ToolEditParser::default();
+
+        // Partial JSON fixer turns incomplete \n into \\ (literal backslash).
+        // The trailing backslash is held back.
+        let events = parser.push_content("hello,\\");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk {
+                chunk: "hello,".into(),
+            }]
+        );
+
+        // Next partial corrects the escape to an actual newline.
+        // The held-back byte was wrong; the correct newline is emitted.
+        let events = parser.push_content("hello,\n");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk { chunk: "\n".into() }]
+        );
+
+        // Normal growth.
+        let events = parser.push_content("hello,\nworld");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk {
+                chunk: "world".into(),
+            }]
+        );
+    }
+
+    #[test]
+    fn test_content_finalize_with_trailing_backslash() {
+        let mut parser = ToolEditParser::default();
+
+        // Stream a partial with a fixer-corrupted trailing backslash.
+        // The backslash is held back.
+        parser.push_content("abc\\");
+
+        // Finalize reveals the correct character.
+        let events = parser.finalize_content("abc\n");
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::ContentChunk { chunk: "\n".into() }]
+        );
+    }
+
+    #[test]
+    fn test_no_partials_direct_finalize() {
+        let mut parser = ToolEditParser::default();
+
+        let events = parser.finalize_edits(&[Edit {
+            old_text: "old".into(),
+            new_text: "new".into(),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: "old".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "new".into(),
+                    done: true,
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn test_no_partials_direct_finalize_multiple() {
+        let mut parser = ToolEditParser::default();
+
+        let events = parser.finalize_edits(&[
+            Edit {
+                old_text: "first old".into(),
+                new_text: "first new".into(),
+            },
+            Edit {
+                old_text: "second old".into(),
+                new_text: "second new".into(),
+            },
+        ]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: "first old".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "first new".into(),
+                    done: true,
+                },
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 1,
+                    chunk: "second old".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 1,
+                    chunk: "second new".into(),
+                    done: true,
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn test_old_text_no_growth() {
+        let mut parser = ToolEditParser::default();
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("same".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "same".into(),
+                done: false,
+            }]
+        );
+
+        // Same old_text, no new_text → no events
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("same".into()),
+            new_text: None,
+        }]);
+        assert!(events.is_empty());
+    }
+
+    #[test]
+    fn test_old_text_none_then_appears() {
+        let mut parser = ToolEditParser::default();
+
+        // Edit exists but old_text is None (field hasn't arrived yet)
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: None,
+            new_text: None,
+        }]);
+        assert!(events.is_empty());
+
+        // old_text appears
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("text".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "text".into(),
+                done: false,
+            }]
+        );
+    }
+
+    #[test]
+    fn test_empty_old_text_with_new_text() {
+        let mut parser = ToolEditParser::default();
+
+        // old_text is empty, new_text appears immediately
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("".into()),
+            new_text: Some("inserted".into()),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: "".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "inserted".into(),
+                    done: false,
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn test_three_edits_streamed() {
+        let mut parser = ToolEditParser::default();
+
+        // Stream first edit
+        parser.push_edits(&[PartialEdit {
+            old_text: Some("a".into()),
+            new_text: Some("A".into()),
+        }]);
+
+        // Second edit appears
+        parser.push_edits(&[
+            PartialEdit {
+                old_text: Some("a".into()),
+                new_text: Some("A".into()),
+            },
+            PartialEdit {
+                old_text: Some("b".into()),
+                new_text: Some("B".into()),
+            },
+        ]);
+
+        // Third edit appears
+        let events = parser.push_edits(&[
+            PartialEdit {
+                old_text: Some("a".into()),
+                new_text: Some("A".into()),
+            },
+            PartialEdit {
+                old_text: Some("b".into()),
+                new_text: Some("B".into()),
+            },
+            PartialEdit {
+                old_text: Some("c".into()),
+                new_text: None,
+            },
+        ]);
+
+        // Should finalize edit 1 (index=1) and start edit 2 (index=2)
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 1,
+                    chunk: "".into(),
+                    done: true,
+                },
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 2,
+                    chunk: "c".into(),
+                    done: false,
+                },
+            ]
+        );
+
+        // Finalize
+        let events = parser.finalize_edits(&[
+            Edit {
+                old_text: "a".into(),
+                new_text: "A".into(),
+            },
+            Edit {
+                old_text: "b".into(),
+                new_text: "B".into(),
+            },
+            Edit {
+                old_text: "c".into(),
+                new_text: "C".into(),
+            },
+        ]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 2,
+                    chunk: "".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 2,
+                    chunk: "C".into(),
+                    done: true,
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn test_finalize_with_unseen_old_text() {
+        let mut parser = ToolEditParser::default();
+
+        // Only saw partial old_text, never saw new_text in partials
+        parser.push_edits(&[PartialEdit {
+            old_text: Some("partial".into()),
+            new_text: None,
+        }]);
+
+        let events = parser.finalize_edits(&[Edit {
+            old_text: "partial old text".into(),
+            new_text: "replacement".into(),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: " old text".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "replacement".into(),
+                    done: true,
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn test_finalize_with_partially_seen_new_text() {
+        let mut parser = ToolEditParser::default();
+
+        parser.push_edits(&[PartialEdit {
+            old_text: Some("old".into()),
+            new_text: Some("partial".into()),
+        }]);
+
+        let events = parser.finalize_edits(&[Edit {
+            old_text: "old".into(),
+            new_text: "partial new text".into(),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::NewTextChunk {
+                edit_index: 0,
+                chunk: " new text".into(),
+                done: true,
+            }]
+        );
+    }
+
+    #[test]
+    fn test_repeated_pushes_with_no_change() {
+        let mut parser = ToolEditParser::default();
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("stable".into()),
+            new_text: Some("also stable".into()),
+        }]);
+        assert_eq!(events.len(), 2); // old done + new chunk
+
+        // Push the exact same data again
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("stable".into()),
+            new_text: Some("also stable".into()),
+        }]);
+        assert!(events.is_empty());
+
+        // And again
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("stable".into()),
+            new_text: Some("also stable".into()),
+        }]);
+        assert!(events.is_empty());
+    }
+
+    #[test]
+    fn test_old_text_trailing_backslash_held_back() {
+        let mut parser = ToolEditParser::default();
+
+        // Partial-json-fixer produces a literal backslash when the JSON stream
+        // cuts in the middle of an escape sequence like \n. The parser holds
+        // back the trailing backslash instead of emitting it.
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hello,\\".into()), // fixer closed incomplete \n as \\
+            new_text: None,
+        }]);
+        // The trailing `\` is held back — only "hello," is emitted.
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "hello,".into(),
+                done: false,
+            }]
+        );
+
+        // Next partial: the fixer corrects the escape to \n.
+        // The held-back byte was wrong, but we never emitted it. Now the
+        // correct newline at that position is emitted normally.
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hello,\n".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "\n".into(),
+                done: false,
+            }]
+        );
+
+        // Continue normally.
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("hello,\nworld".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "world".into(),
+                done: false,
+            }]
+        );
+    }
+
+    #[test]
+    fn test_multiline_old_and_new_text() {
+        let mut parser = ToolEditParser::default();
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("line1\nline2".into()),
+            new_text: None,
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::OldTextChunk {
+                edit_index: 0,
+                chunk: "line1\nline2".into(),
+                done: false,
+            }]
+        );
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("line1\nline2\nline3".into()),
+            new_text: Some("LINE1\n".into()),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[
+                ToolEditEvent::OldTextChunk {
+                    edit_index: 0,
+                    chunk: "\nline3".into(),
+                    done: true,
+                },
+                ToolEditEvent::NewTextChunk {
+                    edit_index: 0,
+                    chunk: "LINE1\n".into(),
+                    done: false,
+                },
+            ]
+        );
+
+        let events = parser.push_edits(&[PartialEdit {
+            old_text: Some("line1\nline2\nline3".into()),
+            new_text: Some("LINE1\nLINE2\nLINE3".into()),
+        }]);
+        assert_eq!(
+            events.as_slice(),
+            &[ToolEditEvent::NewTextChunk {
+                edit_index: 0,
+                chunk: "LINE2\nLINE3".into(),
+                done: false,
+            }]
+        );
+    }
+}

crates/agent/src/tools/web_search_tool.rs 🔗

@@ -1,14 +1,15 @@
 use std::sync::Arc;
 
 use crate::{
-    AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_from_settings,
+    AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision,
+    decide_permission_from_settings,
 };
 use agent_client_protocol as acp;
 use agent_settings::AgentSettings;
 use anyhow::Result;
 use cloud_llm_client::WebSearchResponse;
 use futures::FutureExt as _;
-use gpui::{App, AppContext, Task};
+use gpui::{App, Task};
 use language_model::{
     LanguageModelProviderId, LanguageModelToolResultContent, ZED_CLOUD_PROVIDER_ID,
 };
@@ -73,41 +74,51 @@ impl AgentTool for WebSearchTool {
 
     fn run(
         self: Arc<Self>,
-        input: Self::Input,
+        input: ToolInput<Self::Input>,
         event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<Self::Output, Self::Output>> {
-        let settings = AgentSettings::get_global(cx);
-        let decision = decide_permission_from_settings(
-            Self::NAME,
-            std::slice::from_ref(&input.query),
-            settings,
-        );
-
-        let authorize = match decision {
-            ToolPermissionDecision::Allow => None,
-            ToolPermissionDecision::Deny(reason) => {
-                return Task::ready(Err(WebSearchToolOutput::Error { error: reason }));
-            }
-            ToolPermissionDecision::Confirm => {
-                let context =
-                    crate::ToolPermissionContext::new(Self::NAME, vec![input.query.clone()]);
-                Some(event_stream.authorize(
-                    format!("Search the web for {}", MarkdownInlineCode(&input.query)),
-                    context,
-                    cx,
-                ))
-            }
-        };
+        cx.spawn(async move |cx| {
+            let input = input
+                .recv()
+                .await
+                .map_err(|e| WebSearchToolOutput::Error {
+                    error: format!("Failed to receive tool input: {e}"),
+                })?;
+
+            let (authorize, search_task) = cx.update(|cx| {
+                let decision = decide_permission_from_settings(
+                    Self::NAME,
+                    std::slice::from_ref(&input.query),
+                    AgentSettings::get_global(cx),
+                );
+
+                let authorize = match decision {
+                    ToolPermissionDecision::Allow => None,
+                    ToolPermissionDecision::Deny(reason) => {
+                        return Err(WebSearchToolOutput::Error { error: reason });
+                    }
+                    ToolPermissionDecision::Confirm => {
+                        let context =
+                            crate::ToolPermissionContext::new(Self::NAME, vec![input.query.clone()]);
+                        Some(event_stream.authorize(
+                            format!("Search the web for {}", MarkdownInlineCode(&input.query)),
+                            context,
+                            cx,
+                        ))
+                    }
+                };
+
+                let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else {
+                    return Err(WebSearchToolOutput::Error {
+                        error: "Web search is not available.".to_string(),
+                    });
+                };
 
-        let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else {
-            return Task::ready(Err(WebSearchToolOutput::Error {
-                error: "Web search is not available.".to_string(),
-            }));
-        };
+                let search_task = provider.search(input.query, cx);
+                Ok((authorize, search_task))
+            })?;
 
-        let search_task = provider.search(input.query, cx);
-        cx.background_spawn(async move {
             if let Some(authorize) = authorize {
                 authorize.await.map_err(|e| WebSearchToolOutput::Error { error: e.to_string() })?;
             }

crates/agent_servers/src/acp.rs 🔗

@@ -10,7 +10,7 @@ use collections::HashMap;
 use futures::AsyncBufReadExt as _;
 use futures::io::BufReader;
 use project::Project;
-use project::agent_server_store::AgentServerCommand;
+use project::agent_server_store::{AgentServerCommand, GEMINI_NAME};
 use serde::Deserialize;
 use settings::Settings as _;
 use task::ShellBuilder;
@@ -36,6 +36,7 @@ pub struct UnsupportedVersion;
 
 pub struct AcpConnection {
     server_name: SharedString,
+    display_name: SharedString,
     telemetry_id: SharedString,
     connection: Rc<acp::ClientSideConnection>,
     sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
@@ -44,7 +45,6 @@ pub struct AcpConnection {
     default_mode: Option<acp::SessionModeId>,
     default_model: Option<acp::ModelId>,
     default_config_options: HashMap<String, String>,
-    root_dir: PathBuf,
     child: Child,
     session_list: Option<Rc<AcpSessionList>>,
     _io_task: Task<Result<(), acp::Error>>,
@@ -158,22 +158,20 @@ impl AgentSessionList for AcpSessionList {
 
 pub async fn connect(
     server_name: SharedString,
+    display_name: SharedString,
     command: AgentServerCommand,
-    root_dir: &Path,
     default_mode: Option<acp::SessionModeId>,
     default_model: Option<acp::ModelId>,
     default_config_options: HashMap<String, String>,
-    is_remote: bool,
     cx: &mut AsyncApp,
 ) -> Result<Rc<dyn AgentConnection>> {
     let conn = AcpConnection::stdio(
         server_name,
+        display_name,
         command.clone(),
-        root_dir,
         default_mode,
         default_model,
         default_config_options,
-        is_remote,
         cx,
     )
     .await?;
@@ -185,12 +183,11 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1
 impl AcpConnection {
     pub async fn stdio(
         server_name: SharedString,
+        display_name: SharedString,
         command: AgentServerCommand,
-        root_dir: &Path,
         default_mode: Option<acp::SessionModeId>,
         default_model: Option<acp::ModelId>,
         default_config_options: HashMap<String, String>,
-        is_remote: bool,
         cx: &mut AsyncApp,
     ) -> Result<Self> {
         let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone());
@@ -198,9 +195,6 @@ impl AcpConnection {
         let mut child =
             builder.build_std_command(Some(command.path.display().to_string()), &command.args);
         child.envs(command.env.iter().flatten());
-        if !is_remote {
-            child.current_dir(root_dir);
-        }
         let mut child = Child::spawn(child, Stdio::piped(), Stdio::piped(), Stdio::piped())?;
 
         let stdout = child.stdout.take().context("Failed to take stdout")?;
@@ -325,11 +319,30 @@ impl AcpConnection {
             None
         };
 
+        // TODO: Remove this override once Google team releases their official auth methods
+        let auth_methods = if server_name == GEMINI_NAME {
+            let mut args = command.args.clone();
+            args.retain(|a| a != "--experimental-acp");
+            let value = serde_json::json!({
+                "label": "gemini /auth",
+                "command": command.path.to_string_lossy().into_owned(),
+                "args": args,
+                "env": command.env.clone().unwrap_or_default(),
+            });
+            let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]);
+            vec![
+                acp::AuthMethod::new("spawn-gemini-cli", "Login")
+                    .description("Login with your Google or Vertex AI account")
+                    .meta(meta),
+            ]
+        } else {
+            response.auth_methods
+        };
         Ok(Self {
-            auth_methods: response.auth_methods,
-            root_dir: root_dir.to_owned(),
+            auth_methods,
             connection,
             server_name,
+            display_name,
             telemetry_id,
             sessions,
             agent_capabilities: response.agent_capabilities,
@@ -347,10 +360,6 @@ impl AcpConnection {
     pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities {
         &self.agent_capabilities.prompt_capabilities
     }
-
-    pub fn root_dir(&self) -> &Path {
-        &self.root_dir
-    }
 }
 
 impl Drop for AcpConnection {
@@ -550,7 +559,7 @@ impl AgentConnection for AcpConnection {
             let thread: Entity<AcpThread> = cx.new(|cx| {
                 AcpThread::new(
                     None,
-                    self.server_name.clone(),
+                    self.display_name.clone(),
                     self.clone(),
                     project,
                     action_log,
@@ -603,10 +612,14 @@ impl AgentConnection for AcpConnection {
         let cwd = cwd.to_path_buf();
         let mcp_servers = mcp_servers_for_project(&project, cx);
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
+        let title = session
+            .title
+            .clone()
+            .unwrap_or_else(|| self.display_name.clone());
         let thread: Entity<AcpThread> = cx.new(|cx| {
             AcpThread::new(
                 None,
-                self.server_name.clone(),
+                title,
                 self.clone(),
                 project,
                 action_log,
@@ -676,10 +689,14 @@ impl AgentConnection for AcpConnection {
         let cwd = cwd.to_path_buf();
         let mcp_servers = mcp_servers_for_project(&project, cx);
         let action_log = cx.new(|_| ActionLog::new(project.clone()));
+        let title = session
+            .title
+            .clone()
+            .unwrap_or_else(|| self.display_name.clone());
         let thread: Entity<AcpThread> = cx.new(|cx| {
             AcpThread::new(
                 None,
-                self.server_name.clone(),
+                title,
                 self.clone(),
                 project,
                 action_log,

crates/agent_servers/src/agent_servers.rs 🔗

@@ -1,19 +1,13 @@
 mod acp;
-mod claude;
-mod codex;
 mod custom;
-mod gemini;
 
 #[cfg(any(test, feature = "test-support"))]
 pub mod e2e_tests;
 
-pub use claude::*;
 use client::ProxySettings;
-pub use codex::*;
 use collections::{HashMap, HashSet};
 pub use custom::*;
 use fs::Fs;
-pub use gemini::*;
 use http_client::read_no_proxy_from_env;
 use project::agent_server_store::AgentServerStore;
 
@@ -22,7 +16,7 @@ use anyhow::Result;
 use gpui::{App, AppContext, Entity, SharedString, Task};
 use project::Project;
 use settings::SettingsStore;
-use std::{any::Any, path::Path, rc::Rc, sync::Arc};
+use std::{any::Any, rc::Rc, sync::Arc};
 
 pub use acp::AcpConnection;
 
@@ -58,10 +52,9 @@ pub trait AgentServer: Send {
     fn name(&self) -> SharedString;
     fn connect(
         &self,
-        root_dir: Option<&Path>,
         delegate: AgentServerDelegate,
         cx: &mut App,
-    ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>>;
+    ) -> Task<Result<Rc<dyn AgentConnection>>>;
 
     fn into_any(self: Rc<Self>) -> Rc<dyn Any>;
 

crates/agent_servers/src/claude.rs 🔗

@@ -1,264 +0,0 @@
-use agent_client_protocol as acp;
-use collections::HashSet;
-use fs::Fs;
-use settings::{SettingsStore, update_settings_file};
-use std::path::Path;
-use std::rc::Rc;
-use std::sync::Arc;
-use std::{any::Any, path::PathBuf};
-
-use anyhow::{Context as _, Result};
-use gpui::{App, AppContext as _, SharedString, Task};
-use project::agent_server_store::{AllAgentServersSettings, CLAUDE_AGENT_NAME};
-
-use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
-use acp_thread::AgentConnection;
-
-#[derive(Clone)]
-pub struct ClaudeCode;
-
-pub struct AgentServerLoginCommand {
-    pub path: PathBuf,
-    pub arguments: Vec<String>,
-}
-
-impl AgentServer for ClaudeCode {
-    fn name(&self) -> SharedString {
-        "Claude Agent".into()
-    }
-
-    fn logo(&self) -> ui::IconName {
-        ui::IconName::AiClaude
-    }
-
-    fn default_mode(&self, cx: &App) -> Option<acp::SessionModeId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).claude.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new))
-    }
-
-    fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
-        update_settings_file(fs, cx, |settings, _| {
-            settings
-                .agent_servers
-                .get_or_insert_default()
-                .claude
-                .get_or_insert_default()
-                .default_mode = mode_id.map(|m| m.to_string())
-        });
-    }
-
-    fn default_model(&self, cx: &App) -> Option<acp::ModelId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).claude.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.default_model.clone().map(acp::ModelId::new))
-    }
-
-    fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {
-        update_settings_file(fs, cx, |settings, _| {
-            settings
-                .agent_servers
-                .get_or_insert_default()
-                .claude
-                .get_or_insert_default()
-                .default_model = model_id.map(|m| m.to_string())
-        });
-    }
-
-    fn favorite_model_ids(&self, cx: &mut App) -> HashSet<acp::ModelId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).claude.clone()
-        });
-
-        settings
-            .as_ref()
-            .map(|s| {
-                s.favorite_models
-                    .iter()
-                    .map(|id| acp::ModelId::new(id.clone()))
-                    .collect()
-            })
-            .unwrap_or_default()
-    }
-
-    fn toggle_favorite_model(
-        &self,
-        model_id: acp::ModelId,
-        should_be_favorite: bool,
-        fs: Arc<dyn Fs>,
-        cx: &App,
-    ) {
-        update_settings_file(fs, cx, move |settings, _| {
-            let favorite_models = &mut settings
-                .agent_servers
-                .get_or_insert_default()
-                .claude
-                .get_or_insert_default()
-                .favorite_models;
-
-            let model_id_str = model_id.to_string();
-            if should_be_favorite {
-                if !favorite_models.contains(&model_id_str) {
-                    favorite_models.push(model_id_str);
-                }
-            } else {
-                favorite_models.retain(|id| id != &model_id_str);
-            }
-        });
-    }
-
-    fn default_config_option(&self, config_id: &str, cx: &App) -> Option<String> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).claude.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.default_config_options.get(config_id).cloned())
-    }
-
-    fn set_default_config_option(
-        &self,
-        config_id: &str,
-        value_id: Option<&str>,
-        fs: Arc<dyn Fs>,
-        cx: &mut App,
-    ) {
-        let config_id = config_id.to_string();
-        let value_id = value_id.map(|s| s.to_string());
-        update_settings_file(fs, cx, move |settings, _| {
-            let config_options = &mut settings
-                .agent_servers
-                .get_or_insert_default()
-                .claude
-                .get_or_insert_default()
-                .default_config_options;
-
-            if let Some(value) = value_id.clone() {
-                config_options.insert(config_id.clone(), value);
-            } else {
-                config_options.remove(&config_id);
-            }
-        });
-    }
-
-    fn favorite_config_option_value_ids(
-        &self,
-        config_id: &acp::SessionConfigId,
-        cx: &mut App,
-    ) -> HashSet<acp::SessionConfigValueId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).claude.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.favorite_config_option_values.get(config_id.0.as_ref()))
-            .map(|values| {
-                values
-                    .iter()
-                    .cloned()
-                    .map(acp::SessionConfigValueId::new)
-                    .collect()
-            })
-            .unwrap_or_default()
-    }
-
-    fn toggle_favorite_config_option_value(
-        &self,
-        config_id: acp::SessionConfigId,
-        value_id: acp::SessionConfigValueId,
-        should_be_favorite: bool,
-        fs: Arc<dyn Fs>,
-        cx: &App,
-    ) {
-        let config_id = config_id.to_string();
-        let value_id = value_id.to_string();
-
-        update_settings_file(fs, cx, move |settings, _| {
-            let favorites = &mut settings
-                .agent_servers
-                .get_or_insert_default()
-                .claude
-                .get_or_insert_default()
-                .favorite_config_option_values;
-
-            let entry = favorites.entry(config_id.clone()).or_insert_with(Vec::new);
-
-            if should_be_favorite {
-                if !entry.iter().any(|v| v == &value_id) {
-                    entry.push(value_id.clone());
-                }
-            } else {
-                entry.retain(|v| v != &value_id);
-                if entry.is_empty() {
-                    favorites.remove(&config_id);
-                }
-            }
-        });
-    }
-
-    fn connect(
-        &self,
-        root_dir: Option<&Path>,
-        delegate: AgentServerDelegate,
-        cx: &mut App,
-    ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
-        let name = self.name();
-        let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
-        let is_remote = delegate.project.read(cx).is_via_remote_server();
-        let store = delegate.store.downgrade();
-        let extra_env = load_proxy_env(cx);
-        let default_mode = self.default_mode(cx);
-        let default_model = self.default_model(cx);
-        let default_config_options = cx.read_global(|settings: &SettingsStore, _| {
-            settings
-                .get::<AllAgentServersSettings>(None)
-                .claude
-                .as_ref()
-                .map(|s| s.default_config_options.clone())
-                .unwrap_or_default()
-        });
-
-        cx.spawn(async move |cx| {
-            let (command, root_dir, login) = store
-                .update(cx, |store, cx| {
-                    let agent = store
-                        .get_external_agent(&CLAUDE_AGENT_NAME.into())
-                        .context("Claude Agent is not registered")?;
-                    anyhow::Ok(agent.get_command(
-                        root_dir.as_deref(),
-                        extra_env,
-                        delegate.status_tx,
-                        delegate.new_version_available,
-                        &mut cx.to_async(),
-                    ))
-                })??
-                .await?;
-            let connection = crate::acp::connect(
-                name,
-                command,
-                root_dir.as_ref(),
-                default_mode,
-                default_model,
-                default_config_options,
-                is_remote,
-                cx,
-            )
-            .await?;
-            Ok((connection, login))
-        })
-    }
-
-    fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
-        self
-    }
-}

crates/agent_servers/src/codex.rs 🔗

@@ -1,275 +0,0 @@
-use std::rc::Rc;
-use std::sync::Arc;
-use std::{any::Any, path::Path};
-
-use acp_thread::AgentConnection;
-use agent_client_protocol as acp;
-use anyhow::{Context as _, Result};
-use collections::HashSet;
-use fs::Fs;
-use gpui::{App, AppContext as _, SharedString, Task};
-use project::agent_server_store::{AllAgentServersSettings, CODEX_NAME};
-use settings::{SettingsStore, update_settings_file};
-
-use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
-
-#[derive(Clone)]
-pub struct Codex;
-
-const CODEX_API_KEY_VAR_NAME: &str = "CODEX_API_KEY";
-const OPEN_AI_API_KEY_VAR_NAME: &str = "OPEN_AI_API_KEY";
-
-impl AgentServer for Codex {
-    fn name(&self) -> SharedString {
-        "Codex".into()
-    }
-
-    fn logo(&self) -> ui::IconName {
-        ui::IconName::AiOpenAi
-    }
-
-    fn default_mode(&self, cx: &App) -> Option<acp::SessionModeId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).codex.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new))
-    }
-
-    fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
-        update_settings_file(fs, cx, |settings, _| {
-            settings
-                .agent_servers
-                .get_or_insert_default()
-                .codex
-                .get_or_insert_default()
-                .default_mode = mode_id.map(|m| m.to_string())
-        });
-    }
-
-    fn default_model(&self, cx: &App) -> Option<acp::ModelId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).codex.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.default_model.clone().map(acp::ModelId::new))
-    }
-
-    fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {
-        update_settings_file(fs, cx, |settings, _| {
-            settings
-                .agent_servers
-                .get_or_insert_default()
-                .codex
-                .get_or_insert_default()
-                .default_model = model_id.map(|m| m.to_string())
-        });
-    }
-
-    fn favorite_model_ids(&self, cx: &mut App) -> HashSet<acp::ModelId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).codex.clone()
-        });
-
-        settings
-            .as_ref()
-            .map(|s| {
-                s.favorite_models
-                    .iter()
-                    .map(|id| acp::ModelId::new(id.clone()))
-                    .collect()
-            })
-            .unwrap_or_default()
-    }
-
-    fn toggle_favorite_model(
-        &self,
-        model_id: acp::ModelId,
-        should_be_favorite: bool,
-        fs: Arc<dyn Fs>,
-        cx: &App,
-    ) {
-        update_settings_file(fs, cx, move |settings, _| {
-            let favorite_models = &mut settings
-                .agent_servers
-                .get_or_insert_default()
-                .codex
-                .get_or_insert_default()
-                .favorite_models;
-
-            let model_id_str = model_id.to_string();
-            if should_be_favorite {
-                if !favorite_models.contains(&model_id_str) {
-                    favorite_models.push(model_id_str);
-                }
-            } else {
-                favorite_models.retain(|id| id != &model_id_str);
-            }
-        });
-    }
-
-    fn default_config_option(&self, config_id: &str, cx: &App) -> Option<String> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).codex.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.default_config_options.get(config_id).cloned())
-    }
-
-    fn set_default_config_option(
-        &self,
-        config_id: &str,
-        value_id: Option<&str>,
-        fs: Arc<dyn Fs>,
-        cx: &mut App,
-    ) {
-        let config_id = config_id.to_string();
-        let value_id = value_id.map(|s| s.to_string());
-        update_settings_file(fs, cx, move |settings, _| {
-            let config_options = &mut settings
-                .agent_servers
-                .get_or_insert_default()
-                .codex
-                .get_or_insert_default()
-                .default_config_options;
-
-            if let Some(value) = value_id.clone() {
-                config_options.insert(config_id.clone(), value);
-            } else {
-                config_options.remove(&config_id);
-            }
-        });
-    }
-
-    fn favorite_config_option_value_ids(
-        &self,
-        config_id: &acp::SessionConfigId,
-        cx: &mut App,
-    ) -> HashSet<acp::SessionConfigValueId> {
-        let settings = cx.read_global(|settings: &SettingsStore, _| {
-            settings.get::<AllAgentServersSettings>(None).codex.clone()
-        });
-
-        settings
-            .as_ref()
-            .and_then(|s| s.favorite_config_option_values.get(config_id.0.as_ref()))
-            .map(|values| {
-                values
-                    .iter()
-                    .cloned()
-                    .map(acp::SessionConfigValueId::new)
-                    .collect()
-            })
-            .unwrap_or_default()
-    }
-
-    fn toggle_favorite_config_option_value(
-        &self,
-        config_id: acp::SessionConfigId,
-        value_id: acp::SessionConfigValueId,
-        should_be_favorite: bool,
-        fs: Arc<dyn Fs>,
-        cx: &App,
-    ) {
-        let config_id = config_id.to_string();
-        let value_id = value_id.to_string();
-
-        update_settings_file(fs, cx, move |settings, _| {
-            let favorites = &mut settings
-                .agent_servers
-                .get_or_insert_default()
-                .codex
-                .get_or_insert_default()
-                .favorite_config_option_values;
-
-            let entry = favorites.entry(config_id.clone()).or_insert_with(Vec::new);
-
-            if should_be_favorite {
-                if !entry.iter().any(|v| v == &value_id) {
-                    entry.push(value_id.clone());
-                }
-            } else {
-                entry.retain(|v| v != &value_id);
-                if entry.is_empty() {
-                    favorites.remove(&config_id);
-                }
-            }
-        });
-    }
-
-    fn connect(
-        &self,
-        root_dir: Option<&Path>,
-        delegate: AgentServerDelegate,
-        cx: &mut App,
-    ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
-        let name = self.name();
-        let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
-        let is_remote = delegate.project.read(cx).is_via_remote_server();
-        let store = delegate.store.downgrade();
-        let mut extra_env = load_proxy_env(cx);
-        let default_mode = self.default_mode(cx);
-        let default_model = self.default_model(cx);
-        let default_config_options = cx.read_global(|settings: &SettingsStore, _| {
-            settings
-                .get::<AllAgentServersSettings>(None)
-                .codex
-                .as_ref()
-                .map(|s| s.default_config_options.clone())
-                .unwrap_or_default()
-        });
-        if let Ok(api_key) = std::env::var(CODEX_API_KEY_VAR_NAME) {
-            extra_env.insert(CODEX_API_KEY_VAR_NAME.into(), api_key);
-        }
-        if let Ok(api_key) = std::env::var(OPEN_AI_API_KEY_VAR_NAME) {
-            extra_env.insert(OPEN_AI_API_KEY_VAR_NAME.into(), api_key);
-        }
-
-        cx.spawn(async move |cx| {
-            let (command, root_dir, login) = store
-                .update(cx, |store, cx| {
-                    let agent = store
-                        .get_external_agent(&CODEX_NAME.into())
-                        .context("Codex is not registered")?;
-                    anyhow::Ok(agent.get_command(
-                        root_dir.as_deref(),
-                        extra_env,
-                        delegate.status_tx,
-                        delegate.new_version_available,
-                        &mut cx.to_async(),
-                    ))
-                })??
-                .await?;
-
-            let connection = crate::acp::connect(
-                name,
-                command,
-                root_dir.as_ref(),
-                default_mode,
-                default_model,
-                default_config_options,
-                is_remote,
-                cx,
-            )
-            .await?;
-            Ok((connection, login))
-        })
-    }
-
-    fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
-        self
-    }
-}
-
-#[cfg(test)]
-pub(crate) mod tests {
-    use super::*;
-
-    crate::common_e2e_tests!(async |_, _| Codex, allow_option_id = "proceed_once");
-}

crates/agent_servers/src/custom.rs 🔗

@@ -3,11 +3,15 @@ use acp_thread::AgentConnection;
 use agent_client_protocol as acp;
 use anyhow::{Context as _, Result};
 use collections::HashSet;
+use credentials_provider::CredentialsProvider;
 use fs::Fs;
 use gpui::{App, AppContext as _, SharedString, Task};
-use project::agent_server_store::{AllAgentServersSettings, ExternalAgentServerName};
+use language_model::{ApiKey, EnvVar};
+use project::agent_server_store::{
+    AllAgentServersSettings, CLAUDE_AGENT_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME,
+};
 use settings::{SettingsStore, update_settings_file};
-use std::{path::Path, rc::Rc, sync::Arc};
+use std::{rc::Rc, sync::Arc};
 use ui::IconName;
 
 /// A generic agent server implementation for custom user-defined agents
@@ -34,7 +38,6 @@ impl AgentServer for CustomAgentServer {
         let settings = cx.read_global(|settings: &SettingsStore, _| {
             settings
                 .get::<AllAgentServersSettings>(None)
-                .custom
                 .get(self.name().as_ref())
                 .cloned()
         });
@@ -52,7 +55,6 @@ impl AgentServer for CustomAgentServer {
         let settings = cx.read_global(|settings: &SettingsStore, _| {
             settings
                 .get::<AllAgentServersSettings>(None)
-                .custom
                 .get(self.name().as_ref())
                 .cloned()
         });
@@ -86,7 +88,6 @@ impl AgentServer for CustomAgentServer {
             let settings = settings
                 .agent_servers
                 .get_or_insert_default()
-                .custom
                 .entry(name.to_string())
                 .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
                     default_model: None,
@@ -135,7 +136,6 @@ impl AgentServer for CustomAgentServer {
             let settings = settings
                 .agent_servers
                 .get_or_insert_default()
-                .custom
                 .entry(name.to_string())
                 .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
                     default_model: None,
@@ -160,7 +160,6 @@ impl AgentServer for CustomAgentServer {
         let settings = cx.read_global(|settings: &SettingsStore, _| {
             settings
                 .get::<AllAgentServersSettings>(None)
-                .custom
                 .get(self.name().as_ref())
                 .cloned()
         });
@@ -176,7 +175,6 @@ impl AgentServer for CustomAgentServer {
             let settings = settings
                 .agent_servers
                 .get_or_insert_default()
-                .custom
                 .entry(name.to_string())
                 .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
                     default_model: None,
@@ -201,7 +199,6 @@ impl AgentServer for CustomAgentServer {
         let settings = cx.read_global(|settings: &SettingsStore, _| {
             settings
                 .get::<AllAgentServersSettings>(None)
-                .custom
                 .get(self.name().as_ref())
                 .cloned()
         });
@@ -229,7 +226,6 @@ impl AgentServer for CustomAgentServer {
             let settings = settings
                 .agent_servers
                 .get_or_insert_default()
-                .custom
                 .entry(name.to_string())
                 .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
                     default_model: None,
@@ -267,7 +263,6 @@ impl AgentServer for CustomAgentServer {
         let settings = cx.read_global(|settings: &SettingsStore, _| {
             settings
                 .get::<AllAgentServersSettings>(None)
-                .custom
                 .get(self.name().as_ref())
                 .cloned()
         });
@@ -291,7 +286,6 @@ impl AgentServer for CustomAgentServer {
             let settings = settings
                 .agent_servers
                 .get_or_insert_default()
-                .custom
                 .entry(name.to_string())
                 .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
                     default_model: None,
@@ -327,20 +321,23 @@ impl AgentServer for CustomAgentServer {
 
     fn connect(
         &self,
-        root_dir: Option<&Path>,
         delegate: AgentServerDelegate,
         cx: &mut App,
-    ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
+    ) -> Task<Result<Rc<dyn AgentConnection>>> {
         let name = self.name();
-        let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
-        let is_remote = delegate.project.read(cx).is_via_remote_server();
+        let display_name = delegate
+            .store
+            .read(cx)
+            .agent_display_name(&ExternalAgentServerName(name.clone()))
+            .unwrap_or_else(|| name.clone());
         let default_mode = self.default_mode(cx);
         let default_model = self.default_model(cx);
+        let is_previous_built_in =
+            matches!(name.as_ref(), CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME);
         let (default_config_options, is_registry_agent) =
             cx.read_global(|settings: &SettingsStore, _| {
                 let agent_settings = settings
                     .get::<AllAgentServersSettings>(None)
-                    .custom
                     .get(self.name().as_ref());
 
                 let is_registry = agent_settings
@@ -372,16 +369,46 @@ impl AgentServer for CustomAgentServer {
                 (config_options, is_registry)
             });
 
+        // Intermediate step to allow for previous built-ins to also be triggered if they aren't in settings yet.
+        let is_registry_agent = is_registry_agent || is_previous_built_in;
+
         if is_registry_agent {
             if let Some(registry_store) = project::AgentRegistryStore::try_global(cx) {
                 registry_store.update(cx, |store, cx| store.refresh_if_stale(cx));
             }
         }
 
+        let mut extra_env = load_proxy_env(cx);
+        if delegate.store.read(cx).no_browser() {
+            extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned());
+        }
+        if is_registry_agent {
+            match name.as_ref() {
+                CLAUDE_AGENT_NAME => {
+                    extra_env.insert("ANTHROPIC_API_KEY".into(), "".into());
+                }
+                CODEX_NAME => {
+                    if let Ok(api_key) = std::env::var("CODEX_API_KEY") {
+                        extra_env.insert("CODEX_API_KEY".into(), api_key);
+                    }
+                    if let Ok(api_key) = std::env::var("OPEN_AI_API_KEY") {
+                        extra_env.insert("OPEN_AI_API_KEY".into(), api_key);
+                    }
+                }
+                GEMINI_NAME => {
+                    extra_env.insert("SURFACE".to_owned(), "zed".to_owned());
+                }
+                _ => {}
+            }
+        }
         let store = delegate.store.downgrade();
-        let extra_env = load_proxy_env(cx);
         cx.spawn(async move |cx| {
-            let (command, root_dir, login) = store
+            if is_registry_agent && name.as_ref() == GEMINI_NAME {
+                if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() {
+                    extra_env.insert("GEMINI_API_KEY".into(), api_key);
+                }
+            }
+            let command = store
                 .update(cx, |store, cx| {
                     let agent = store
                         .get_external_agent(&ExternalAgentServerName(name.clone()))
@@ -389,7 +416,6 @@ impl AgentServer for CustomAgentServer {
                             format!("Custom agent server `{}` is not registered", name)
                         })?;
                     anyhow::Ok(agent.get_command(
-                        root_dir.as_deref(),
                         extra_env,
                         delegate.status_tx,
                         delegate.new_version_available,
@@ -399,16 +425,15 @@ impl AgentServer for CustomAgentServer {
                 .await?;
             let connection = crate::acp::connect(
                 name,
+                display_name,
                 command,
-                root_dir.as_ref(),
                 default_mode,
                 default_model,
                 default_config_options,
-                is_remote,
                 cx,
             )
             .await?;
-            Ok((connection, login))
+            Ok(connection)
         })
     }
 
@@ -416,3 +441,20 @@ impl AgentServer for CustomAgentServer {
         self
     }
 }
+
+fn api_key_for_gemini_cli(cx: &mut App) -> Task<Result<String>> {
+    let env_var = EnvVar::new("GEMINI_API_KEY".into()).or(EnvVar::new("GOOGLE_AI_API_KEY".into()));
+    if let Some(key) = env_var.value {
+        return Task::ready(Ok(key));
+    }
+    let credentials_provider = <dyn CredentialsProvider>::global(cx);
+    let api_url = google_ai::API_URL.to_string();
+    cx.spawn(async move |cx| {
+        Ok(
+            ApiKey::load_from_system_keychain(&api_url, credentials_provider.as_ref(), cx)
+                .await?
+                .key()
+                .to_string(),
+        )
+    })
+}

crates/agent_servers/src/e2e_tests.rs 🔗

@@ -4,8 +4,6 @@ use agent_client_protocol as acp;
 use futures::{FutureExt, StreamExt, channel::mpsc, select};
 use gpui::{Entity, TestAppContext};
 use indoc::indoc;
-#[cfg(test)]
-use project::agent_server_store::BuiltinAgentServerSettings;
 use project::{FakeFs, Project};
 #[cfg(test)]
 use settings::Settings;
@@ -414,18 +412,7 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
 
         #[cfg(test)]
         project::agent_server_store::AllAgentServersSettings::override_global(
-            project::agent_server_store::AllAgentServersSettings {
-                claude: Some(BuiltinAgentServerSettings {
-                    path: Some("claude-agent-acp".into()),
-                    ..Default::default()
-                }),
-                gemini: Some(crate::gemini::tests::local_command().into()),
-                codex: Some(BuiltinAgentServerSettings {
-                    path: Some("codex-acp".into()),
-                    ..Default::default()
-                }),
-                custom: collections::HashMap::default(),
-            },
+            project::agent_server_store::AllAgentServersSettings(collections::HashMap::default()),
             cx,
         );
     });
@@ -444,10 +431,7 @@ pub async fn new_test_thread(
     let store = project.read_with(cx, |project, _| project.agent_server_store().clone());
     let delegate = AgentServerDelegate::new(store, project.clone(), None, None);
 
-    let (connection, _) = cx
-        .update(|cx| server.connect(Some(current_dir.as_ref()), delegate, cx))
-        .await
-        .unwrap();
+    let connection = cx.update(|cx| server.connect(delegate, cx)).await.unwrap();
 
     cx.update(|cx| connection.new_session(project.clone(), current_dir.as_ref(), cx))
         .await

crates/agent_servers/src/gemini.rs 🔗

@@ -1,130 +0,0 @@
-use std::rc::Rc;
-use std::{any::Any, path::Path};
-
-use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
-use acp_thread::AgentConnection;
-use anyhow::{Context as _, Result};
-use credentials_provider::CredentialsProvider;
-use gpui::{App, AppContext as _, SharedString, Task};
-use language_model::{ApiKey, EnvVar};
-use project::agent_server_store::{AllAgentServersSettings, GEMINI_NAME};
-use settings::SettingsStore;
-
-const GEMINI_API_KEY_VAR_NAME: &str = "GEMINI_API_KEY";
-const GOOGLE_AI_API_KEY_VAR_NAME: &str = "GOOGLE_AI_API_KEY";
-
-fn api_key_for_gemini_cli(cx: &mut App) -> Task<Result<String>> {
-    let env_var = EnvVar::new(GEMINI_API_KEY_VAR_NAME.into())
-        .or(EnvVar::new(GOOGLE_AI_API_KEY_VAR_NAME.into()));
-    if let Some(key) = env_var.value {
-        return Task::ready(Ok(key));
-    }
-    let credentials_provider = <dyn CredentialsProvider>::global(cx);
-    let api_url = google_ai::API_URL.to_string();
-    cx.spawn(async move |cx| {
-        Ok(
-            ApiKey::load_from_system_keychain(&api_url, credentials_provider.as_ref(), cx)
-                .await?
-                .key()
-                .to_string(),
-        )
-    })
-}
-
-#[derive(Clone)]
-pub struct Gemini;
-
-impl AgentServer for Gemini {
-    fn name(&self) -> SharedString {
-        "Gemini CLI".into()
-    }
-
-    fn logo(&self) -> ui::IconName {
-        ui::IconName::AiGemini
-    }
-
-    fn connect(
-        &self,
-        root_dir: Option<&Path>,
-        delegate: AgentServerDelegate,
-        cx: &mut App,
-    ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
-        let name = self.name();
-        let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
-        let is_remote = delegate.project.read(cx).is_via_remote_server();
-        let store = delegate.store.downgrade();
-        let mut extra_env = load_proxy_env(cx);
-        let default_mode = self.default_mode(cx);
-        let default_model = self.default_model(cx);
-        let default_config_options = cx.read_global(|settings: &SettingsStore, _| {
-            settings
-                .get::<AllAgentServersSettings>(None)
-                .gemini
-                .as_ref()
-                .map(|s| s.default_config_options.clone())
-                .unwrap_or_default()
-        });
-
-        cx.spawn(async move |cx| {
-            extra_env.insert("SURFACE".to_owned(), "zed".to_owned());
-
-            if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() {
-                extra_env.insert("GEMINI_API_KEY".into(), api_key);
-            }
-            let (command, root_dir, login) = store
-                .update(cx, |store, cx| {
-                    let agent = store
-                        .get_external_agent(&GEMINI_NAME.into())
-                        .context("Gemini CLI is not registered")?;
-                    anyhow::Ok(agent.get_command(
-                        root_dir.as_deref(),
-                        extra_env,
-                        delegate.status_tx,
-                        delegate.new_version_available,
-                        &mut cx.to_async(),
-                    ))
-                })??
-                .await?;
-
-            let connection = crate::acp::connect(
-                name,
-                command,
-                root_dir.as_ref(),
-                default_mode,
-                default_model,
-                default_config_options,
-                is_remote,
-                cx,
-            )
-            .await?;
-            Ok((connection, login))
-        })
-    }
-
-    fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
-        self
-    }
-}
-
-#[cfg(test)]
-pub(crate) mod tests {
-    use project::agent_server_store::AgentServerCommand;
-
-    use super::*;
-    use std::path::Path;
-
-    crate::common_e2e_tests!(async |_, _| Gemini, allow_option_id = "proceed_once");
-
-    pub fn local_command() -> AgentServerCommand {
-        let cli_path = Path::new(env!("CARGO_MANIFEST_DIR"))
-            .join("../../../gemini-cli/packages/cli")
-            .to_string_lossy()
-            .to_string();
-
-        AgentServerCommand {
-            path: "node".into(),
-            args: vec![cli_path],
-            env: None,
-        }
-    }
-}

crates/agent_ui/Cargo.toml 🔗

@@ -58,6 +58,7 @@ feature_flags.workspace = true
 file_icons.workspace = true
 fs.workspace = true
 futures.workspace = true
+git.workspace = true
 fuzzy.workspace = true
 gpui.workspace = true
 gpui_tokio.workspace = true

crates/agent_ui/src/acp.rs 🔗

@@ -1,14 +0,0 @@
-mod config_options;
-mod entry_view_state;
-mod message_editor;
-mod mode_selector;
-mod model_selector;
-mod model_selector_popover;
-mod thread_history;
-pub(crate) mod thread_view;
-
-pub use mode_selector::ModeSelector;
-pub use model_selector::AcpModelSelector;
-pub use model_selector_popover::AcpModelSelectorPopover;
-pub use thread_history::*;
-pub use thread_view::AcpServerView;

crates/agent_ui/src/agent_configuration.rs 🔗

@@ -8,7 +8,6 @@ use std::{ops::Range, sync::Arc};
 
 use agent::ContextServerRegistry;
 use anyhow::Result;
-use client::zed_urls;
 use cloud_api_types::Plan;
 use collections::HashMap;
 use context_server::ContextServerId;
@@ -20,6 +19,7 @@ use gpui::{
     Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
     ScrollHandle, Subscription, Task, WeakEntity,
 };
+use itertools::Itertools;
 use language::LanguageRegistry;
 use language_model::{
     IconOrSvg, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry,
@@ -28,10 +28,7 @@ use language_model::{
 use language_models::AllLanguageModelSettings;
 use notifications::status_toast::{StatusToast, ToastIcon};
 use project::{
-    agent_server_store::{
-        AgentServerStore, CLAUDE_AGENT_NAME, CODEX_NAME, ExternalAgentServerName,
-        ExternalAgentSource, GEMINI_NAME,
-    },
+    agent_server_store::{AgentServerStore, ExternalAgentServerName, ExternalAgentSource},
     context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
 };
 use settings::{Settings, SettingsStore, update_settings_file};
@@ -941,9 +938,6 @@ impl AgentConfiguration {
 
         let user_defined_agents = agent_server_store
             .external_agents()
-            .filter(|name| {
-                name.0 != GEMINI_NAME && name.0 != CLAUDE_AGENT_NAME && name.0 != CODEX_NAME
-            })
             .cloned()
             .collect::<Vec<_>>();
 
@@ -961,6 +955,7 @@ impl AgentConfiguration {
                 let source = agent_server_store.agent_source(&name).unwrap_or_default();
                 (name, icon, display_name, source)
             })
+            .sorted_unstable_by_key(|(_, _, display_name, _)| display_name.to_lowercase())
             .collect();
 
         let add_agent_popover = PopoverMenu::new("add-agent-server-popover")
@@ -998,22 +993,6 @@ impl AgentConfiguration {
                         })
                         .separator()
                         .header("Learn More")
-                        .item(
-                            ContextMenuEntry::new("Agent Servers Docs")
-                                .icon(IconName::ArrowUpRight)
-                                .icon_color(Color::Muted)
-                                .icon_position(IconPosition::End)
-                                .handler({
-                                    move |window, cx| {
-                                        window.dispatch_action(
-                                            Box::new(OpenBrowser {
-                                                url: zed_urls::agent_server_docs(cx),
-                                            }),
-                                            cx,
-                                        );
-                                    }
-                                }),
-                        )
                         .item(
                             ContextMenuEntry::new("ACP Docs")
                                 .icon(IconName::ArrowUpRight)
@@ -1049,51 +1028,24 @@ impl AgentConfiguration {
                         "All agents connected through the Agent Client Protocol.",
                         add_agent_popover.into_any_element(),
                     ))
-                    .child(
-                        v_flex()
-                            .p_4()
-                            .pt_0()
-                            .gap_2()
-                            .child(self.render_agent_server(
-                                AgentIcon::Name(IconName::AiClaude),
-                                "Claude Agent",
-                                "Claude Agent",
-                                ExternalAgentSource::Builtin,
-                                cx,
-                            ))
-                            .child(Divider::horizontal().color(DividerColor::BorderFaded))
-                            .child(self.render_agent_server(
-                                AgentIcon::Name(IconName::AiOpenAi),
-                                "Codex CLI",
-                                "Codex CLI",
-                                ExternalAgentSource::Builtin,
-                                cx,
-                            ))
-                            .child(Divider::horizontal().color(DividerColor::BorderFaded))
-                            .child(self.render_agent_server(
-                                AgentIcon::Name(IconName::AiGemini),
-                                "Gemini CLI",
-                                "Gemini CLI",
-                                ExternalAgentSource::Builtin,
+                    .child(v_flex().p_4().pt_0().gap_2().map(|mut parent| {
+                        let mut first = true;
+                        for (name, icon, display_name, source) in user_defined_agents {
+                            if !first {
+                                parent = parent
+                                    .child(Divider::horizontal().color(DividerColor::BorderFaded));
+                            }
+                            first = false;
+                            parent = parent.child(self.render_agent_server(
+                                icon,
+                                name,
+                                display_name,
+                                source,
                                 cx,
-                            ))
-                            .map(|mut parent| {
-                                for (name, icon, display_name, source) in user_defined_agents {
-                                    parent = parent
-                                        .child(
-                                            Divider::horizontal().color(DividerColor::BorderFaded),
-                                        )
-                                        .child(self.render_agent_server(
-                                            icon,
-                                            name,
-                                            display_name,
-                                            source,
-                                            cx,
-                                        ));
-                                }
-                                parent
-                            }),
-                    ),
+                            ));
+                        }
+                        parent
+                    })),
             )
     }
 
@@ -1134,7 +1086,7 @@ impl AgentConfiguration {
                 )),
                 IconName::AcpRegistry,
             )),
-            ExternalAgentSource::Builtin | ExternalAgentSource::Custom => None,
+            ExternalAgentSource::Custom => None,
         };
 
         let agent_server_name = ExternalAgentServerName(id.clone());
@@ -1176,19 +1128,46 @@ impl AgentConfiguration {
                             let Some(agent_servers) = settings.agent_servers.as_mut() else {
                                 return;
                             };
-                            if let Some(entry) = agent_servers.custom.get(agent_name.0.as_ref())
+                            if let Some(entry) = agent_servers.get(agent_name.0.as_ref())
                                 && matches!(
                                     entry,
                                     settings::CustomAgentServerSettings::Registry { .. }
                                 )
                             {
-                                agent_servers.custom.remove(agent_name.0.as_ref());
+                                agent_servers.remove(agent_name.0.as_ref());
+                            }
+                        });
+                    })),
+                )
+            }
+            ExternalAgentSource::Custom => {
+                let fs = self.fs.clone();
+                Some(
+                    IconButton::new(
+                        SharedString::from(format!("uninstall-{}", id)),
+                        IconName::Trash,
+                    )
+                    .icon_color(Color::Muted)
+                    .icon_size(IconSize::Small)
+                    .tooltip(Tooltip::text("Remove Custom Agent"))
+                    .on_click(cx.listener(move |_, _, _window, cx| {
+                        let agent_name = agent_server_name.clone();
+                        update_settings_file(fs.clone(), cx, move |settings, _| {
+                            let Some(agent_servers) = settings.agent_servers.as_mut() else {
+                                return;
+                            };
+                            if let Some(entry) = agent_servers.get(agent_name.0.as_ref())
+                                && matches!(
+                                    entry,
+                                    settings::CustomAgentServerSettings::Custom { .. }
+                                )
+                            {
+                                agent_servers.remove(agent_name.0.as_ref());
                             }
                         });
                     })),
                 )
             }
-            ExternalAgentSource::Builtin | ExternalAgentSource::Custom => None,
         };
 
         h_flex()
@@ -1367,29 +1346,23 @@ async fn open_new_agent_servers_entry_in_settings_editor(
                         !settings
                             .agent_servers
                             .as_ref()
-                            .is_some_and(|agent_servers| {
-                                agent_servers.custom.contains_key(name.as_str())
-                            })
+                            .is_some_and(|agent_servers| agent_servers.contains_key(name.as_str()))
                     });
                 if let Some(server_name) = server_name {
                     unique_server_name = Some(SharedString::from(server_name.clone()));
-                    settings
-                        .agent_servers
-                        .get_or_insert_default()
-                        .custom
-                        .insert(
-                            server_name,
-                            settings::CustomAgentServerSettings::Custom {
-                                path: "path_to_executable".into(),
-                                args: vec![],
-                                env: HashMap::default(),
-                                default_mode: None,
-                                default_model: None,
-                                favorite_models: vec![],
-                                default_config_options: Default::default(),
-                                favorite_config_option_values: Default::default(),
-                            },
-                        );
+                    settings.agent_servers.get_or_insert_default().insert(
+                        server_name,
+                        settings::CustomAgentServerSettings::Custom {
+                            path: "path_to_executable".into(),
+                            args: vec![],
+                            env: HashMap::default(),
+                            default_mode: None,
+                            default_model: None,
+                            favorite_models: vec![],
+                            default_config_options: Default::default(),
+                            favorite_config_option_values: Default::default(),
+                        },
+                    );
                 }
             });
 

crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs 🔗

@@ -877,9 +877,14 @@ fn wait_for_context_server(
     context_server_id: ContextServerId,
     cx: &mut App,
 ) -> Task<Result<(), Arc<str>>> {
+    use std::time::Duration;
+
+    const WAIT_TIMEOUT: Duration = Duration::from_secs(120);
+
     let (tx, rx) = futures::channel::oneshot::channel();
     let tx = Arc::new(Mutex::new(Some(tx)));
 
+    let context_server_id_for_timeout = context_server_id.clone();
     let subscription = cx.subscribe(context_server_store, move |_, event, _cx| {
         let project::context_server_store::ServerStatusChangedEvent { server_id, status } = event;
 
@@ -909,12 +914,20 @@ fn wait_for_context_server(
         }
     });
 
-    cx.spawn(async move |_cx| {
-        let result = rx
-            .await
-            .map_err(|_| Arc::from("Context server store was dropped"))?;
+    cx.spawn(async move |cx| {
+        let timeout = cx.background_executor().timer(WAIT_TIMEOUT);
+        let result = futures::future::select(rx, timeout).await;
         drop(subscription);
-        result
+        match result {
+            futures::future::Either::Left((Ok(inner), _)) => inner,
+            futures::future::Either::Left((Err(_), _)) => {
+                Err(Arc::from("Context server store was dropped"))
+            }
+            futures::future::Either::Right(_) => Err(Arc::from(format!(
+                "Timed out waiting for context server `{}` to start. Check the Zed log for details.",
+                context_server_id_for_timeout
+            ))),
+        }
     })
 }
 

crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs 🔗

@@ -2,10 +2,9 @@ mod profile_modal_header;
 
 use std::sync::Arc;
 
-use agent::{AgentTool, ContextServerRegistry, SpawnAgentTool};
+use agent::ContextServerRegistry;
 use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles};
 use editor::Editor;
-use feature_flags::{FeatureFlagAppExt as _, SubagentsFeatureFlag};
 use fs::Fs;
 use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
 use language_model::{LanguageModel, LanguageModelRegistry};
@@ -363,10 +362,7 @@ impl ManageProfilesModal {
                 let supported_by_provider = provider.as_ref().map_or(true, |provider| {
                     agent::tool_supports_provider(name, provider)
                 });
-                let enabled_by_feature_flag =
-                    *name != SpawnAgentTool::NAME || cx.has_flag::<SubagentsFeatureFlag>();
-
-                supported_by_provider && enabled_by_feature_flag
+                supported_by_provider
             })
             .map(Arc::from)
             .collect();

crates/agent_ui/src/agent_configuration/tool_picker.rs 🔗

@@ -172,12 +172,7 @@ impl PickerDelegate for ToolPickerDelegate {
         self.selected_index = ix;
     }
 
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
         let item = &self.filtered_items[ix];
         match item {
             PickerItem::Tool { .. } => true,

crates/agent_ui/src/agent_diff.rs 🔗

@@ -1403,7 +1403,7 @@ impl AgentDiff {
                     self.update_reviewing_editors(workspace, window, cx);
                 }
             }
-            AcpThreadEvent::Stopped => {
+            AcpThreadEvent::Stopped(_) => {
                 self.update_reviewing_editors(workspace, window, cx);
             }
             AcpThreadEvent::Error | AcpThreadEvent::LoadError(_) | AcpThreadEvent::Refusal => {

crates/agent_ui/src/agent_panel.rs 🔗

@@ -1,6 +1,6 @@
 use std::{
     ops::Range,
-    path::Path,
+    path::{Path, PathBuf},
     rc::Rc,
     sync::{
         Arc,
@@ -9,11 +9,12 @@ use std::{
     time::Duration,
 };
 
-use acp_thread::{AcpThread, AgentSessionInfo, MentionUri};
+use acp_thread::{AcpThread, AgentSessionInfo, MentionUri, ThreadStatus};
 use agent::{ContextServerRegistry, SharedThread, ThreadStore};
 use agent_client_protocol as acp;
 use agent_servers::AgentServer;
 use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use itertools::Itertools;
 use project::{
     ExternalAgentServerName,
     agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME},
@@ -21,16 +22,18 @@ use project::{
 use serde::{Deserialize, Serialize};
 use settings::{LanguageModelProviderSetting, LanguageModelSelection};
 
+use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _};
 use zed_actions::agent::{OpenClaudeAgentOnboardingModal, ReauthenticateAgent, ReviewBranchDiff};
 
+use crate::ManageProfiles;
 use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
 use crate::{
-    AddContextServer, AgentDiffPane, CopyThreadToClipboard, Follow, InlineAssistant,
-    LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff,
-    OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu, ToggleNewThreadMenu,
-    ToggleOptionsMenu,
-    acp::AcpServerView,
+    AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow,
+    InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown,
+    OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn,
+    ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
     agent_configuration::{AgentConfiguration, AssistantConfigurationEvent},
+    connection_view::{AcpThreadViewEvent, ThreadView},
     slash_command::SlashCommandCompletionProvider,
     text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate},
     ui::EndTrialUpsell,
@@ -39,11 +42,9 @@ use crate::{
     AgentInitialContent, ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary,
 };
 use crate::{
-    ExpandMessageEditor,
-    acp::{AcpThreadHistory, ThreadHistoryEvent},
+    ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent,
     text_thread_history::{TextThreadHistory, TextThreadHistoryEvent},
 };
-use crate::{ManageProfiles, acp::thread_view::AcpThreadView};
 use agent_settings::AgentSettings;
 use ai_onboarding::AgentPanelOnboarding;
 use anyhow::{Result, anyhow};
@@ -51,10 +52,12 @@ use assistant_slash_command::SlashCommandWorkingSet;
 use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary};
 use client::UserStore;
 use cloud_api_types::Plan;
+use collections::HashMap;
 use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
 use extension::ExtensionEvents;
 use extension_host::ExtensionStore;
 use fs::Fs;
+use git::repository::validate_worktree_directory;
 use gpui::{
     Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner,
     DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels,
@@ -62,6 +65,7 @@ use gpui::{
 };
 use language::LanguageRegistry;
 use language_model::{ConfigurationError, LanguageModelRegistry};
+use project::project_settings::ProjectSettings;
 use project::{Project, ProjectPath, Worktree};
 use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
 use rules_library::{RulesLibrary, open_rules_library};
@@ -69,8 +73,8 @@ use search::{BufferSearchBar, buffer_search};
 use settings::{Settings, update_settings_file};
 use theme::ThemeSettings;
 use ui::{
-    Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, Tab,
-    Tooltip, prelude::*, utils::WithRemSize,
+    Button, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu,
+    PopoverMenuHandle, SpinnerLabel, Tab, Tooltip, prelude::*, utils::WithRemSize,
 };
 use util::ResultExt as _;
 use workspace::{
@@ -124,6 +128,8 @@ struct SerializedAgentPanel {
     selected_agent: Option<AgentType>,
     #[serde(default)]
     last_active_thread: Option<SerializedActiveThread>,
+    #[serde(default)]
+    start_thread_in: Option<StartThreadIn>,
 }
 
 #[derive(Serialize, Deserialize, Debug, Clone)]
@@ -325,6 +331,13 @@ pub fn init(cx: &mut App) {
                             cx,
                         );
                     });
+                })
+                .register_action(|workspace, action: &StartThreadIn, _window, cx| {
+                    if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+                        panel.update(cx, |panel, cx| {
+                            panel.set_start_thread_in(action, cx);
+                        });
+                    }
                 });
         },
     )
@@ -340,7 +353,7 @@ enum HistoryKind {
 enum ActiveView {
     Uninitialized,
     AgentThread {
-        server_view: Entity<AcpServerView>,
+        server_view: Entity<ConnectionView>,
     },
     TextThread {
         text_thread_editor: Entity<TextThreadEditor>,
@@ -366,21 +379,19 @@ pub enum AgentType {
     #[default]
     NativeAgent,
     TextThread,
-    Gemini,
-    ClaudeAgent,
-    Codex,
     Custom {
         name: SharedString,
     },
 }
 
 impl AgentType {
+    pub fn is_native(&self) -> bool {
+        matches!(self, Self::NativeAgent)
+    }
+
     fn label(&self) -> SharedString {
         match self {
             Self::NativeAgent | Self::TextThread => "Zed Agent".into(),
-            Self::Gemini => "Gemini CLI".into(),
-            Self::ClaudeAgent => "Claude Agent".into(),
-            Self::Codex => "Codex".into(),
             Self::Custom { name, .. } => name.into(),
         }
     }
@@ -388,9 +399,6 @@ impl AgentType {
     fn icon(&self) -> Option<IconName> {
         match self {
             Self::NativeAgent | Self::TextThread => None,
-            Self::Gemini => Some(IconName::AiGemini),
-            Self::ClaudeAgent => Some(IconName::AiClaude),
-            Self::Codex => Some(IconName::AiOpenAi),
             Self::Custom { .. } => Some(IconName::Sparkle),
         }
     }
@@ -399,15 +407,35 @@ impl AgentType {
 impl From<ExternalAgent> for AgentType {
     fn from(value: ExternalAgent) -> Self {
         match value {
-            ExternalAgent::Gemini => Self::Gemini,
-            ExternalAgent::ClaudeCode => Self::ClaudeAgent,
-            ExternalAgent::Codex => Self::Codex,
             ExternalAgent::Custom { name } => Self::Custom { name },
             ExternalAgent::NativeAgent => Self::NativeAgent,
         }
     }
 }
 
+impl StartThreadIn {
+    fn label(&self) -> SharedString {
+        match self {
+            Self::LocalProject => "Local Project".into(),
+            Self::NewWorktree => "New Worktree".into(),
+        }
+    }
+
+    fn icon(&self) -> IconName {
+        match self {
+            Self::LocalProject => IconName::Screen,
+            Self::NewWorktree => IconName::GitBranchPlus,
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+#[allow(dead_code)]
+pub enum WorktreeCreationStatus {
+    Creating,
+    Error(SharedString),
+}
+
 impl ActiveView {
     pub fn which_font_size_used(&self) -> WhichFontSize {
         match self {
@@ -515,7 +543,7 @@ pub struct AgentPanel {
     project: Entity<Project>,
     fs: Arc<dyn Fs>,
     language_registry: Arc<LanguageRegistry>,
-    acp_history: Entity<AcpThreadHistory>,
+    acp_history: Entity<ThreadHistory>,
     text_thread_history: Entity<TextThreadHistory>,
     thread_store: Entity<ThreadStore>,
     text_thread_store: Entity<assistant_text_thread::TextThreadStore>,
@@ -526,8 +554,9 @@ pub struct AgentPanel {
     focus_handle: FocusHandle,
     active_view: ActiveView,
     previous_view: Option<ActiveView>,
-    _active_view_observation: Option<Subscription>,
+    background_threads: HashMap<acp::SessionId, Entity<ConnectionView>>,
     new_thread_menu_handle: PopoverMenuHandle<ContextMenu>,
+    start_thread_in_menu_handle: PopoverMenuHandle<ContextMenu>,
     agent_panel_menu_handle: PopoverMenuHandle<ContextMenu>,
     agent_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
     agent_navigation_menu: Option<Entity<ContextMenu>>,
@@ -538,9 +567,14 @@ pub struct AgentPanel {
     pending_serialization: Option<Task<Result<()>>>,
     onboarding: Entity<AgentPanelOnboarding>,
     selected_agent: AgentType,
+    start_thread_in: StartThreadIn,
+    worktree_creation_status: Option<WorktreeCreationStatus>,
+    _thread_view_subscription: Option<Subscription>,
+    _worktree_creation_task: Option<Task<()>>,
     show_trust_workspace_message: bool,
     last_configuration_error_telemetry: Option<String>,
     on_boarding_upsell_dismissed: AtomicBool,
+    _active_view_observation: Option<Subscription>,
 }
 
 impl AgentPanel {
@@ -551,6 +585,7 @@ impl AgentPanel {
 
         let width = self.width;
         let selected_agent = self.selected_agent.clone();
+        let start_thread_in = Some(self.start_thread_in);
 
         let last_active_thread = self.active_agent_thread(cx).map(|thread| {
             let thread = thread.read(cx);
@@ -574,6 +609,7 @@ impl AgentPanel {
                     width,
                     selected_agent: Some(selected_agent),
                     last_active_thread,
+                    start_thread_in,
                 },
             )
             .await?;
@@ -618,6 +654,37 @@ impl AgentPanel {
                 })?
                 .await?;
 
+            let last_active_thread = if let Some(thread_info) = serialized_panel
+                .as_ref()
+                .and_then(|p| p.last_active_thread.clone())
+            {
+                if thread_info.agent_type.is_native() {
+                    let session_id = acp::SessionId::new(thread_info.session_id.clone());
+                    let load_result = cx.update(|_window, cx| {
+                        let thread_store = ThreadStore::global(cx);
+                        thread_store.update(cx, |store, cx| store.load_thread(session_id, cx))
+                    });
+                    let thread_exists = if let Ok(task) = load_result {
+                        task.await.ok().flatten().is_some()
+                    } else {
+                        false
+                    };
+                    if thread_exists {
+                        Some(thread_info)
+                    } else {
+                        log::warn!(
+                            "last active thread {} not found in database, skipping restoration",
+                            thread_info.session_id
+                        );
+                        None
+                    }
+                } else {
+                    Some(thread_info)
+                }
+            } else {
+                None
+            };
+
             let panel = workspace.update_in(cx, |workspace, window, cx| {
                 let panel =
                     cx.new(|cx| Self::new(workspace, text_thread_store, prompt_store, window, cx));
@@ -628,44 +695,45 @@ impl AgentPanel {
                         if let Some(selected_agent) = serialized_panel.selected_agent.clone() {
                             panel.selected_agent = selected_agent;
                         }
+                        if let Some(start_thread_in) = serialized_panel.start_thread_in {
+                            let is_worktree_flag_enabled =
+                                cx.has_flag::<AgentV2FeatureFlag>();
+                            let is_valid = match &start_thread_in {
+                                StartThreadIn::LocalProject => true,
+                                StartThreadIn::NewWorktree => {
+                                    let project = panel.project.read(cx);
+                                    is_worktree_flag_enabled && !project.is_via_collab()
+                                }
+                            };
+                            if is_valid {
+                                panel.start_thread_in = start_thread_in;
+                            } else {
+                                log::info!(
+                                    "deserialized start_thread_in {:?} is no longer valid, falling back to LocalProject",
+                                    start_thread_in,
+                                );
+                            }
+                        }
                         cx.notify();
                     });
                 }
 
-                panel
-            })?;
-
-            if let Some(thread_info) = serialized_panel.and_then(|p| p.last_active_thread) {
-                let session_id = acp::SessionId::new(thread_info.session_id.clone());
-                let load_task = panel.update(cx, |panel, cx| {
-                    let thread_store = panel.thread_store.clone();
-                    thread_store.update(cx, |store, cx| store.load_thread(session_id, cx))
-                });
-                let thread_exists = load_task
-                    .await
-                    .map(|thread: Option<agent::DbThread>| thread.is_some())
-                    .unwrap_or(false);
-
-                if thread_exists {
-                    panel.update_in(cx, |panel, window, cx| {
-                        panel.selected_agent = thread_info.agent_type.clone();
-                        let session_info = AgentSessionInfo {
-                            session_id: acp::SessionId::new(thread_info.session_id),
-                            cwd: thread_info.cwd,
-                            title: thread_info.title.map(SharedString::from),
-                            updated_at: None,
-                            meta: None,
-                        };
+                if let Some(thread_info) = last_active_thread {
+                    let agent_type = thread_info.agent_type.clone();
+                    let session_info = AgentSessionInfo {
+                        session_id: acp::SessionId::new(thread_info.session_id),
+                        cwd: thread_info.cwd,
+                        title: thread_info.title.map(SharedString::from),
+                        updated_at: None,
+                        meta: None,
+                    };
+                    panel.update(cx, |panel, cx| {
+                        panel.selected_agent = agent_type;
                         panel.load_agent_thread(session_info, window, cx);
-                    })?;
-                } else {
-                    log::error!(
-                        "could not restore last active thread: \
-                         no thread found in database with ID {:?}",
-                        thread_info.session_id
-                    );
+                    });
                 }
-            }
+                panel
+            })?;
 
             Ok(panel)
         })
@@ -690,7 +758,7 @@ impl AgentPanel {
             cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
 
         let thread_store = ThreadStore::global(cx);
-        let acp_history = cx.new(|cx| AcpThreadHistory::new(None, window, cx));
+        let acp_history = cx.new(|cx| ThreadHistory::new(None, window, cx));
         let text_thread_history =
             cx.new(|cx| TextThreadHistory::new(text_thread_store.clone(), window, cx));
         cx.subscribe_in(
@@ -811,8 +879,9 @@ impl AgentPanel {
             focus_handle: cx.focus_handle(),
             context_server_registry,
             previous_view: None,
-            _active_view_observation: None,
+            background_threads: HashMap::default(),
             new_thread_menu_handle: PopoverMenuHandle::default(),
+            start_thread_in_menu_handle: PopoverMenuHandle::default(),
             agent_panel_menu_handle: PopoverMenuHandle::default(),
             agent_navigation_menu_handle: PopoverMenuHandle::default(),
             agent_navigation_menu: None,
@@ -826,9 +895,14 @@ impl AgentPanel {
             text_thread_history,
             thread_store,
             selected_agent: AgentType::default(),
+            start_thread_in: StartThreadIn::default(),
+            worktree_creation_status: None,
+            _thread_view_subscription: None,
+            _worktree_creation_task: None,
             show_trust_workspace_message: false,
             last_configuration_error_telemetry: None,
             on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()),
+            _active_view_observation: None,
         };
 
         // Initial sync of agent servers from extensions
@@ -874,7 +948,7 @@ impl AgentPanel {
         &self.thread_store
     }
 
-    pub fn history(&self) -> &Entity<AcpThreadHistory> {
+    pub fn history(&self) -> &Entity<ThreadHistory> {
         &self.acp_history
     }
 
@@ -914,7 +988,7 @@ impl AgentPanel {
             .unwrap_or(false)
     }
 
-    pub(crate) fn active_thread_view(&self) -> Option<&Entity<AcpServerView>> {
+    pub(crate) fn active_thread_view(&self) -> Option<&Entity<ConnectionView>> {
         match &self.active_view {
             ActiveView::AgentThread { server_view, .. } => Some(server_view),
             ActiveView::Uninitialized
@@ -924,7 +998,7 @@ impl AgentPanel {
         }
     }
 
-    fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context<Self>) {
+    pub fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context<Self>) {
         self.new_agent_thread(AgentType::NativeAgent, window, cx);
     }
 
@@ -1057,7 +1131,7 @@ impl AgentPanel {
 
             let server = ext_agent.server(fs, thread_store);
             this.update_in(cx, |agent_panel, window, cx| {
-                agent_panel._external_thread(
+                agent_panel.create_external_thread(
                     server,
                     resume_thread,
                     initial_content,
@@ -1117,10 +1191,7 @@ impl AgentPanel {
         match self.selected_agent {
             AgentType::NativeAgent => Some(HistoryKind::AgentThreads),
             AgentType::TextThread => Some(HistoryKind::TextThreads),
-            AgentType::Gemini
-            | AgentType::ClaudeAgent
-            | AgentType::Codex
-            | AgentType::Custom { .. } => {
+            AgentType::Custom { .. } => {
                 if self.acp_history.read(cx).has_session_list() {
                     Some(HistoryKind::AgentThreads)
                 } else {
@@ -1477,7 +1548,7 @@ impl AgentPanel {
         cx.spawn_in(window, async move |this, cx| {
             thread_store
                 .update(&mut cx.clone(), |store, cx| {
-                    store.save_thread(session_id.clone(), db_thread, cx)
+                    store.save_thread(session_id.clone(), db_thread, Default::default(), cx)
                 })
                 .await?;
 
@@ -1560,14 +1631,14 @@ impl AgentPanel {
         }
     }
 
-    pub fn as_active_server_view(&self) -> Option<&Entity<AcpServerView>> {
+    pub fn as_active_server_view(&self) -> Option<&Entity<ConnectionView>> {
         match &self.active_view {
             ActiveView::AgentThread { server_view } => Some(server_view),
             _ => None,
         }
     }
 
-    pub fn as_active_thread_view(&self, cx: &App) -> Option<Entity<AcpThreadView>> {
+    pub fn as_active_thread_view(&self, cx: &App) -> Option<Entity<ThreadView>> {
         let server_view = self.as_active_server_view()?;
         server_view.read(cx).active_thread().cloned()
     }
@@ -1582,6 +1653,53 @@ impl AgentPanel {
         }
     }
 
+    /// Whether the given session is tracked as a background thread.
+    pub fn is_background_thread(&self, session_id: &acp::SessionId) -> bool {
+        self.background_threads.contains_key(session_id)
+    }
+
+    /// Returns primary thread views for all retained connections: the active
+    /// thread plus background threads still running or completed but unseen.
+    pub fn parent_threads(&self, cx: &App) -> Vec<Entity<ThreadView>> {
+        let mut views = Vec::new();
+
+        if let Some(server_view) = self.as_active_server_view() {
+            if let Some(thread_view) = server_view.read(cx).parent_thread(cx) {
+                views.push(thread_view);
+            }
+        }
+
+        for server_view in self.background_threads.values() {
+            if let Some(thread_view) = server_view.read(cx).parent_thread(cx) {
+                views.push(thread_view);
+            }
+        }
+
+        views
+    }
+
+    fn retain_running_thread(&mut self, old_view: ActiveView, cx: &mut Context<Self>) {
+        let ActiveView::AgentThread { server_view } = old_view else {
+            return;
+        };
+
+        let Some(thread_view) = server_view.read(cx).parent_thread(cx) else {
+            return;
+        };
+
+        let thread = &thread_view.read(cx).thread;
+        let (status, session_id) = {
+            let thread = thread.read(cx);
+            (thread.status(), thread.session_id().clone())
+        };
+
+        if status != ThreadStatus::Generating {
+            return;
+        }
+
+        self.background_threads.insert(session_id, server_view);
+    }
+
     pub(crate) fn active_native_agent_thread(&self, cx: &App) -> Option<Entity<agent::Thread>> {
         match &self.active_view {
             ActiveView::AgentThread { server_view, .. } => {
@@ -1620,29 +1738,45 @@ impl AgentPanel {
         let current_is_config = matches!(self.active_view, ActiveView::Configuration);
         let new_is_config = matches!(new_view, ActiveView::Configuration);
 
-        let current_is_special = current_is_history || current_is_config;
-        let new_is_special = new_is_history || new_is_config;
+        let current_is_overlay = current_is_history || current_is_config;
+        let new_is_overlay = new_is_history || new_is_config;
 
-        if current_is_uninitialized || (current_is_special && !new_is_special) {
+        if current_is_uninitialized || (current_is_overlay && !new_is_overlay) {
             self.active_view = new_view;
-        } else if !current_is_special && new_is_special {
+        } else if !current_is_overlay && new_is_overlay {
             self.previous_view = Some(std::mem::replace(&mut self.active_view, new_view));
         } else {
-            if !new_is_special {
-                self.previous_view = None;
+            let old_view = std::mem::replace(&mut self.active_view, new_view);
+            if !new_is_overlay {
+                if let Some(previous) = self.previous_view.take() {
+                    self.retain_running_thread(previous, cx);
+                }
             }
-            self.active_view = new_view;
+            self.retain_running_thread(old_view, cx);
         }
 
+        // Subscribe to the active ThreadView's events (e.g. FirstSendRequested)
+        // so the panel can intercept the first send for worktree creation.
+        // Re-subscribe whenever the ConnectionView changes, since the inner
+        // ThreadView may have been replaced (e.g. navigating between threads).
         self._active_view_observation = match &self.active_view {
             ActiveView::AgentThread { server_view } => {
-                Some(cx.observe(server_view, |this, _, cx| {
-                    cx.emit(AgentPanelEvent::ActiveViewChanged);
-                    this.serialize(cx);
-                    cx.notify();
-                }))
+                self._thread_view_subscription =
+                    Self::subscribe_to_active_thread_view(server_view, window, cx);
+                Some(
+                    cx.observe_in(server_view, window, |this, server_view, window, cx| {
+                        this._thread_view_subscription =
+                            Self::subscribe_to_active_thread_view(&server_view, window, cx);
+                        cx.emit(AgentPanelEvent::ActiveViewChanged);
+                        this.serialize(cx);
+                        cx.notify();
+                    }),
+                )
+            }
+            _ => {
+                self._thread_view_subscription = None;
+                None
             }
-            _ => None,
         };
 
         let is_in_agent_history = matches!(
@@ -1756,12 +1890,57 @@ impl AgentPanel {
         self.selected_agent.clone()
     }
 
+    fn subscribe_to_active_thread_view(
+        server_view: &Entity<ConnectionView>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Option<Subscription> {
+        server_view.read(cx).active_thread().cloned().map(|tv| {
+            cx.subscribe_in(
+                &tv,
+                window,
+                |this, view, event: &AcpThreadViewEvent, window, cx| match event {
+                    AcpThreadViewEvent::FirstSendRequested { content } => {
+                        this.handle_first_send_requested(view.clone(), content.clone(), window, cx);
+                    }
+                },
+            )
+        })
+    }
+
+    pub fn start_thread_in(&self) -> &StartThreadIn {
+        &self.start_thread_in
+    }
+
+    fn set_start_thread_in(&mut self, action: &StartThreadIn, cx: &mut Context<Self>) {
+        if matches!(action, StartThreadIn::NewWorktree) && !cx.has_flag::<AgentV2FeatureFlag>() {
+            return;
+        }
+
+        let new_target = match *action {
+            StartThreadIn::LocalProject => StartThreadIn::LocalProject,
+            StartThreadIn::NewWorktree => {
+                if !self.project_has_git_repository(cx) {
+                    log::error!(
+                        "set_start_thread_in: cannot use NewWorktree without a git repository"
+                    );
+                    return;
+                }
+                if self.project.read(cx).is_via_collab() {
+                    log::error!("set_start_thread_in: cannot use NewWorktree in a collab project");
+                    return;
+                }
+                StartThreadIn::NewWorktree
+            }
+        };
+        self.start_thread_in = new_target;
+        self.serialize(cx);
+        cx.notify();
+    }
+
     fn selected_external_agent(&self) -> Option<ExternalAgent> {
         match &self.selected_agent {
             AgentType::NativeAgent => Some(ExternalAgent::NativeAgent),
-            AgentType::Gemini => Some(ExternalAgent::Gemini),
-            AgentType::ClaudeAgent => Some(ExternalAgent::ClaudeCode),
-            AgentType::Codex => Some(ExternalAgent::Codex),
             AgentType::Custom { name } => Some(ExternalAgent::Custom { name: name.clone() }),
             AgentType::TextThread => None,
         }
@@ -1827,25 +2006,6 @@ impl AgentPanel {
                 window,
                 cx,
             ),
-            AgentType::Gemini => {
-                self.external_thread(Some(crate::ExternalAgent::Gemini), None, None, window, cx)
-            }
-            AgentType::ClaudeAgent => {
-                self.selected_agent = AgentType::ClaudeAgent;
-                self.serialize(cx);
-                self.external_thread(
-                    Some(crate::ExternalAgent::ClaudeCode),
-                    None,
-                    None,
-                    window,
-                    cx,
-                )
-            }
-            AgentType::Codex => {
-                self.selected_agent = AgentType::Codex;
-                self.serialize(cx);
-                self.external_thread(Some(crate::ExternalAgent::Codex), None, None, window, cx)
-            }
             AgentType::Custom { name } => self.external_thread(
                 Some(crate::ExternalAgent::Custom { name }),
                 None,
@@ -1862,13 +2022,43 @@ impl AgentPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
+        let session_id = thread.session_id.clone();
+        if let Some(server_view) = self.background_threads.remove(&session_id) {
+            self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx);
+            return;
+        }
+
+        if let ActiveView::AgentThread { server_view } = &self.active_view {
+            if server_view
+                .read(cx)
+                .active_thread()
+                .map(|t| t.read(cx).id.clone())
+                == Some(session_id.clone())
+            {
+                return;
+            }
+        }
+
+        if let Some(ActiveView::AgentThread { server_view }) = &self.previous_view {
+            if server_view
+                .read(cx)
+                .active_thread()
+                .map(|t| t.read(cx).id.clone())
+                == Some(session_id.clone())
+            {
+                let view = self.previous_view.take().unwrap();
+                self.set_active_view(view, true, window, cx);
+                return;
+            }
+        }
+
         let Some(agent) = self.selected_external_agent() else {
             return;
         };
         self.external_thread(Some(agent), Some(thread), None, window, cx);
     }
 
-    fn _external_thread(
+    pub(crate) fn create_external_thread(
         &mut self,
         server: Rc<dyn AgentServer>,
         resume_thread: Option<AgentSessionInfo>,
@@ -1891,7 +2081,7 @@ impl AgentPanel {
             .then(|| self.thread_store.clone());
 
         let server_view = cx.new(|cx| {
-            crate::acp::AcpServerView::new(
+            crate::ConnectionView::new(
                 server,
                 resume_thread,
                 initial_content,
@@ -1905,101 +2095,622 @@ impl AgentPanel {
             )
         });
 
+        cx.observe(&server_view, |this, server_view, cx| {
+            let is_active = this
+                .as_active_server_view()
+                .is_some_and(|active| active.entity_id() == server_view.entity_id());
+            if is_active {
+                cx.emit(AgentPanelEvent::ActiveViewChanged);
+                this.serialize(cx);
+            } else {
+                cx.emit(AgentPanelEvent::BackgroundThreadChanged);
+            }
+            cx.notify();
+        })
+        .detach();
+
         self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx);
     }
-}
 
-impl Focusable for AgentPanel {
-    fn focus_handle(&self, cx: &App) -> FocusHandle {
-        match &self.active_view {
-            ActiveView::Uninitialized => self.focus_handle.clone(),
-            ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx),
-            ActiveView::History { kind } => match kind {
-                HistoryKind::AgentThreads => self.acp_history.focus_handle(cx),
-                HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx),
-            },
-            ActiveView::TextThread {
-                text_thread_editor, ..
-            } => text_thread_editor.focus_handle(cx),
-            ActiveView::Configuration => {
-                if let Some(configuration) = self.configuration.as_ref() {
-                    configuration.focus_handle(cx)
-                } else {
-                    self.focus_handle.clone()
-                }
-            }
-        }
+    fn active_thread_has_messages(&self, cx: &App) -> bool {
+        self.active_agent_thread(cx)
+            .is_some_and(|thread| !thread.read(cx).entries().is_empty())
     }
-}
 
-fn agent_panel_dock_position(cx: &App) -> DockPosition {
-    AgentSettings::get_global(cx).dock.into()
-}
+    fn handle_first_send_requested(
+        &mut self,
+        thread_view: Entity<ThreadView>,
+        content: Vec<acp::ContentBlock>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if self.start_thread_in == StartThreadIn::NewWorktree {
+            self.handle_worktree_creation_requested(content, window, cx);
+        } else {
+            cx.defer_in(window, move |_this, window, cx| {
+                thread_view.update(cx, |thread_view, cx| {
+                    let editor = thread_view.message_editor.clone();
+                    thread_view.send_impl(editor, window, cx);
+                });
+            });
+        }
+    }
 
-pub enum AgentPanelEvent {
-    ActiveViewChanged,
-}
+    /// Partitions the project's visible worktrees into git-backed repositories
+    /// and plain (non-git) paths. Git repos will have worktrees created for
+    /// them; non-git paths are carried over to the new workspace as-is.
+    ///
+    /// When multiple worktrees map to the same repository, the most specific
+    /// match wins (deepest work directory path), with a deterministic
+    /// tie-break on entity id. Each repository appears at most once.
+    fn classify_worktrees(
+        &self,
+        cx: &App,
+    ) -> (Vec<Entity<project::git_store::Repository>>, Vec<PathBuf>) {
+        let project = &self.project;
+        let repositories = project.read(cx).repositories(cx).clone();
+        let mut git_repos: Vec<Entity<project::git_store::Repository>> = Vec::new();
+        let mut non_git_paths: Vec<PathBuf> = Vec::new();
+        let mut seen_repo_ids = std::collections::HashSet::new();
+
+        for worktree in project.read(cx).visible_worktrees(cx) {
+            let wt_path = worktree.read(cx).abs_path();
+
+            let matching_repo = repositories
+                .iter()
+                .filter_map(|(id, repo)| {
+                    let work_dir = repo.read(cx).work_directory_abs_path.clone();
+                    if wt_path.starts_with(work_dir.as_ref())
+                        || work_dir.starts_with(wt_path.as_ref())
+                    {
+                        Some((*id, repo.clone(), work_dir.as_ref().components().count()))
+                    } else {
+                        None
+                    }
+                })
+                .max_by(
+                    |(left_id, _left_repo, left_depth), (right_id, _right_repo, right_depth)| {
+                        left_depth
+                            .cmp(right_depth)
+                            .then_with(|| left_id.cmp(right_id))
+                    },
+                );
 
-impl EventEmitter<PanelEvent> for AgentPanel {}
-impl EventEmitter<AgentPanelEvent> for AgentPanel {}
+            if let Some((id, repo, _)) = matching_repo {
+                if seen_repo_ids.insert(id) {
+                    git_repos.push(repo);
+                }
+            } else {
+                non_git_paths.push(wt_path.to_path_buf());
+            }
+        }
 
-impl Panel for AgentPanel {
-    fn persistent_name() -> &'static str {
-        "AgentPanel"
+        (git_repos, non_git_paths)
     }
 
-    fn panel_key() -> &'static str {
-        AGENT_PANEL_KEY
-    }
+    /// Kicks off an async git-worktree creation for each repository. Returns:
+    ///
+    /// - `creation_infos`: a vec of `(repo, new_path, receiver)` tuples—the
+    ///   receiver resolves once the git worktree command finishes.
+    /// - `path_remapping`: `(old_work_dir, new_worktree_path)` pairs used
+    ///   later to remap open editor tabs into the new workspace.
+    fn start_worktree_creations(
+        git_repos: &[Entity<project::git_store::Repository>],
+        branch_name: &str,
+        worktree_directory_setting: &str,
+        cx: &mut Context<Self>,
+    ) -> Result<(
+        Vec<(
+            Entity<project::git_store::Repository>,
+            PathBuf,
+            futures::channel::oneshot::Receiver<Result<()>>,
+        )>,
+        Vec<(PathBuf, PathBuf)>,
+    )> {
+        let mut creation_infos = Vec::new();
+        let mut path_remapping = Vec::new();
+
+        for repo in git_repos {
+            let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| {
+                let original_repo = repo.original_repo_abs_path.clone();
+                let directory =
+                    validate_worktree_directory(&original_repo, worktree_directory_setting)?;
+                let new_path = directory.join(branch_name);
+                let receiver = repo.create_worktree(branch_name.to_string(), directory, None);
+                let work_dir = repo.work_directory_abs_path.clone();
+                anyhow::Ok((work_dir, new_path, receiver))
+            })?;
+            path_remapping.push((work_dir.to_path_buf(), new_path.clone()));
+            creation_infos.push((repo.clone(), new_path, receiver));
+        }
 
-    fn position(&self, _window: &Window, cx: &App) -> DockPosition {
-        agent_panel_dock_position(cx)
+        Ok((creation_infos, path_remapping))
     }
 
-    fn position_is_valid(&self, position: DockPosition) -> bool {
-        position != DockPosition::Bottom
-    }
+    /// Waits for every in-flight worktree creation to complete. If any
+    /// creation fails, all successfully-created worktrees are rolled back
+    /// (removed) so the project isn't left in a half-migrated state.
+    async fn await_and_rollback_on_failure(
+        creation_infos: Vec<(
+            Entity<project::git_store::Repository>,
+            PathBuf,
+            futures::channel::oneshot::Receiver<Result<()>>,
+        )>,
+        cx: &mut AsyncWindowContext,
+    ) -> Result<Vec<PathBuf>> {
+        let mut created_paths: Vec<PathBuf> = Vec::new();
+        let mut repos_and_paths: Vec<(Entity<project::git_store::Repository>, PathBuf)> =
+            Vec::new();
+        let mut first_error: Option<anyhow::Error> = None;
+
+        for (repo, new_path, receiver) in creation_infos {
+            match receiver.await {
+                Ok(Ok(())) => {
+                    created_paths.push(new_path.clone());
+                    repos_and_paths.push((repo, new_path));
+                }
+                Ok(Err(err)) => {
+                    if first_error.is_none() {
+                        first_error = Some(err);
+                    }
+                }
+                Err(_canceled) => {
+                    if first_error.is_none() {
+                        first_error = Some(anyhow!("Worktree creation was canceled"));
+                    }
+                }
+            }
+        }
 
-    fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context<Self>) {
-        settings::update_settings_file(self.fs.clone(), cx, move |settings, _| {
-            settings
-                .agent
-                .get_or_insert_default()
-                .set_dock(position.into());
-        });
-    }
+        let Some(err) = first_error else {
+            return Ok(created_paths);
+        };
 
-    fn size(&self, window: &Window, cx: &App) -> Pixels {
-        let settings = AgentSettings::get_global(cx);
-        match self.position(window, cx) {
-            DockPosition::Left | DockPosition::Right => {
-                self.width.unwrap_or(settings.default_width)
+        // Rollback all successfully created worktrees
+        let mut rollback_receivers = Vec::new();
+        for (rollback_repo, rollback_path) in &repos_and_paths {
+            if let Ok(receiver) = cx.update(|_, cx| {
+                rollback_repo.update(cx, |repo, _cx| {
+                    repo.remove_worktree(rollback_path.clone(), true)
+                })
+            }) {
+                rollback_receivers.push((rollback_path.clone(), receiver));
             }
-            DockPosition::Bottom => self.height.unwrap_or(settings.default_height),
         }
-    }
-
-    fn set_size(&mut self, size: Option<Pixels>, window: &mut Window, cx: &mut Context<Self>) {
-        match self.position(window, cx) {
-            DockPosition::Left | DockPosition::Right => self.width = size,
-            DockPosition::Bottom => self.height = size,
+        let mut rollback_failures: Vec<String> = Vec::new();
+        for (path, receiver) in rollback_receivers {
+            match receiver.await {
+                Ok(Ok(())) => {}
+                Ok(Err(rollback_err)) => {
+                    log::error!(
+                        "failed to rollback worktree at {}: {rollback_err}",
+                        path.display()
+                    );
+                    rollback_failures.push(format!("{}: {rollback_err}", path.display()));
+                }
+                Err(rollback_err) => {
+                    log::error!(
+                        "failed to rollback worktree at {}: {rollback_err}",
+                        path.display()
+                    );
+                    rollback_failures.push(format!("{}: {rollback_err}", path.display()));
+                }
+            }
         }
-        self.serialize(cx);
-        cx.notify();
+        let mut error_message = format!("Failed to create worktree: {err}");
+        if !rollback_failures.is_empty() {
+            error_message.push_str("\n\nFailed to clean up: ");
+            error_message.push_str(&rollback_failures.join(", "));
+        }
+        Err(anyhow!(error_message))
     }
 
-    fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context<Self>) {
-        if active && matches!(self.active_view, ActiveView::Uninitialized) {
+    fn set_worktree_creation_error(
+        &mut self,
+        message: SharedString,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.worktree_creation_status = Some(WorktreeCreationStatus::Error(message));
+        if matches!(self.active_view, ActiveView::Uninitialized) {
             let selected_agent = self.selected_agent.clone();
             self.new_agent_thread(selected_agent, window, cx);
         }
+        cx.notify();
     }
 
-    fn remote_id() -> Option<proto::PanelId> {
-        Some(proto::PanelId::AssistantPanel)
-    }
-
-    fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
+    fn handle_worktree_creation_requested(
+        &mut self,
+        content: Vec<acp::ContentBlock>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if matches!(
+            self.worktree_creation_status,
+            Some(WorktreeCreationStatus::Creating)
+        ) {
+            return;
+        }
+
+        self.worktree_creation_status = Some(WorktreeCreationStatus::Creating);
+        cx.notify();
+
+        let (git_repos, non_git_paths) = self.classify_worktrees(cx);
+
+        if git_repos.is_empty() {
+            self.set_worktree_creation_error(
+                "No git repositories found in the project".into(),
+                window,
+                cx,
+            );
+            return;
+        }
+
+        // Kick off branch listing as early as possible so it can run
+        // concurrently with the remaining synchronous setup work.
+        let branch_receivers: Vec<_> = git_repos
+            .iter()
+            .map(|repo| repo.update(cx, |repo, _cx| repo.branches()))
+            .collect();
+
+        let worktree_directory_setting = ProjectSettings::get_global(cx)
+            .git
+            .worktree_directory
+            .clone();
+
+        let (dock_structure, open_file_paths) = self
+            .workspace
+            .upgrade()
+            .map(|workspace| {
+                let dock_structure = workspace.read(cx).capture_dock_state(window, cx);
+                let open_file_paths = workspace.read(cx).open_item_abs_paths(cx);
+                (dock_structure, open_file_paths)
+            })
+            .unwrap_or_default();
+
+        let workspace = self.workspace.clone();
+        let window_handle = window
+            .window_handle()
+            .downcast::<workspace::MultiWorkspace>();
+
+        let task = cx.spawn_in(window, async move |this, cx| {
+            // Await the branch listings we kicked off earlier.
+            let mut existing_branches = Vec::new();
+            for result in futures::future::join_all(branch_receivers).await {
+                match result {
+                    Ok(Ok(branches)) => {
+                        for branch in branches {
+                            existing_branches.push(branch.name().to_string());
+                        }
+                    }
+                    Ok(Err(err)) => {
+                        Err::<(), _>(err).log_err();
+                    }
+                    Err(_) => {}
+                }
+            }
+
+            let existing_branch_refs: Vec<&str> =
+                existing_branches.iter().map(|s| s.as_str()).collect();
+            let mut rng = rand::rng();
+            let branch_name =
+                match crate::branch_names::generate_branch_name(&existing_branch_refs, &mut rng) {
+                    Some(name) => name,
+                    None => {
+                        this.update_in(cx, |this, window, cx| {
+                            this.set_worktree_creation_error(
+                                "Failed to generate a branch name: all typewriter names are taken"
+                                    .into(),
+                                window,
+                                cx,
+                            );
+                        })?;
+                        return anyhow::Ok(());
+                    }
+                };
+
+            let (creation_infos, path_remapping) = match this.update_in(cx, |_this, _window, cx| {
+                Self::start_worktree_creations(
+                    &git_repos,
+                    &branch_name,
+                    &worktree_directory_setting,
+                    cx,
+                )
+            }) {
+                Ok(Ok(result)) => result,
+                Ok(Err(err)) | Err(err) => {
+                    this.update_in(cx, |this, window, cx| {
+                        this.set_worktree_creation_error(
+                            format!("Failed to validate worktree directory: {err}").into(),
+                            window,
+                            cx,
+                        );
+                    })
+                    .log_err();
+                    return anyhow::Ok(());
+                }
+            };
+
+            let created_paths = match Self::await_and_rollback_on_failure(creation_infos, cx).await
+            {
+                Ok(paths) => paths,
+                Err(err) => {
+                    this.update_in(cx, |this, window, cx| {
+                        this.set_worktree_creation_error(format!("{err}").into(), window, cx);
+                    })?;
+                    return anyhow::Ok(());
+                }
+            };
+
+            let mut all_paths = created_paths;
+            let has_non_git = !non_git_paths.is_empty();
+            all_paths.extend(non_git_paths.iter().cloned());
+
+            let app_state = match workspace.upgrade() {
+                Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?,
+                None => {
+                    this.update_in(cx, |this, window, cx| {
+                        this.set_worktree_creation_error(
+                            "Workspace no longer available".into(),
+                            window,
+                            cx,
+                        );
+                    })?;
+                    return anyhow::Ok(());
+                }
+            };
+
+            let this_for_error = this.clone();
+            if let Err(err) = Self::setup_new_workspace(
+                this,
+                all_paths,
+                app_state,
+                window_handle,
+                dock_structure,
+                open_file_paths,
+                path_remapping,
+                non_git_paths,
+                has_non_git,
+                content,
+                cx,
+            )
+            .await
+            {
+                this_for_error
+                    .update_in(cx, |this, window, cx| {
+                        this.set_worktree_creation_error(
+                            format!("Failed to set up workspace: {err}").into(),
+                            window,
+                            cx,
+                        );
+                    })
+                    .log_err();
+            }
+            anyhow::Ok(())
+        });
+
+        self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move {
+            task.await.log_err();
+        }));
+    }
+
+    async fn setup_new_workspace(
+        this: WeakEntity<Self>,
+        all_paths: Vec<PathBuf>,
+        app_state: Arc<workspace::AppState>,
+        window_handle: Option<gpui::WindowHandle<workspace::MultiWorkspace>>,
+        dock_structure: workspace::DockStructure,
+        open_file_paths: Vec<PathBuf>,
+        path_remapping: Vec<(PathBuf, PathBuf)>,
+        non_git_paths: Vec<PathBuf>,
+        has_non_git: bool,
+        content: Vec<acp::ContentBlock>,
+        cx: &mut AsyncWindowContext,
+    ) -> Result<()> {
+        let init: Option<
+            Box<dyn FnOnce(&mut Workspace, &mut Window, &mut gpui::Context<Workspace>) + Send>,
+        > = Some(Box::new(move |workspace, window, cx| {
+            workspace.set_dock_structure(dock_structure, window, cx);
+        }));
+
+        let (new_window_handle, _) = cx
+            .update(|_window, cx| {
+                Workspace::new_local(all_paths, app_state, window_handle, None, init, false, cx)
+            })?
+            .await?;
+
+        let new_workspace = new_window_handle.update(cx, |multi_workspace, _window, _cx| {
+            let workspaces = multi_workspace.workspaces();
+            workspaces.last().cloned()
+        })?;
+
+        let Some(new_workspace) = new_workspace else {
+            anyhow::bail!("New workspace was not added to MultiWorkspace");
+        };
+
+        let panels_task = new_window_handle.update(cx, |_, _, cx| {
+            new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task())
+        })?;
+        if let Some(task) = panels_task {
+            task.await.log_err();
+        }
+
+        let initial_content = AgentInitialContent::ContentBlock {
+            blocks: content,
+            auto_submit: true,
+        };
+
+        new_window_handle.update(cx, |_multi_workspace, window, cx| {
+            new_workspace.update(cx, |workspace, cx| {
+                if has_non_git {
+                    let toast_id = workspace::notifications::NotificationId::unique::<AgentPanel>();
+                    workspace.show_toast(
+                        workspace::Toast::new(
+                            toast_id,
+                            "Some project folders are not git repositories. \
+                             They were included as-is without creating a worktree.",
+                        ),
+                        cx,
+                    );
+                }
+
+                let remapped_paths: Vec<PathBuf> = open_file_paths
+                    .iter()
+                    .filter_map(|original_path| {
+                        let best_match = path_remapping
+                            .iter()
+                            .filter_map(|(old_root, new_root)| {
+                                original_path.strip_prefix(old_root).ok().map(|relative| {
+                                    (old_root.components().count(), new_root.join(relative))
+                                })
+                            })
+                            .max_by_key(|(depth, _)| *depth);
+
+                        if let Some((_, remapped_path)) = best_match {
+                            return Some(remapped_path);
+                        }
+
+                        for non_git in &non_git_paths {
+                            if original_path.starts_with(non_git) {
+                                return Some(original_path.clone());
+                            }
+                        }
+                        None
+                    })
+                    .collect();
+
+                if !remapped_paths.is_empty() {
+                    workspace
+                        .open_paths(
+                            remapped_paths,
+                            workspace::OpenOptions::default(),
+                            None,
+                            window,
+                            cx,
+                        )
+                        .detach();
+                }
+
+                workspace.focus_panel::<AgentPanel>(window, cx);
+                if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+                    panel.update(cx, |panel, cx| {
+                        panel.external_thread(None, None, Some(initial_content), window, cx);
+                    });
+                }
+            });
+        })?;
+
+        new_window_handle.update(cx, |multi_workspace, _window, cx| {
+            multi_workspace.activate(new_workspace.clone(), cx);
+        })?;
+
+        this.update_in(cx, |this, _window, cx| {
+            this.worktree_creation_status = None;
+            cx.notify();
+        })?;
+
+        anyhow::Ok(())
+    }
+}
+
+impl Focusable for AgentPanel {
+    fn focus_handle(&self, cx: &App) -> FocusHandle {
+        match &self.active_view {
+            ActiveView::Uninitialized => self.focus_handle.clone(),
+            ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx),
+            ActiveView::History { kind } => match kind {
+                HistoryKind::AgentThreads => self.acp_history.focus_handle(cx),
+                HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx),
+            },
+            ActiveView::TextThread {
+                text_thread_editor, ..
+            } => text_thread_editor.focus_handle(cx),
+            ActiveView::Configuration => {
+                if let Some(configuration) = self.configuration.as_ref() {
+                    configuration.focus_handle(cx)
+                } else {
+                    self.focus_handle.clone()
+                }
+            }
+        }
+    }
+}
+
+fn agent_panel_dock_position(cx: &App) -> DockPosition {
+    AgentSettings::get_global(cx).dock.into()
+}
+
+pub enum AgentPanelEvent {
+    ActiveViewChanged,
+    BackgroundThreadChanged,
+}
+
+impl EventEmitter<PanelEvent> for AgentPanel {}
+impl EventEmitter<AgentPanelEvent> for AgentPanel {}
+
+impl Panel for AgentPanel {
+    fn persistent_name() -> &'static str {
+        "AgentPanel"
+    }
+
+    fn panel_key() -> &'static str {
+        AGENT_PANEL_KEY
+    }
+
+    fn position(&self, _window: &Window, cx: &App) -> DockPosition {
+        agent_panel_dock_position(cx)
+    }
+
+    fn position_is_valid(&self, position: DockPosition) -> bool {
+        position != DockPosition::Bottom
+    }
+
+    fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context<Self>) {
+        settings::update_settings_file(self.fs.clone(), cx, move |settings, _| {
+            settings
+                .agent
+                .get_or_insert_default()
+                .set_dock(position.into());
+        });
+    }
+
+    fn size(&self, window: &Window, cx: &App) -> Pixels {
+        let settings = AgentSettings::get_global(cx);
+        match self.position(window, cx) {
+            DockPosition::Left | DockPosition::Right => {
+                self.width.unwrap_or(settings.default_width)
+            }
+            DockPosition::Bottom => self.height.unwrap_or(settings.default_height),
+        }
+    }
+
+    fn set_size(&mut self, size: Option<Pixels>, window: &mut Window, cx: &mut Context<Self>) {
+        match self.position(window, cx) {
+            DockPosition::Left | DockPosition::Right => self.width = size,
+            DockPosition::Bottom => self.height = size,
+        }
+        self.serialize(cx);
+        cx.notify();
+    }
+
+    fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context<Self>) {
+        if active
+            && matches!(self.active_view, ActiveView::Uninitialized)
+            && !matches!(
+                self.worktree_creation_status,
+                Some(WorktreeCreationStatus::Creating)
+            )
+        {
+            let selected_agent = self.selected_agent.clone();
+            self.new_agent_thread(selected_agent, window, cx);
+        }
+    }
+
+    fn remote_id() -> Option<proto::PanelId> {
+        Some(proto::PanelId::AssistantPanel)
+    }
+
+    fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
         (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant)
     }
 

crates/agent_ui/src/agent_registry_ui.rs 🔗

@@ -1,6 +1,4 @@
-use std::collections::{BTreeMap, BTreeSet};
 use std::ops::Range;
-use std::sync::OnceLock;
 
 use client::zed_urls;
 use collections::HashMap;
@@ -16,7 +14,7 @@ use project::{AgentRegistryStore, RegistryAgent};
 use settings::{Settings, SettingsStore, update_settings_file};
 use theme::ThemeSettings;
 use ui::{
-    Banner, ButtonStyle, ScrollableHandle, Severity, ToggleButtonGroup, ToggleButtonGroupSize,
+    ButtonStyle, ScrollableHandle, ToggleButtonGroup, ToggleButtonGroupSize,
     ToggleButtonGroupStyle, ToggleButtonSimple, Tooltip, WithScrollbar, prelude::*,
 };
 use workspace::{
@@ -24,10 +22,6 @@ use workspace::{
     item::{Item, ItemEvent},
 };
 
-/// Registry IDs for built-in agents that Zed already provides first-class support for.
-/// These are filtered out of the ACP Agent Registry UI to avoid showing duplicates.
-const BUILT_IN_REGISTRY_IDS: [&str; 4] = ["claude-acp", "claude-code-acp", "codex-acp", "gemini"];
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 enum RegistryFilter {
     All,
@@ -43,28 +37,6 @@ enum RegistryInstallStatus {
     InstalledExtension,
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
-enum BuiltInAgent {
-    Claude,
-    Codex,
-    Gemini,
-}
-
-fn keywords_by_agent_feature() -> &'static BTreeMap<BuiltInAgent, Vec<&'static str>> {
-    static KEYWORDS_BY_FEATURE: OnceLock<BTreeMap<BuiltInAgent, Vec<&'static str>>> =
-        OnceLock::new();
-    KEYWORDS_BY_FEATURE.get_or_init(|| {
-        BTreeMap::from_iter([
-            (
-                BuiltInAgent::Claude,
-                vec!["claude", "claude code", "claude agent"],
-            ),
-            (BuiltInAgent::Codex, vec!["codex", "codex cli"]),
-            (BuiltInAgent::Gemini, vec!["gemini", "gemini cli"]),
-        ])
-    })
-}
-
 #[derive(IntoElement)]
 struct AgentRegistryCard {
     children: Vec<AnyElement>,
@@ -110,7 +82,6 @@ pub struct AgentRegistryPage {
     installed_statuses: HashMap<String, RegistryInstallStatus>,
     query_editor: Entity<Editor>,
     filter: RegistryFilter,
-    upsells: BTreeSet<BuiltInAgent>,
     _subscriptions: Vec<gpui::Subscription>,
 }
 
@@ -145,7 +116,6 @@ impl AgentRegistryPage {
                 installed_statuses: HashMap::default(),
                 query_editor,
                 filter: RegistryFilter::All,
-                upsells: BTreeSet::new(),
                 _subscriptions: subscriptions,
             };
 
@@ -162,8 +132,14 @@ impl AgentRegistryPage {
         self.registry_agents.sort_by(|left, right| {
             left.name()
                 .as_ref()
-                .cmp(right.name().as_ref())
-                .then_with(|| left.id().as_ref().cmp(right.id().as_ref()))
+                .to_lowercase()
+                .cmp(&right.name().as_ref().to_lowercase())
+                .then_with(|| {
+                    left.id()
+                        .as_ref()
+                        .to_lowercase()
+                        .cmp(&right.id().as_ref().to_lowercase())
+                })
         });
         self.filter_registry_agents(cx);
     }
@@ -173,7 +149,7 @@ impl AgentRegistryPage {
             .global::<SettingsStore>()
             .get::<AllAgentServersSettings>(None);
         self.installed_statuses.clear();
-        for (id, settings) in &settings.custom {
+        for (id, settings) in settings.iter() {
             let status = match settings {
                 CustomAgentServerSettings::Registry { .. } => {
                     RegistryInstallStatus::InstalledRegistry
@@ -205,7 +181,6 @@ impl AgentRegistryPage {
 
     fn filter_registry_agents(&mut self, cx: &mut Context<Self>) {
         self.refresh_installed_statuses(cx);
-        self.refresh_feature_upsells(cx);
         let search = self.search_query(cx).map(|search| search.to_lowercase());
         let filter = self.filter;
         let installed_statuses = self.installed_statuses.clone();
@@ -215,12 +190,6 @@ impl AgentRegistryPage {
             .iter()
             .enumerate()
             .filter(|(_, agent)| {
-                // Filter out built-in agents since they already appear in the main
-                // agent configuration UI and don't need to be installed from the registry.
-                if BUILT_IN_REGISTRY_IDS.contains(&agent.id().as_ref()) {
-                    return false;
-                }
-
                 let matches_search = search.as_ref().is_none_or(|query| {
                     let query = query.as_str();
                     agent.id().as_ref().to_lowercase().contains(query)
@@ -269,83 +238,6 @@ impl AgentRegistryPage {
         }
     }
 
-    fn refresh_feature_upsells(&mut self, cx: &mut Context<Self>) {
-        let Some(search) = self.search_query(cx) else {
-            self.upsells.clear();
-            return;
-        };
-
-        let search = search.to_lowercase();
-        let search_terms = search
-            .split_whitespace()
-            .map(|term| term.trim())
-            .collect::<Vec<_>>();
-
-        for (feature, keywords) in keywords_by_agent_feature() {
-            if keywords
-                .iter()
-                .any(|keyword| search_terms.contains(keyword))
-            {
-                self.upsells.insert(*feature);
-            } else {
-                self.upsells.remove(feature);
-            }
-        }
-    }
-
-    fn render_feature_upsell_banner(
-        &self,
-        label: SharedString,
-        docs_url: SharedString,
-    ) -> impl IntoElement {
-        let docs_url_button = Button::new("open_docs", "View Documentation")
-            .icon(IconName::ArrowUpRight)
-            .icon_size(IconSize::Small)
-            .icon_position(IconPosition::End)
-            .icon_color(Color::Muted)
-            .on_click({
-                move |_event, _window, cx| {
-                    telemetry::event!(
-                        "Documentation Viewed",
-                        source = "Agent Registry Feature Upsell",
-                        url = docs_url,
-                    );
-                    cx.open_url(&docs_url)
-                }
-            });
-
-        div().pt_4().px_4().child(
-            Banner::new()
-                .severity(Severity::Success)
-                .child(Label::new(label).mt_0p5())
-                .action_slot(docs_url_button),
-        )
-    }
-
-    fn render_feature_upsells(&self) -> impl IntoElement {
-        let mut container = v_flex();
-
-        for feature in &self.upsells {
-            let banner = match feature {
-                BuiltInAgent::Claude => self.render_feature_upsell_banner(
-                    "Claude Agent support is built-in to Zed!".into(),
-                    "https://zed.dev/docs/ai/external-agents#claude-agent".into(),
-                ),
-                BuiltInAgent::Codex => self.render_feature_upsell_banner(
-                    "Codex CLI support is built-in to Zed!".into(),
-                    "https://zed.dev/docs/ai/external-agents#codex-cli".into(),
-                ),
-                BuiltInAgent::Gemini => self.render_feature_upsell_banner(
-                    "Gemini CLI support is built-in to Zed!".into(),
-                    "https://zed.dev/docs/ai/external-agents#gemini-cli".into(),
-                ),
-            };
-            container = container.child(banner);
-        }
-
-        container
-    }
-
     fn render_search(&self, cx: &mut Context<Self>) -> Div {
         let mut key_context = KeyContext::new_with_defaults();
         key_context.add("BufferSearchBar");
@@ -583,7 +475,7 @@ impl AgentRegistryPage {
                         let agent_id = agent_id.clone();
                         update_settings_file(fs.clone(), cx, move |settings, _| {
                             let agent_servers = settings.agent_servers.get_or_insert_default();
-                            agent_servers.custom.entry(agent_id).or_insert_with(|| {
+                            agent_servers.entry(agent_id).or_insert_with(|| {
                                 settings::CustomAgentServerSettings::Registry {
                                     default_mode: None,
                                     default_model: None,
@@ -607,13 +499,13 @@ impl AgentRegistryPage {
                             let Some(agent_servers) = settings.agent_servers.as_mut() else {
                                 return;
                             };
-                            if let Some(entry) = agent_servers.custom.get(agent_id.as_str())
+                            if let Some(entry) = agent_servers.get(agent_id.as_str())
                                 && matches!(
                                     entry,
                                     settings::CustomAgentServerSettings::Registry { .. }
                                 )
                             {
-                                agent_servers.custom.remove(agent_id.as_str());
+                                agent_servers.remove(agent_id.as_str());
                             }
                         });
                     })
@@ -708,14 +600,10 @@ impl Render for AgentRegistryPage {
                             ),
                     ),
             )
-            .child(self.render_feature_upsells())
             .child(v_flex().px_4().size_full().overflow_y_hidden().map(|this| {
                 let count = self.filtered_registry_indices.len();
-                let has_upsells = !self.upsells.is_empty();
-                if count == 0 && !has_upsells {
+                if count == 0 {
                     this.child(self.render_empty_state(cx)).into_any_element()
-                } else if count == 0 {
-                    this.into_any_element()
                 } else {
                     let scroll_handle = &self.list;
                     this.child(

crates/agent_ui/src/agent_ui.rs 🔗

@@ -1,31 +1,40 @@
-pub mod acp;
 mod agent_configuration;
 mod agent_diff;
 mod agent_model_selector;
 mod agent_panel;
 mod agent_registry_ui;
+mod branch_names;
 mod buffer_codegen;
 mod completion_provider;
+mod config_options;
+pub(crate) mod connection_view;
 mod context;
 mod context_server_configuration;
+mod entry_view_state;
 mod favorite_models;
 mod inline_assistant;
 mod inline_prompt_editor;
 mod language_model_selector;
 mod mention_set;
+mod message_editor;
+mod mode_selector;
+mod model_selector;
+mod model_selector_popover;
 mod profile_selector;
 mod slash_command;
 mod slash_command_picker;
 mod terminal_codegen;
 mod terminal_inline_assistant;
+#[cfg(any(test, feature = "test-support"))]
+pub mod test_support;
 mod text_thread_editor;
 mod text_thread_history;
+mod thread_history;
 mod ui;
 
 use std::rc::Rc;
 use std::sync::Arc;
 
-// Another comment
 use agent_settings::{AgentProfileId, AgentSettings};
 use assistant_slash_command::SlashCommandRegistry;
 use client::Client;
@@ -49,11 +58,18 @@ use std::any::TypeId;
 use workspace::Workspace;
 
 use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal};
-pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate};
+pub use crate::agent_panel::{
+    AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate, WorktreeCreationStatus,
+};
 use crate::agent_registry_ui::AgentRegistryPage;
 pub use crate::inline_assistant::InlineAssistant;
 pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
+pub(crate) use connection_view::ConnectionView;
+pub(crate) use mode_selector::ModeSelector;
+pub(crate) use model_selector::ModelSelector;
+pub(crate) use model_selector_popover::ModelSelectorPopover;
 pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor};
+pub(crate) use thread_history::*;
 use zed_actions;
 
 actions!(
@@ -149,6 +165,8 @@ actions!(
         CycleThinkingEffort,
         /// Toggles the thinking effort selector menu open or closed.
         ToggleThinkingEffortMenu,
+        /// Toggles fast mode for models that support it.
+        ToggleFastMode,
     ]
 );
 
@@ -166,18 +184,6 @@ pub struct AuthorizeToolCall {
     pub option_kind: String,
 }
 
-/// Action to select a permission granularity option from the dropdown.
-/// This updates the selected granularity without triggering authorization.
-#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
-#[action(namespace = agent)]
-#[serde(deny_unknown_fields)]
-pub struct SelectPermissionGranularity {
-    /// The tool call ID for which to select the granularity.
-    pub tool_call_id: String,
-    /// The index of the selected granularity option.
-    pub index: usize,
-}
-
 /// Creates a new conversation thread, optionally based on an existing thread.
 #[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
 #[action(namespace = agent)]
@@ -204,9 +210,6 @@ pub struct NewNativeAgentThreadFromSummary {
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)]
 #[serde(rename_all = "snake_case")]
 pub enum ExternalAgent {
-    Gemini,
-    ClaudeCode,
-    Codex,
     NativeAgent,
     Custom { name: SharedString },
 }
@@ -218,15 +221,24 @@ impl ExternalAgent {
         thread_store: Entity<agent::ThreadStore>,
     ) -> Rc<dyn agent_servers::AgentServer> {
         match self {
-            Self::Gemini => Rc::new(agent_servers::Gemini),
-            Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
-            Self::Codex => Rc::new(agent_servers::Codex),
             Self::NativeAgent => Rc::new(agent::NativeAgentServer::new(fs, thread_store)),
             Self::Custom { name } => Rc::new(agent_servers::CustomAgentServer::new(name.clone())),
         }
     }
 }
 
+/// Sets where new threads will run.
+#[derive(
+    Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action,
+)]
+#[action(namespace = agent)]
+#[serde(rename_all = "snake_case", tag = "kind")]
+pub enum StartThreadIn {
+    #[default]
+    LocalProject,
+    NewWorktree,
+}
+
 /// Content to initialize new external agent with.
 pub enum AgentInitialContent {
     ThreadSummary(acp_thread::AgentSessionInfo),
@@ -310,6 +322,10 @@ pub fn init(
                     .find_map(|item| item.downcast::<AgentRegistryPage>());
 
                 if let Some(existing) = existing {
+                    existing.update(cx, |_, cx| {
+                        project::AgentRegistryStore::global(cx)
+                            .update(cx, |store, cx| store.refresh(cx));
+                    });
                     workspace.activate_item(&existing, true, true, window, cx);
                 } else {
                     let registry_page = AgentRegistryPage::new(workspace, window, cx);
@@ -372,7 +388,6 @@ fn update_command_palette_filter(cx: &mut App) {
             filter.hide_namespace("agents");
             filter.hide_namespace("assistant");
             filter.hide_namespace("copilot");
-            filter.hide_namespace("supermaven");
             filter.hide_namespace("zed_predict_onboarding");
             filter.hide_namespace("edit_prediction");
 
@@ -393,19 +408,11 @@ fn update_command_palette_filter(cx: &mut App) {
                 EditPredictionProvider::None => {
                     filter.hide_namespace("edit_prediction");
                     filter.hide_namespace("copilot");
-                    filter.hide_namespace("supermaven");
                     filter.hide_action_types(&edit_prediction_actions);
                 }
                 EditPredictionProvider::Copilot => {
                     filter.show_namespace("edit_prediction");
                     filter.show_namespace("copilot");
-                    filter.hide_namespace("supermaven");
-                    filter.show_action_types(edit_prediction_actions.iter());
-                }
-                EditPredictionProvider::Supermaven => {
-                    filter.show_namespace("edit_prediction");
-                    filter.hide_namespace("copilot");
-                    filter.show_namespace("supermaven");
                     filter.show_action_types(edit_prediction_actions.iter());
                 }
                 EditPredictionProvider::Zed
@@ -417,7 +424,6 @@ fn update_command_palette_filter(cx: &mut App) {
                 | EditPredictionProvider::Experimental(_) => {
                     filter.show_namespace("edit_prediction");
                     filter.hide_namespace("copilot");
-                    filter.hide_namespace("supermaven");
                     filter.show_action_types(edit_prediction_actions.iter());
                 }
             }

crates/agent_ui/src/branch_names.rs 🔗

@@ -0,0 +1,847 @@
+use collections::HashSet;
+use rand::Rng;
+
+/// Names of historical typewriter brands, for use in auto-generated branch names.
+/// (Hyphens and parens have been dropped so that the branch names are one-word.)
+///
+/// Thanks to https://typewriterdatabase.com/alph.0.brands for the names!
+const TYPEWRITER_NAMES: &[&str] = &[
+    "abeille",
+    "acme",
+    "addo",
+    "adler",
+    "adlerette",
+    "adlerita",
+    "admiral",
+    "agamli",
+    "agar",
+    "agidel",
+    "agil",
+    "aguia",
+    "aguila",
+    "ahram",
+    "aigle",
+    "ajax",
+    "aktiv",
+    "ala",
+    "alba",
+    "albus",
+    "alexander",
+    "alexis",
+    "alfa",
+    "allen",
+    "alonso",
+    "alpina",
+    "amata",
+    "amaya",
+    "amka",
+    "anavi",
+    "anderson",
+    "andina",
+    "antares",
+    "apex",
+    "apsco",
+    "aquila",
+    "archo",
+    "ardita",
+    "argyle",
+    "aristocrat",
+    "aristokrat",
+    "arlington",
+    "armstrong",
+    "arpha",
+    "artus",
+    "astoria",
+    "atlantia",
+    "atlantic",
+    "atlas",
+    "augusta",
+    "aurora",
+    "austro",
+    "automatic",
+    "avanti",
+    "avona",
+    "azzurra",
+    "bajnok",
+    "baldwin",
+    "balkan",
+    "baltica",
+    "baltimore",
+    "barlock",
+    "barr",
+    "barrat",
+    "bartholomew",
+    "bashkiriya",
+    "bavaria",
+    "beaucourt",
+    "beko",
+    "belka",
+    "bennett",
+    "bennington",
+    "berni",
+    "bianca",
+    "bijou",
+    "bing",
+    "bisei",
+    "biser",
+    "bluebird",
+    "bolida",
+    "borgo",
+    "boston",
+    "boyce",
+    "bradford",
+    "brandenburg",
+    "brigitte",
+    "briton",
+    "brooks",
+    "brosette",
+    "buddy",
+    "burns",
+    "burroughs",
+    "byron",
+    "calanda",
+    "caligraph",
+    "cappel",
+    "cardinal",
+    "carissima",
+    "carlem",
+    "carlton",
+    "carmen",
+    "cawena",
+    "cella",
+    "celtic",
+    "century",
+    "champignon",
+    "cherryland",
+    "chevron",
+    "chicago",
+    "cicero",
+    "cifra",
+    "citizen",
+    "claudia",
+    "cleveland",
+    "clover",
+    "coffman",
+    "cole",
+    "columbia",
+    "commercial",
+    "companion",
+    "concentra",
+    "concord",
+    "concordia",
+    "conover",
+    "constanta",
+    "consul",
+    "conta",
+    "contenta",
+    "contimat",
+    "contina",
+    "continento",
+    "cornelia",
+    "coronado",
+    "cosmopolita",
+    "courier",
+    "craftamatic",
+    "crandall",
+    "crown",
+    "culema",
+    "dactyle",
+    "dankers",
+    "dart",
+    "daugherty",
+    "davis",
+    "dayton",
+    "dea",
+    "delmar",
+    "densmore",
+    "depantio",
+    "diadema",
+    "dial",
+    "diamant",
+    "diana",
+    "dictatype",
+    "diplomat",
+    "diskret",
+    "dolfus",
+    "dollar",
+    "domus",
+    "drake",
+    "draper",
+    "duplex",
+    "durabel",
+    "dynacord",
+    "eagle",
+    "eclipse",
+    "edelmann",
+    "edelweiss",
+    "edison",
+    "edita",
+    "edland",
+    "efka",
+    "eldorado",
+    "electa",
+    "electromatic",
+    "elektro",
+    "elgin",
+    "elliot",
+    "emerson",
+    "emka",
+    "emona",
+    "empire",
+    "engadine",
+    "engler",
+    "erfurt",
+    "erika",
+    "esko",
+    "essex",
+    "eureka",
+    "europa",
+    "everest",
+    "everlux",
+    "excelsior",
+    "express",
+    "fabers",
+    "facit",
+    "fairbanks",
+    "faktotum",
+    "famos",
+    "federal",
+    "felio",
+    "fidat",
+    "filius",
+    "fips",
+    "fish",
+    "fitch",
+    "fleet",
+    "florida",
+    "flott",
+    "flyer",
+    "flying",
+    "fontana",
+    "ford",
+    "forto",
+    "fortuna",
+    "fox",
+    "framo",
+    "franconia",
+    "franklin",
+    "friden",
+    "frolio",
+    "furstenberg",
+    "galesburg",
+    "galiette",
+    "gallia",
+    "garbell",
+    "gardner",
+    "geka",
+    "generation",
+    "genia",
+    "geniatus",
+    "gerda",
+    "gisela",
+    "glashutte",
+    "gloria",
+    "godrej",
+    "gossen",
+    "gourland",
+    "grandjean",
+    "granta",
+    "granville",
+    "graphic",
+    "gritzner",
+    "groma",
+    "guhl",
+    "guidonia",
+    "gundka",
+    "hacabo",
+    "haddad",
+    "halberg",
+    "halda",
+    "hall",
+    "hammond",
+    "hammonia",
+    "hanford",
+    "hansa",
+    "harmony",
+    "harris",
+    "hartford",
+    "hassia",
+    "hatch",
+    "heady",
+    "hebronia",
+    "hebros",
+    "hega",
+    "helios",
+    "helma",
+    "herald",
+    "hercules",
+    "hermes",
+    "herold",
+    "heros",
+    "hesperia",
+    "hogar",
+    "hooven",
+    "hopkins",
+    "horton",
+    "hugin",
+    "hungaria",
+    "hurtu",
+    "iberia",
+    "idea",
+    "ideal",
+    "imperia",
+    "impo",
+    "industria",
+    "industrio",
+    "ingersoll",
+    "international",
+    "invicta",
+    "irene",
+    "iris",
+    "iskra",
+    "ivitsa",
+    "ivriah",
+    "jackson",
+    "janalif",
+    "janos",
+    "jolux",
+    "juki",
+    "junior",
+    "juventa",
+    "juwel",
+    "kamkap",
+    "kamo",
+    "kanzler",
+    "kappel",
+    "karli",
+    "karstadt",
+    "keaton",
+    "kenbar",
+    "keystone",
+    "kim",
+    "klein",
+    "kneist",
+    "knoch",
+    "koh",
+    "kolibri",
+    "kolumbus",
+    "komet",
+    "kondor",
+    "koniger",
+    "konryu",
+    "kontor",
+    "kosmopolit",
+    "krypton",
+    "lambert",
+    "lasalle",
+    "lectra",
+    "leframa",
+    "lemair",
+    "lemco",
+    "liberty",
+    "libia",
+    "liga",
+    "lignose",
+    "lilliput",
+    "lindeteves",
+    "linowriter",
+    "listvitsa",
+    "ludolf",
+    "lutece",
+    "luxa",
+    "lyubava",
+    "mafra",
+    "magnavox",
+    "maher",
+    "majestic",
+    "majitouch",
+    "manhattan",
+    "mapuua",
+    "marathon",
+    "marburger",
+    "maritsa",
+    "maruzen",
+    "maskelyne",
+    "masspro",
+    "matous",
+    "mccall",
+    "mccool",
+    "mcloughlin",
+    "mead",
+    "mechno",
+    "mehano",
+    "meiselbach",
+    "melbi",
+    "melior",
+    "melotyp",
+    "mentor",
+    "mepas",
+    "mercedesia",
+    "mercurius",
+    "mercury",
+    "merkur",
+    "merritt",
+    "merz",
+    "messa",
+    "meteco",
+    "meteor",
+    "micron",
+    "mignon",
+    "mikro",
+    "minerva",
+    "mirian",
+    "mirina",
+    "mitex",
+    "molle",
+    "monac",
+    "monarch",
+    "mondiale",
+    "monica",
+    "monofix",
+    "monopol",
+    "monpti",
+    "monta",
+    "montana",
+    "montgomery",
+    "moon",
+    "morgan",
+    "morris",
+    "morse",
+    "moya",
+    "moyer",
+    "munson",
+    "musicwriter",
+    "nadex",
+    "nakajima",
+    "neckermann",
+    "neubert",
+    "neya",
+    "ninety",
+    "nisa",
+    "noiseless",
+    "noor",
+    "nora",
+    "nord",
+    "norden",
+    "norica",
+    "norma",
+    "norman",
+    "north",
+    "nototyp",
+    "nova",
+    "novalevi",
+    "odell",
+    "odhner",
+    "odo",
+    "odoma",
+    "ohio",
+    "ohtani",
+    "oliva",
+    "oliver",
+    "olivetti",
+    "olympia",
+    "omega",
+    "optima",
+    "orbis",
+    "orel",
+    "orga",
+    "oriette",
+    "orion",
+    "orn",
+    "orplid",
+    "pacior",
+    "pagina",
+    "parisienne",
+    "passat",
+    "pearl",
+    "peerless",
+    "perfect",
+    "perfecta",
+    "perkeo",
+    "perkins",
+    "perlita",
+    "pettypet",
+    "phoenix",
+    "piccola",
+    "picht",
+    "pinnock",
+    "pionier",
+    "plurotyp",
+    "plutarch",
+    "pneumatic",
+    "pocket",
+    "polyglott",
+    "polygraph",
+    "pontiac",
+    "portable",
+    "portex",
+    "pozzi",
+    "premier",
+    "presto",
+    "primavera",
+    "progress",
+    "protos",
+    "pterotype",
+    "pullman",
+    "pulsatta",
+    "quick",
+    "racer",
+    "radio",
+    "rally",
+    "rand",
+    "readers",
+    "reed",
+    "referent",
+    "reff",
+    "regent",
+    "regia",
+    "regina",
+    "rekord",
+    "reliable",
+    "reliance",
+    "remagg",
+    "rembrandt",
+    "remer",
+    "remington",
+    "remsho",
+    "remstar",
+    "remtor",
+    "reporters",
+    "resko",
+    "rex",
+    "rexpel",
+    "rheinita",
+    "rheinmetall",
+    "rival",
+    "roberts",
+    "robotron",
+    "rocher",
+    "rochester",
+    "roebuck",
+    "rofa",
+    "roland",
+    "rooy",
+    "rover",
+    "roxy",
+    "roy",
+    "royal",
+    "rundstatler",
+    "sabaudia",
+    "sabb",
+    "saleem",
+    "salter",
+    "sampo",
+    "sarafan",
+    "saturn",
+    "saxonia",
+    "schade",
+    "schapiro",
+    "schreibi",
+    "scripta",
+    "sears",
+    "secor",
+    "selectric",
+    "selekta",
+    "senator",
+    "sense",
+    "senta",
+    "serd",
+    "shilling",
+    "shimade",
+    "shimer",
+    "sholes",
+    "shuang",
+    "siegfried",
+    "siemag",
+    "silma",
+    "silver",
+    "simplex",
+    "simtype",
+    "singer",
+    "smith",
+    "soemtron",
+    "sonja",
+    "speedwriter",
+    "sphinx",
+    "starlet",
+    "stearns",
+    "steel",
+    "stella",
+    "steno",
+    "sterling",
+    "stoewer",
+    "stolzenberg",
+    "stott",
+    "strangfeld",
+    "sture",
+    "stylotyp",
+    "sun",
+    "superba",
+    "superia",
+    "supermetall",
+    "surety",
+    "swintec",
+    "swissa",
+    "talbos",
+    "talleres",
+    "tatrapoint",
+    "taurus",
+    "taylorix",
+    "tell",
+    "tempotype",
+    "tippco",
+    "titania",
+    "tops",
+    "towa",
+    "toyo",
+    "tradition",
+    "transatlantic",
+    "traveller",
+    "trebla",
+    "triumph",
+    "turia",
+    "typatune",
+    "typen",
+    "typorium",
+    "ugro",
+    "ultima",
+    "unda",
+    "underwood",
+    "unica",
+    "unitype",
+    "ursula",
+    "utax",
+    "varityper",
+    "vasanta",
+    "vendex",
+    "venus",
+    "victor",
+    "victoria",
+    "video",
+    "viking",
+    "vira",
+    "virotyp",
+    "visigraph",
+    "vittoria",
+    "volcan",
+    "vornado",
+    "voss",
+    "vultur",
+    "waltons",
+    "wanamaker",
+    "wanderer",
+    "ward",
+    "warner",
+    "waterloo",
+    "waverley",
+    "wayne",
+    "webster",
+    "wedgefield",
+    "welco",
+    "wellington",
+    "wellon",
+    "weltblick",
+    "westphalia",
+    "wiedmer",
+    "williams",
+    "wilson",
+    "winkel",
+    "winsor",
+    "wizard",
+    "woodstock",
+    "woodwards",
+    "yatran",
+    "yost",
+    "zenit",
+    "zentronik",
+    "zeta",
+    "zeya",
+];
+
+/// Picks a typewriter name that isn't already taken by an existing branch.
+///
+/// Each entry in `existing_branches` is expected to be a full branch name
+/// like `"olivetti-a3f9b2c1"`. The prefix before the last `'-'` is treated
+/// as the taken typewriter name. Branches without a `'-'` are ignored.
+///
+/// Returns `None` when every name in the pool is already taken.
+pub fn pick_typewriter_name(
+    existing_branches: &[&str],
+    rng: &mut impl Rng,
+) -> Option<&'static str> {
+    // Collect the names already claimed: everything before the final '-'
+    // of each existing branch. Branches with no '-' contribute nothing.
+    let mut taken: HashSet<&str> = HashSet::default();
+    for branch in existing_branches {
+        if let Some((prefix, _)) = branch.rsplit_once('-') {
+            taken.insert(prefix);
+        }
+    }
+
+    // Keep only the names still free to use.
+    let candidates: Vec<&'static str> = TYPEWRITER_NAMES
+        .iter()
+        .copied()
+        .filter(|name| !taken.contains(name))
+        .collect();
+
+    if candidates.is_empty() {
+        None
+    } else {
+        // Uniform choice among the remaining free names.
+        let index = rng.random_range(0..candidates.len());
+        Some(candidates[index])
+    }
+}
+
+/// Generates a branch name like `"olivetti-a3f9b2c1"` by picking a typewriter
+/// name that isn't already taken and appending an 8-character alphanumeric hash.
+///
+/// Returns `None` when every typewriter name in the pool is already taken.
+pub fn generate_branch_name(existing_branches: &[&str], rng: &mut impl Rng) -> Option<String> {
+    let name = pick_typewriter_name(existing_branches, rng)?;
+
+    // Eight random base-36 digits, rendered as lowercase alphanumerics
+    // ('0'-'9' then 'a'-'z').
+    let mut suffix = String::with_capacity(8);
+    for _ in 0..8 {
+        let digit: u8 = rng.random_range(0..36);
+        let ch = char::from_digit(u32::from(digit), 36).expect("digit is < 36");
+        suffix.push(ch);
+    }
+
+    Some(format!("{name}-{suffix}"))
+}
+
+/// Tests for typewriter-themed branch-name generation.
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::rngs::StdRng;
+
+    #[gpui::test(iterations = 10)]
+    fn test_pick_typewriter_name_with_no_disallowed(mut rng: StdRng) {
+        let name = pick_typewriter_name(&[], &mut rng);
+        assert!(name.is_some());
+        assert!(TYPEWRITER_NAMES.contains(&name.unwrap()));
+    }
+
+    #[gpui::test(iterations = 10)]
+    fn test_pick_typewriter_name_excludes_taken_names(mut rng: StdRng) {
+        let branch_names = &["olivetti-abc12345", "selectric-def67890"];
+        let name = pick_typewriter_name(branch_names, &mut rng).unwrap();
+        assert_ne!(name, "olivetti");
+        assert_ne!(name, "selectric");
+    }
+
+    // Claiming every name in the pool must exhaust the picker.
+    #[gpui::test]
+    fn test_pick_typewriter_name_all_taken(mut rng: StdRng) {
+        let branch_names: Vec<String> = TYPEWRITER_NAMES
+            .iter()
+            .map(|name| format!("{name}-00000000"))
+            .collect();
+        let branch_name_refs: Vec<&str> = branch_names.iter().map(|s| s.as_str()).collect();
+        let name = pick_typewriter_name(&branch_name_refs, &mut rng);
+        assert!(name.is_none());
+    }
+
+    // Branches without a '-' (e.g. "main") must not disqualify any name.
+    #[gpui::test(iterations = 10)]
+    fn test_pick_typewriter_name_ignores_branches_without_hyphen(mut rng: StdRng) {
+        let branch_names = &["main", "develop", "feature"];
+        let name = pick_typewriter_name(branch_names, &mut rng);
+        assert!(name.is_some());
+        assert!(TYPEWRITER_NAMES.contains(&name.unwrap()));
+    }
+
+    // Generated names are "<typewriter>-<8 alphanumeric chars>".
+    #[gpui::test(iterations = 10)]
+    fn test_generate_branch_name_format(mut rng: StdRng) {
+        let branch_name = generate_branch_name(&[], &mut rng).unwrap();
+        let (prefix, suffix) = branch_name.rsplit_once('-').unwrap();
+        assert!(TYPEWRITER_NAMES.contains(&prefix));
+        assert_eq!(suffix.len(), 8);
+        assert!(suffix.chars().all(|c| c.is_ascii_alphanumeric()));
+    }
+
+    #[gpui::test]
+    fn test_generate_branch_name_returns_none_when_exhausted(mut rng: StdRng) {
+        let branch_names: Vec<String> = TYPEWRITER_NAMES
+            .iter()
+            .map(|name| format!("{name}-00000000"))
+            .collect();
+        let branch_name_refs: Vec<&str> = branch_names.iter().map(|s| s.as_str()).collect();
+        let result = generate_branch_name(&branch_name_refs, &mut rng);
+        assert!(result.is_none());
+    }
+
+    #[gpui::test(iterations = 100)]
+    fn test_generate_branch_name_never_reuses_taken_prefix(mut rng: StdRng) {
+        let existing = &["olivetti-123abc", "selectric-def456"];
+        let branch_name = generate_branch_name(existing, &mut rng).unwrap();
+        let (prefix, _) = branch_name.rsplit_once('-').unwrap();
+        assert_ne!(prefix, "olivetti");
+        assert_ne!(prefix, "selectric");
+    }
+
+    #[gpui::test(iterations = 100)]
+    fn test_generate_branch_name_avoids_multiple_taken_prefixes(mut rng: StdRng) {
+        let existing = &[
+            "olivetti-aaa11111",
+            "selectric-bbb22222",
+            "corona-ccc33333",
+            "remington-ddd44444",
+            "underwood-eee55555",
+        ];
+        let taken_prefixes: HashSet<&str> = existing
+            .iter()
+            .filter_map(|b| b.rsplit_once('-').map(|(prefix, _)| prefix))
+            .collect();
+        let branch_name = generate_branch_name(existing, &mut rng).unwrap();
+        let (prefix, _) = branch_name.rsplit_once('-').unwrap();
+        assert!(
+            !taken_prefixes.contains(prefix),
+            "generated prefix {prefix:?} collides with an existing branch"
+        );
+    }
+
+    // Multiple branches sharing one prefix still exclude it only once.
+    #[gpui::test(iterations = 100)]
+    fn test_generate_branch_name_with_varied_hash_suffixes(mut rng: StdRng) {
+        let existing = &[
+            "olivetti-aaaaaaaa",
+            "olivetti-bbbbbbbb",
+            "olivetti-cccccccc",
+        ];
+        let branch_name = generate_branch_name(existing, &mut rng).unwrap();
+        let (prefix, _) = branch_name.rsplit_once('-').unwrap();
+        assert_ne!(
+            prefix, "olivetti",
+            "should avoid olivetti regardless of how many variants exist"
+        );
+    }
+
+    // Invariants of the name pool itself: unique, sorted, hyphen-free, lowercase.
+    #[test]
+    fn test_typewriter_names_are_valid() {
+        let mut seen = HashSet::default();
+        for &name in TYPEWRITER_NAMES {
+            assert!(
+                seen.insert(name),
+                "duplicate entry in TYPEWRITER_NAMES: {name:?}"
+            );
+        }
+
+        for window in TYPEWRITER_NAMES.windows(2) {
+            assert!(
+                window[0] <= window[1],
+                "TYPEWRITER_NAMES is not sorted: {0:?} should come after {1:?}",
+                window[1],
+                window[0],
+            );
+        }
+
+        for &name in TYPEWRITER_NAMES {
+            assert!(
+                !name.contains('-'),
+                "TYPEWRITER_NAMES entry contains a hyphen: {name:?}"
+            );
+        }
+
+        for &name in TYPEWRITER_NAMES {
+            assert!(
+                name.chars().all(|c| c.is_lowercase() || !c.is_alphabetic()),
+                "TYPEWRITER_NAMES entry is not lowercase: {name:?}"
+            );
+        }
+    }
+}

crates/agent_ui/src/buffer_codegen.rs 🔗

@@ -526,11 +526,13 @@ impl CodegenAlternative {
                     name: REWRITE_SECTION_TOOL_NAME.to_string(),
                     description: "Replaces text in <rewrite_this></rewrite_this> tags with your replacement_text.".to_string(),
                     input_schema: language_model::tool_schema::root_schema_for::<RewriteSectionInput>(tool_input_format).to_value(),
+                    use_input_streaming: false,
                 },
                 LanguageModelRequestTool {
                     name: FAILURE_MESSAGE_TOOL_NAME.to_string(),
                     description: "Use this tool to provide a message to the user when you're unable to complete a task.".to_string(),
                     input_schema: language_model::tool_schema::root_schema_for::<FailureMessageInput>(tool_input_format).to_value(),
+                    use_input_streaming: false,
                 },
             ];
 
@@ -545,6 +547,7 @@ impl CodegenAlternative {
                 messages,
                 thinking_allowed: false,
                 thinking_effort: None,
+                speed: None,
             }
         }))
     }
@@ -624,6 +627,7 @@ impl CodegenAlternative {
                 messages: vec![request_message],
                 thinking_allowed: false,
                 thinking_effort: None,
+                speed: None,
             }
         }))
     }

crates/agent_ui/src/completion_provider.rs 🔗

@@ -4,7 +4,7 @@ use std::path::PathBuf;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
 
-use crate::acp::AcpThreadHistory;
+use crate::ThreadHistory;
 use acp_thread::{AgentSessionInfo, MentionUri};
 use anyhow::Result;
 use editor::{
@@ -206,7 +206,7 @@ pub struct PromptCompletionProvider<T: PromptCompletionProviderDelegate> {
     source: Arc<T>,
     editor: WeakEntity<Editor>,
     mention_set: Entity<MentionSet>,
-    history: WeakEntity<AcpThreadHistory>,
+    history: WeakEntity<ThreadHistory>,
     prompt_store: Option<Entity<PromptStore>>,
     workspace: WeakEntity<Workspace>,
 }
@@ -216,7 +216,7 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
         source: T,
         editor: WeakEntity<Editor>,
         mention_set: Entity<MentionSet>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         prompt_store: Option<Entity<PromptStore>>,
         workspace: WeakEntity<Workspace>,
     ) -> Self {
@@ -617,6 +617,7 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
                                     let crease = crate::mention_set::crease_for_mention(
                                         mention_uri.name().into(),
                                         mention_uri.icon_path(cx),
+                                        None,
                                         range,
                                         editor.downgrade(),
                                     );

crates/agent_ui/src/acp/config_options.rs → crates/agent_ui/src/config_options.rs 🔗

@@ -49,7 +49,7 @@ impl ConfigOptionsView {
             if let Some(mut rx) = rx {
                 while let Ok(()) = rx.recv().await {
                     this.update_in(cx, |this, window, cx| {
-                        this.refresh_selectors_if_needed(window, cx);
+                        this.rebuild_selectors(window, cx);
                         cx.notify();
                     })
                     .log_err();
@@ -184,15 +184,10 @@ impl ConfigOptionsView {
             .collect()
     }
 
-    fn refresh_selectors_if_needed(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        let current_ids = Self::config_option_ids(&self.config_options);
-        if current_ids != self.config_option_ids {
-            self.config_option_ids = current_ids;
-            self.rebuild_selectors(window, cx);
-        }
-    }
-
     fn rebuild_selectors(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        // Config option updates can mutate option values for existing IDs (for example,
+        // reasoning levels after a model switch). Rebuild to refresh cached picker entries.
+        self.config_option_ids = Self::config_option_ids(&self.config_options);
         self.selectors = Self::build_selectors(
             &self.config_options,
             &self.agent_server,
@@ -498,12 +493,7 @@ impl PickerDelegate for ConfigOptionPickerDelegate {
         cx.notify();
     }
 
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
         match self.filtered_entries.get(ix) {
             Some(ConfigOptionPickerEntry::Option(_)) => true,
             Some(ConfigOptionPickerEntry::Separator(_)) | None => false,

crates/agent_ui/src/acp/thread_view.rs → crates/agent_ui/src/connection_view.rs 🔗

@@ -26,10 +26,10 @@ use fs::Fs;
 use futures::FutureExt as _;
 use gpui::{
     Action, Animation, AnimationExt, AnyView, App, ClickEvent, ClipboardItem, CursorStyle,
-    ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, ListOffset, ListState, ObjectFit,
-    PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, WeakEntity, Window,
-    WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, list, point,
-    pulsating_between,
+    ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, Hsla, ListOffset, ListState,
+    ObjectFit, PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle,
+    WeakEntity, Window, WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient,
+    list, point, pulsating_between,
 };
 use language::Buffer;
 use language_model::LanguageModelRegistry;
@@ -62,28 +62,28 @@ use zed_actions::assistant::OpenRulesLibrary;
 
 use super::config_options::ConfigOptionsView;
 use super::entry_view_state::EntryViewState;
-use super::thread_history::AcpThreadHistory;
-use crate::acp::AcpModelSelectorPopover;
-use crate::acp::ModeSelector;
-use crate::acp::entry_view_state::{EntryViewEvent, ViewEvent};
-use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
+use super::thread_history::ThreadHistory;
+use crate::ModeSelector;
+use crate::ModelSelectorPopover;
 use crate::agent_diff::AgentDiff;
+use crate::entry_view_state::{EntryViewEvent, ViewEvent};
+use crate::message_editor::{MessageEditor, MessageEditorEvent};
 use crate::profile_selector::{ProfileProvider, ProfileSelector};
 use crate::ui::{AgentNotification, AgentNotificationEvent};
 use crate::{
     AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce, AuthorizeToolCall,
     ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort,
     EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu,
-    OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage,
-    SelectPermissionGranularity, SendImmediately, SendNextQueuedMessage, ToggleProfileSelector,
-    ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject,
+    OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, SendImmediately,
+    SendNextQueuedMessage, ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu,
+    ToggleThinkingMode, UndoLastReject,
 };
 
 const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30);
 const TOKEN_THRESHOLD: u64 = 250;
 
-mod active_thread;
-pub use active_thread::*;
+mod thread_view;
+pub use thread_view::*;
 
 pub struct QueuedMessage {
     pub content: Vec<acp::ContentBlock>,
@@ -107,8 +107,8 @@ pub(crate) enum ThreadError {
     },
 }
 
-impl ThreadError {
-    fn from_err(error: anyhow::Error, agent_name: &str) -> Self {
+impl From<anyhow::Error> for ThreadError {
+    fn from(error: anyhow::Error) -> Self {
         if error.is::<language_model::PaymentRequiredError>() {
             Self::PaymentRequired
         } else if let Some(acp_error) = error.downcast_ref::<acp::Error>()
@@ -123,18 +123,9 @@ impl ThreadError {
                 .downcast_ref::<acp::Error>()
                 .map(|acp_error| SharedString::from(acp_error.code.to_string()));
 
-            // TODO: we should have Gemini return better errors here.
-            if agent_name == "Gemini CLI"
-                && message.contains("Could not load the default credentials")
-                || message.contains("API key not valid")
-                || message.contains("Request had invalid authentication credentials")
-            {
-                Self::AuthenticationRequired(message)
-            } else {
-                Self::Other {
-                    message,
-                    acp_error_code,
-                }
+            Self::Other {
+                message,
+                acp_error_code,
             }
         }
     }
@@ -164,6 +155,9 @@ pub(crate) struct Conversation {
     threads: HashMap<acp::SessionId, Entity<AcpThread>>,
     permission_requests: IndexMap<acp::SessionId, Vec<acp::ToolCallId>>,
     subscriptions: Vec<Subscription>,
+    /// Tracks the selected granularity index for each tool call's permission dropdown.
+    /// The index corresponds to the position in the allow_options list.
+    selected_permission_granularity: HashMap<acp::SessionId, HashMap<acp::ToolCallId, usize>>,
 }
 
 impl Conversation {
@@ -191,7 +185,7 @@ impl Conversation {
             | AcpThreadEvent::EntriesRemoved(_)
             | AcpThreadEvent::Retry(_)
             | AcpThreadEvent::SubagentSpawned(_)
-            | AcpThreadEvent::Stopped
+            | AcpThreadEvent::Stopped(_)
             | AcpThreadEvent::Error
             | AcpThreadEvent::LoadError(_)
             | AcpThreadEvent::PromptCapabilitiesUpdated
@@ -205,6 +199,29 @@ impl Conversation {
             .insert(thread.read(cx).session_id().clone(), thread);
     }
 
+    pub fn selected_permission_granularity(
+        &self,
+        session_id: &acp::SessionId,
+        tool_call_id: &acp::ToolCallId,
+    ) -> Option<usize> {
+        self.selected_permission_granularity
+            .get(session_id)
+            .and_then(|map| map.get(tool_call_id))
+            .copied()
+    }
+
+    pub fn set_selected_permission_granularity(
+        &mut self,
+        session_id: acp::SessionId,
+        tool_call_id: acp::ToolCallId,
+        granularity: usize,
+    ) {
+        self.selected_permission_granularity
+            .entry(session_id)
+            .or_default()
+            .insert(tool_call_id, granularity);
+    }
+
     pub fn pending_tool_call<'a>(
         &'a self,
         session_id: &acp::SessionId,
@@ -278,7 +295,13 @@ impl Conversation {
     }
 }
 
-pub struct AcpServerView {
+pub enum AcpServerViewEvent {
+    ActiveThreadChanged,
+}
+
+impl EventEmitter<AcpServerViewEvent> for ConnectionView {}
+
+pub struct ConnectionView {
     agent: Rc<dyn AgentServer>,
     agent_server_store: Entity<AgentServerStore>,
     workspace: WeakEntity<Workspace>,
@@ -286,8 +309,7 @@ pub struct AcpServerView {
     thread_store: Option<Entity<ThreadStore>>,
     prompt_store: Option<Entity<PromptStore>>,
     server_state: ServerState,
-    login: Option<task::SpawnInTerminal>, // is some <=> Active | Unauthenticated
-    history: Entity<AcpThreadHistory>,
+    history: Entity<ThreadHistory>,
     focus_handle: FocusHandle,
     notifications: Vec<WindowHandle<AgentNotification>>,
     notification_subscriptions: HashMap<WindowHandle<AgentNotification>, Vec<Subscription>>,
@@ -295,8 +317,14 @@ pub struct AcpServerView {
     _subscriptions: Vec<Subscription>,
 }
 
-impl AcpServerView {
-    pub fn active_thread(&self) -> Option<&Entity<AcpThreadView>> {
+impl ConnectionView {
+    pub fn has_auth_methods(&self) -> bool {
+        self.as_connected().map_or(false, |connected| {
+            !connected.connection.auth_methods().is_empty()
+        })
+    }
+
+    pub fn active_thread(&self) -> Option<&Entity<ThreadView>> {
         match &self.server_state {
             ServerState::Connected(connected) => connected.active_view(),
             _ => None,
@@ -314,7 +342,7 @@ impl AcpServerView {
             .pending_tool_call(id, cx)
     }
 
-    pub fn parent_thread(&self, cx: &App) -> Option<Entity<AcpThreadView>> {
+    pub fn parent_thread(&self, cx: &App) -> Option<Entity<ThreadView>> {
         match &self.server_state {
             ServerState::Connected(connected) => {
                 let mut current = connected.active_view()?;
@@ -331,7 +359,7 @@ impl AcpServerView {
         }
     }
 
-    pub fn thread_view(&self, session_id: &acp::SessionId) -> Option<Entity<AcpThreadView>> {
+    pub fn thread_view(&self, session_id: &acp::SessionId) -> Option<Entity<ThreadView>> {
         let connected = self.as_connected()?;
         connected.threads.get(session_id).cloned()
     }
@@ -364,6 +392,7 @@ impl AcpServerView {
         if let Some(view) = self.active_thread() {
             view.focus_handle(cx).focus(window, cx);
         }
+        cx.emit(AcpServerViewEvent::ActiveThreadChanged);
         cx.notify();
     }
 }
@@ -379,7 +408,7 @@ enum ServerState {
 pub struct ConnectedServerState {
     auth_state: AuthState,
     active_id: Option<acp::SessionId>,
-    threads: HashMap<acp::SessionId, Entity<AcpThreadView>>,
+    threads: HashMap<acp::SessionId, Entity<ThreadView>>,
     connection: Rc<dyn AgentConnection>,
     conversation: Entity<Conversation>,
 }
@@ -407,7 +436,7 @@ struct LoadingView {
 }
 
 impl ConnectedServerState {
-    pub fn active_view(&self) -> Option<&Entity<AcpThreadView>> {
+    pub fn active_view(&self) -> Option<&Entity<ThreadView>> {
         self.active_id.as_ref().and_then(|id| self.threads.get(id))
     }
 
@@ -434,7 +463,7 @@ impl ConnectedServerState {
     }
 }
 
-impl AcpServerView {
+impl ConnectionView {
     pub fn new(
         agent: Rc<dyn AgentServer>,
         resume_thread: Option<AgentSessionInfo>,
@@ -443,7 +472,7 @@ impl AcpServerView {
         project: Entity<Project>,
         thread_store: Option<Entity<ThreadStore>>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: Entity<AcpThreadHistory>,
+        history: Entity<ThreadHistory>,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
@@ -487,7 +516,6 @@ impl AcpServerView {
                 window,
                 cx,
             ),
-            login: None,
             notifications: Vec::new(),
             notification_subscriptions: HashMap::default(),
             auth_task: None,
@@ -503,6 +531,7 @@ impl AcpServerView {
         }
 
         self.server_state = state;
+        cx.emit(AcpServerViewEvent::ActiveThreadChanged);
         cx.notify();
     }
 
@@ -569,22 +598,29 @@ impl AcpServerView {
                 }
             })
             .collect();
-        let root_dir = worktree_roots.first().cloned();
         let session_cwd = resume_thread
             .as_ref()
             .and_then(|resume| {
                 resume
                     .cwd
                     .as_ref()
-                    .and_then(|cwd| util::paths::normalize_lexically(cwd).ok())
                     .filter(|cwd| {
-                        worktree_roots
-                            .iter()
-                            .any(|root| cwd.starts_with(root.as_ref()))
+                        // Validate with the normalized path (rejects `..` traversals),
+                        // but return the original cwd to preserve its path separators.
+                        // On Windows, `normalize_lexically` rebuilds the path with
+                        // backslashes via `PathBuf::push`, which would corrupt
+                        // forward-slash Linux paths used by WSL agents.
+                        util::paths::normalize_lexically(cwd)
+                            .ok()
+                            .is_some_and(|normalized| {
+                                worktree_roots
+                                    .iter()
+                                    .any(|root| normalized.starts_with(root.as_ref()))
+                            })
                     })
-                    .map(|path| path.into())
+                    .map(|path| Arc::from(path.as_path()))
             })
-            .or_else(|| root_dir.clone())
+            .or_else(|| worktree_roots.first().cloned())
             .unwrap_or_else(|| paths::home_dir().as_path().into());
 
         let (status_tx, mut status_rx) = watch::channel("Loading…".into());
@@ -596,19 +632,18 @@ impl AcpServerView {
             Some(new_version_available_tx),
         );
 
-        let connect_task = agent.connect(root_dir.as_deref(), delegate, cx);
+        let connect_task = agent.connect(delegate, cx);
         let load_task = cx.spawn_in(window, async move |this, cx| {
             let connection = match connect_task.await {
-                Ok((connection, login)) => {
-                    this.update(cx, |this, _| this.login = login).ok();
-                    connection
-                }
+                Ok(connection) => connection,
                 Err(err) => {
                     this.update_in(cx, |this, window, cx| {
                         if err.downcast_ref::<LoadError>().is_some() {
                             this.handle_load_error(err, window, cx);
                         } else if let Some(active) = this.active_thread() {
-                            active.update(cx, |active, cx| active.handle_any_thread_error(err, cx));
+                            active.update(cx, |active, cx| active.handle_thread_error(err, cx));
+                        } else {
+                            this.handle_load_error(err, window, cx);
                         }
                         cx.notify();
                     })
@@ -701,6 +736,14 @@ impl AcpServerView {
                         }
 
                         let id = current.read(cx).thread.read(cx).session_id().clone();
+                        let session_list = if connection.supports_session_history() {
+                            connection.session_list(cx)
+                        } else {
+                            None
+                        };
+                        this.history.update(cx, |history, cx| {
+                            history.set_session_list(session_list, cx);
+                        });
                         this.set_server_state(
                             ServerState::Connected(ConnectedServerState {
                                 connection,
@@ -768,7 +811,7 @@ impl AcpServerView {
         initial_content: Option<AgentInitialContent>,
         window: &mut Window,
         cx: &mut Context<Self>,
-    ) -> Entity<AcpThreadView> {
+    ) -> Entity<ThreadView> {
         let agent_name = self.agent.name();
         let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
         let available_commands = Rc::new(RefCell::new(vec![]));
@@ -802,18 +845,14 @@ impl AcpServerView {
             );
         });
 
+        if let Some(scroll_position) = thread.read(cx).ui_scroll_position() {
+            list_state.scroll_to(scroll_position);
+        }
+
         AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
 
         let connection = thread.read(cx).connection().clone();
         let session_id = thread.read(cx).session_id().clone();
-        let session_list = if connection.supports_session_history() {
-            connection.session_list(cx)
-        } else {
-            None
-        };
-        self.history.update(cx, |history, cx| {
-            history.set_session_list(session_list, cx);
-        });
 
         // Check for config options first
         // Config options take precedence over legacy mode/model selectors
@@ -840,7 +879,7 @@ impl AcpServerView {
                 let agent_server = self.agent.clone();
                 let fs = self.project.read(cx).fs().clone();
                 cx.new(|cx| {
-                    AcpModelSelectorPopover::new(
+                    ModelSelectorPopover::new(
                         selector,
                         agent_server,
                         fs,
@@ -871,7 +910,10 @@ impl AcpServerView {
             .entries()
             .iter()
             .filter_map(|entry| match entry {
-                AgentThreadEntry::ToolCall(call) => call.subagent_session_id.clone(),
+                AgentThreadEntry::ToolCall(call) => call
+                    .subagent_session_info
+                    .as_ref()
+                    .map(|i| i.session_id.clone()),
                 _ => None,
             })
             .collect::<Vec<_>>();
@@ -914,16 +956,28 @@ impl AcpServerView {
             .unwrap_or_else(|| agent_name.clone());
 
         let agent_icon = self.agent.logo();
+        let agent_icon_from_external_svg = self
+            .agent_server_store
+            .read(cx)
+            .agent_icon(&ExternalAgentServerName(self.agent.name()))
+            .or_else(|| {
+                project::AgentRegistryStore::try_global(cx).and_then(|store| {
+                    store
+                        .read(cx)
+                        .agent(self.agent.name().as_ref())
+                        .and_then(|a| a.icon_path().cloned())
+                })
+            });
 
         let weak = cx.weak_entity();
         cx.new(|cx| {
-            AcpThreadView::new(
+            ThreadView::new(
                 parent_id,
                 thread,
                 conversation,
-                self.login.clone(),
                 weak,
                 agent_icon,
+                agent_icon_from_external_svg,
                 agent_name,
                 agent_display_name,
                 self.workspace.clone(),
@@ -1136,6 +1190,20 @@ impl AcpServerView {
         }
     }
 
+    fn move_queued_message_to_main_editor(
+        &mut self,
+        index: usize,
+        inserted_text: Option<&str>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(active) = self.active_thread() {
+            active.update(cx, |active, cx| {
+                active.move_queued_message_to_main_editor(index, inserted_text, window, cx);
+            });
+        }
+    }
+
     fn handle_thread_event(
         &mut self,
         thread: &Entity<AcpThread>,
@@ -1198,13 +1266,18 @@ impl AcpServerView {
                     });
                 }
             }
-            AcpThreadEvent::Stopped => {
+            AcpThreadEvent::Stopped(stop_reason) => {
                 if let Some(active) = self.thread_view(&thread_id) {
                     active.update(cx, |active, _cx| {
                         active.thread_retry_status.take();
                     });
                 }
                 if is_subagent {
+                    if *stop_reason == acp::StopReason::EndTurn {
+                        thread.update(cx, |thread, cx| {
+                            thread.mark_as_subagent_output(cx);
+                        });
+                    }
                     return;
                 }
 
@@ -1300,6 +1373,7 @@ impl AcpServerView {
                         }
                     });
                 }
+                cx.notify();
             }
             AcpThreadEvent::PromptCapabilitiesUpdated => {
                 if let Some(active) = self.thread_view(&thread_id) {
@@ -1417,13 +1491,6 @@ impl AcpServerView {
                     })
                     .unwrap_or_default();
 
-                // Run SpawnInTerminal in the same dir as the ACP server
-                let cwd = connected
-                    .connection
-                    .clone()
-                    .downcast::<agent_servers::AcpConnection>()
-                    .map(|acp_conn| acp_conn.root_dir().to_path_buf());
-
                 // Build SpawnInTerminal from _meta
                 let login = task::SpawnInTerminal {
                     id: task::TaskId(format!("external-agent-{}-login", label)),
@@ -1432,7 +1499,6 @@ impl AcpServerView {
                     command: Some(command.to_string()),
                     args,
                     command_label: label.to_string(),
-                    cwd,
                     env,
                     use_new_terminal: true,
                     allow_concurrent_runs: true,
@@ -1487,7 +1553,7 @@ impl AcpServerView {
                                     }
                                     if let Some(active) = this.active_thread() {
                                         active.update(cx, |active, cx| {
-                                            active.handle_any_thread_error(err, cx);
+                                            active.handle_thread_error(err, cx);
                                         })
                                     }
                                 } else {
@@ -1503,79 +1569,10 @@ impl AcpServerView {
             }
         }
 
-        if method.0.as_ref() == "gemini-api-key" {
-            let registry = LanguageModelRegistry::global(cx);
-            let provider = registry
-                .read(cx)
-                .provider(&language_model::GOOGLE_PROVIDER_ID)
-                .unwrap();
-            if !provider.is_authenticated(cx) {
-                let this = cx.weak_entity();
-                let agent_name = self.agent.name();
-                let connection = connection.clone();
-                window.defer(cx, |window, cx| {
-                    Self::handle_auth_required(
-                        this,
-                        AuthRequired {
-                            description: Some("GEMINI_API_KEY must be set".to_owned()),
-                            provider_id: Some(language_model::GOOGLE_PROVIDER_ID),
-                        },
-                        agent_name,
-                        connection,
-                        window,
-                        cx,
-                    );
-                });
-                return;
-            }
-        } else if method.0.as_ref() == "vertex-ai"
-            && std::env::var("GOOGLE_API_KEY").is_err()
-            && (std::env::var("GOOGLE_CLOUD_PROJECT").is_err()
-                || (std::env::var("GOOGLE_CLOUD_PROJECT").is_err()))
-        {
-            let this = cx.weak_entity();
-            let agent_name = self.agent.name();
-            let connection = connection.clone();
-
-            window.defer(cx, |window, cx| {
-                    Self::handle_auth_required(
-                        this,
-                        AuthRequired {
-                            description: Some(
-                                "GOOGLE_API_KEY must be set in the environment to use Vertex AI authentication for Gemini CLI. Please export it and restart Zed."
-                                    .to_owned(),
-                            ),
-                            provider_id: None,
-                        },
-                        agent_name,
-                        connection,
-                        window,
-                        cx,
-                    )
-                });
-            return;
-        }
-
         configuration_view.take();
         pending_auth_method.replace(method.clone());
-        let authenticate = if let Some(login) = self.login.clone() {
-            if let Some(workspace) = self.workspace.upgrade() {
-                let project = self.project.clone();
-                Self::spawn_external_agent_login(
-                    login,
-                    workspace,
-                    project,
-                    method.clone(),
-                    false,
-                    window,
-                    cx,
-                )
-            } else {
-                Task::ready(Ok(()))
-            }
-        } else {
-            connection.authenticate(method, cx)
-        };
+
+        let authenticate = connection.authenticate(method, cx);
         cx.notify();
         self.auth_task = Some(cx.spawn_in(window, {
             async move |this, cx| {
@@ -1605,7 +1602,7 @@ impl AcpServerView {
                             pending_auth_method.take();
                         }
                         if let Some(active) = this.active_thread() {
-                            active.update(cx, |active, cx| active.handle_any_thread_error(err, cx));
+                            active.update(cx, |active, cx| active.handle_thread_error(err, cx));
                         }
                     } else {
                         this.reset(window, cx);
@@ -1850,15 +1847,7 @@ impl AcpServerView {
                     .enumerate()
                     .rev()
                     .map(|(ix, method)| {
-                        let (method_id, name) = if self.project.read(cx).is_via_remote_server()
-                            && method.id.0.as_ref() == "oauth-personal"
-                            && method.name == "Log in with Google"
-                        {
-                            ("spawn-gemini-cli".into(), "Log in with Gemini CLI".into())
-                        } else {
-                            (method.id.0.clone(), method.name.clone())
-                        };
-
+                        let (method_id, name) = (method.id.0.clone(), method.name.clone());
                         let agent_telemetry_id = connection.telemetry_id();
 
                         Button::new(method_id.clone(), name)
@@ -2213,6 +2202,7 @@ impl AcpServerView {
             for (index, editor) in editors.into_iter().enumerate() {
                 if let Some(content) = queued_messages.get(index) {
                     editor.update(cx, |editor, cx| {
+                        editor.set_read_only(true, cx);
                         editor.set_message(content.clone(), window, cx);
                     });
                 }
@@ -2241,6 +2231,7 @@ impl AcpServerView {
                     window,
                     cx,
                 );
+                editor.set_read_only(true, cx);
                 editor.set_message(content, window, cx);
                 editor
             });
@@ -2249,6 +2240,8 @@ impl AcpServerView {
                 &editor,
                 window,
                 move |this, _editor, event, window, cx| match event {
+                    MessageEditorEvent::InputAttempted(text) => this
+                        .move_queued_message_to_main_editor(index, Some(text.as_ref()), window, cx),
                     MessageEditorEvent::LostFocus => {
                         this.save_queued_message_at_index(index, cx);
                     }
@@ -2283,7 +2276,7 @@ impl AcpServerView {
     fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
         let workspace = self.workspace.clone();
         MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| {
-            crate::acp::thread_view::active_thread::open_link(text, &workspace, window, cx);
+            crate::connection_view::thread_view::open_link(text, &workspace, window, cx);
         })
     }
 
@@ -2606,7 +2599,7 @@ fn placeholder_text(agent_name: &str, has_commands: bool) -> String {
     }
 }
 
-impl Focusable for AcpServerView {
+impl Focusable for ConnectionView {
     fn focus_handle(&self, cx: &App) -> FocusHandle {
         match self.active_thread() {
             Some(thread) => thread.read(cx).focus_handle(cx),
@@ -2616,7 +2609,7 @@ impl Focusable for AcpServerView {
 }
 
 #[cfg(any(test, feature = "test-support"))]
-impl AcpServerView {
+impl ConnectionView {
     /// Expands a tool call so its content is visible.
     /// This is primarily useful for visual testing.
     pub fn expand_tool_call(&mut self, tool_call_id: acp::ToolCallId, cx: &mut Context<Self>) {
@@ -2629,7 +2622,7 @@ impl AcpServerView {
     }
 }
 
-impl Render for AcpServerView {
+impl Render for ConnectionView {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         self.sync_queued_message_editors(window, cx);
 
@@ -2808,11 +2801,11 @@ pub(crate) mod tests {
 
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
         // Create history without an initial session list - it will be set after connection
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(StubAgentServer::default_response()),
                     None,
                     None,
@@ -2868,6 +2861,33 @@ pub(crate) mod tests {
         });
     }
 
+    #[gpui::test]
+    async fn test_new_thread_creation_triggers_session_list_refresh(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let session = AgentSessionInfo::new(SessionId::new("history-session"));
+        let (thread_view, history, cx) = setup_thread_view_with_history(
+            StubAgentServer::new(SessionHistoryConnection::new(vec![session.clone()])),
+            cx,
+        )
+        .await;
+
+        history.read_with(cx, |history, _cx| {
+            assert!(
+                history.has_session_list(),
+                "session list should be attached after thread creation"
+            );
+        });
+
+        active_thread(&thread_view, cx).read_with(cx, |view, _cx| {
+            assert_eq!(view.recent_history_entries.len(), 1);
+            assert_eq!(
+                view.recent_history_entries[0].session_id,
+                session.session_id
+            );
+        });
+    }
+
     #[gpui::test]
     async fn test_resume_without_history_adds_notice(cx: &mut TestAppContext) {
         init_test(cx);
@@ -2880,11 +2900,11 @@ pub(crate) mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(StubAgentServer::new(ResumeOnlyAgentConnection)),
                     Some(session),
                     None,
@@ -2934,11 +2954,11 @@ pub(crate) mod tests {
         session.cwd = Some(PathBuf::from("/project/subdir"));
 
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let _thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(StubAgentServer::new(connection)),
                     Some(session),
                     None,
@@ -2986,11 +3006,11 @@ pub(crate) mod tests {
         session.cwd = Some(PathBuf::from("/some/other/path"));
 
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let _thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(StubAgentServer::new(connection)),
                     Some(session),
                     None,
@@ -3038,11 +3058,11 @@ pub(crate) mod tests {
         session.cwd = Some(PathBuf::from("/project/../outside"));
 
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let _thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(StubAgentServer::new(connection)),
                     Some(session),
                     None,
@@ -3092,6 +3112,38 @@ pub(crate) mod tests {
         });
     }
 
+    #[gpui::test]
+    async fn test_connect_failure_transitions_to_load_error(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let (thread_view, cx) = setup_thread_view(FailingAgentServer, cx).await;
+
+        thread_view.read_with(cx, |view, cx| {
+            let title = view.title(cx);
+            assert_eq!(
+                title.as_ref(),
+                "Error Loading Codex CLI",
+                "Tab title should show the agent name with an error prefix"
+            );
+            match &view.server_state {
+                ServerState::LoadError(LoadError::Other(msg)) => {
+                    assert!(
+                        msg.contains("Invalid gzip header"),
+                        "Error callout should contain the underlying extraction error, got: {msg}"
+                    );
+                }
+                other => panic!(
+                    "Expected LoadError::Other, got: {}",
+                    match other {
+                        ServerState::Loading(_) => "Loading (stuck!)",
+                        ServerState::LoadError(_) => "LoadError (wrong variant)",
+                        ServerState::Connected(_) => "Connected",
+                    }
+                ),
+            }
+        });
+    }
+
     #[gpui::test]
     async fn test_auth_required_on_initial_connect(cx: &mut TestAppContext) {
         init_test(cx);
@@ -3126,7 +3178,7 @@ pub(crate) mod tests {
             );
         });
 
-        // Authenticate using the real authenticate flow on AcpServerView.
+        // Authenticate using the real authenticate flow on ConnectionView.
         // This calls connection.authenticate(), which flips the internal flag,
         // then on success triggers reset() -> new_session() which now succeeds.
         thread_view.update_in(cx, |view, window, cx| {
@@ -3313,12 +3365,12 @@ pub(crate) mod tests {
 
         // Set up thread view in workspace 1
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let agent = StubAgentServer::default_response();
         let thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(agent),
                     None,
                     None,
@@ -3482,7 +3534,19 @@ pub(crate) mod tests {
     async fn setup_thread_view(
         agent: impl AgentServer + 'static,
         cx: &mut TestAppContext,
-    ) -> (Entity<AcpServerView>, &mut VisualTestContext) {
+    ) -> (Entity<ConnectionView>, &mut VisualTestContext) {
+        let (thread_view, _history, cx) = setup_thread_view_with_history(agent, cx).await;
+        (thread_view, cx)
+    }
+
+    async fn setup_thread_view_with_history(
+        agent: impl AgentServer + 'static,
+        cx: &mut TestAppContext,
+    ) -> (
+        Entity<ConnectionView>,
+        Entity<ThreadHistory>,
+        &mut VisualTestContext,
+    ) {
         let fs = FakeFs::new(cx.executor());
         let project = Project::test(fs, [], cx).await;
         let (multi_workspace, cx) =
@@ -3490,11 +3554,11 @@ pub(crate) mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx)));
+        let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
 
         let thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
-                AcpServerView::new(
+                ConnectionView::new(
                     Rc::new(agent),
                     None,
                     None,
@@ -3502,17 +3566,17 @@ pub(crate) mod tests {
                     project,
                     Some(thread_store),
                     None,
-                    history,
+                    history.clone(),
                     window,
                     cx,
                 )
             })
         });
         cx.run_until_parked();
-        (thread_view, cx)
+        (thread_view, history, cx)
     }
 
-    fn add_to_workspace(thread_view: Entity<AcpServerView>, cx: &mut VisualTestContext) {
+    fn add_to_workspace(thread_view: Entity<ConnectionView>, cx: &mut VisualTestContext) {
         let workspace = thread_view.read_with(cx, |thread_view, _cx| thread_view.workspace.clone());
 
         workspace
@@ -3528,7 +3592,7 @@ pub(crate) mod tests {
             .unwrap();
     }
 
-    struct ThreadViewItem(Entity<AcpServerView>);
+    struct ThreadViewItem(Entity<ConnectionView>);
 
     impl Item for ThreadViewItem {
         type Event = ();
@@ -3590,11 +3654,39 @@ pub(crate) mod tests {
 
         fn connect(
             &self,
-            _root_dir: Option<&Path>,
             _delegate: AgentServerDelegate,
             _cx: &mut App,
-        ) -> Task<gpui::Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
-            Task::ready(Ok((Rc::new(self.connection.clone()), None)))
+        ) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
+            Task::ready(Ok(Rc::new(self.connection.clone())))
+        }
+
+        fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+            self
+        }
+    }
+
+    struct FailingAgentServer;
+
+    impl AgentServer for FailingAgentServer {
+        fn logo(&self) -> ui::IconName {
+            ui::IconName::AiOpenAi
+        }
+
+        fn name(&self) -> SharedString {
+            "Codex CLI".into()
+        }
+
+        fn connect(
+            &self,
+            _delegate: AgentServerDelegate,
+            _cx: &mut App,
+        ) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
+            Task::ready(Err(anyhow!(
+                "extracting downloaded asset for \
+                 https://github.com/zed-industries/codex-acp/releases/download/v0.9.4/\
+                 codex-acp-0.9.4-aarch64-pc-windows-msvc.zip: \
+                 failed to iterate over archive: Invalid gzip header"
+            )))
         }
 
         fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
@@ -3621,6 +3713,102 @@ pub(crate) mod tests {
         ) -> Task<anyhow::Result<AgentSessionListResponse>> {
             Task::ready(Ok(AgentSessionListResponse::new(self.sessions.clone())))
         }
+
+        fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+            self
+        }
+    }
+
+    #[derive(Clone)]
+    struct SessionHistoryConnection {
+        sessions: Vec<AgentSessionInfo>,
+    }
+
+    impl SessionHistoryConnection {
+        fn new(sessions: Vec<AgentSessionInfo>) -> Self {
+            Self { sessions }
+        }
+    }
+
+    fn build_test_thread(
+        connection: Rc<dyn AgentConnection>,
+        project: Entity<Project>,
+        name: &'static str,
+        session_id: SessionId,
+        cx: &mut App,
+    ) -> Entity<AcpThread> {
+        let action_log = cx.new(|_| ActionLog::new(project.clone()));
+        cx.new(|cx| {
+            AcpThread::new(
+                None,
+                name,
+                connection,
+                project,
+                action_log,
+                session_id,
+                watch::Receiver::constant(
+                    acp::PromptCapabilities::new()
+                        .image(true)
+                        .audio(true)
+                        .embedded_context(true),
+                ),
+                cx,
+            )
+        })
+    }
+
+    impl AgentConnection for SessionHistoryConnection {
+        fn telemetry_id(&self) -> SharedString {
+            "history-connection".into()
+        }
+
+        fn new_session(
+            self: Rc<Self>,
+            project: Entity<Project>,
+            _cwd: &Path,
+            cx: &mut App,
+        ) -> Task<anyhow::Result<Entity<AcpThread>>> {
+            let thread = build_test_thread(
+                self,
+                project,
+                "SessionHistoryConnection",
+                SessionId::new("history-session"),
+                cx,
+            );
+            Task::ready(Ok(thread))
+        }
+
+        fn supports_load_session(&self) -> bool {
+            true
+        }
+
+        fn session_list(&self, _cx: &mut App) -> Option<Rc<dyn AgentSessionList>> {
+            Some(Rc::new(StubSessionList::new(self.sessions.clone())))
+        }
+
+        fn auth_methods(&self) -> &[acp::AuthMethod] {
+            &[]
+        }
+
+        fn authenticate(
+            &self,
+            _method_id: acp::AuthMethodId,
+            _cx: &mut App,
+        ) -> Task<anyhow::Result<()>> {
+            Task::ready(Ok(()))
+        }
+
+        fn prompt(
+            &self,
+            _id: Option<acp_thread::UserMessageId>,
+            _params: acp::PromptRequest,
+            _cx: &mut App,
+        ) -> Task<anyhow::Result<acp::PromptResponse>> {
+            Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)))
+        }
+
+        fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {}
+
         fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
             self
         }

crates/agent_ui/src/acp/thread_view/active_thread.rs → crates/agent_ui/src/connection_view/thread_view.rs 🔗

@@ -1,8 +1,13 @@
+use acp_thread::ContentBlock;
 use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody};
+use editor::actions::OpenExcerpts;
+
+use crate::StartThreadIn;
 use gpui::{Corner, List};
-use language_model::LanguageModelEffortLevel;
+use language_model::{LanguageModelEffortLevel, Speed};
 use settings::update_settings_file;
 use ui::{ButtonLike, SplitButton, SplitButtonStyle, Tab};
+use workspace::SERIALIZATION_THROTTLE_TIME;
 
 use super::*;
 
@@ -43,6 +48,7 @@ impl ThreadFeedbackState {
             }
         }
         let session_id = thread.read(cx).session_id().clone();
+        let parent_session_id = thread.read(cx).parent_session_id().cloned();
         let agent_telemetry_id = thread.read(cx).connection().telemetry_id();
         let task = telemetry.thread_data(&session_id, cx);
         let rating = match feedback {
@@ -58,6 +64,7 @@ impl ThreadFeedbackState {
                     organization_id: organization.map(|organization| organization.id.clone()),
                     agent: agent_telemetry_id.to_string(),
                     session_id: session_id.to_string(),
+                    parent_session_id: parent_session_id.map(|id| id.to_string()),
                     rating: rating.to_string(),
                     thread,
                 })
@@ -186,14 +193,20 @@ impl DiffStats {
     }
 }
 
-pub struct AcpThreadView {
+pub enum AcpThreadViewEvent {
+    FirstSendRequested { content: Vec<acp::ContentBlock> },
+}
+
+impl EventEmitter<AcpThreadViewEvent> for ThreadView {}
+
+pub struct ThreadView {
     pub id: acp::SessionId,
     pub parent_id: Option<acp::SessionId>,
-    pub login: Option<task::SpawnInTerminal>, // is some <=> Active | Unauthenticated
     pub thread: Entity<AcpThread>,
     pub(crate) conversation: Entity<super::Conversation>,
-    pub server_view: WeakEntity<AcpServerView>,
+    pub server_view: WeakEntity<ConnectionView>,
     pub agent_icon: IconName,
+    pub agent_icon_from_external_svg: Option<SharedString>,
     pub agent_name: SharedString,
     pub focus_handle: FocusHandle,
     pub workspace: WeakEntity<Workspace>,
@@ -201,7 +214,7 @@ pub struct AcpThreadView {
     pub title_editor: Entity<Editor>,
     pub config_options_view: Option<Entity<ConfigOptionsView>>,
     pub mode_selector: Option<Entity<ModeSelector>>,
-    pub model_selector: Option<Entity<AcpModelSelectorPopover>>,
+    pub model_selector: Option<Entity<ModelSelectorPopover>>,
     pub profile_selector: Option<Entity<ProfileSelector>>,
     pub permission_dropdown_handle: PopoverMenuHandle<ContextMenu>,
     pub thread_retry_status: Option<RetryStatus>,
@@ -234,12 +247,10 @@ pub struct AcpThreadView {
     pub is_loading_contents: bool,
     pub new_server_version_available: Option<SharedString>,
     pub resumed_without_history: bool,
-    /// Tracks the selected granularity index for each tool call's permission dropdown.
-    /// The index corresponds to the position in the allow_options list.
-    /// Default is the last option (index pointing to "Only this time").
-    pub selected_permission_granularity: HashMap<agent_client_protocol::ToolCallId, usize>,
     pub resume_thread_metadata: Option<AgentSessionInfo>,
     pub _cancel_task: Option<Task<()>>,
+    _save_task: Option<Task<()>>,
+    _draft_resolve_task: Option<Task<()>>,
     pub skip_queue_processing_count: usize,
     pub user_interrupted_generation: bool,
     pub can_fast_track_queue: bool,
@@ -253,10 +264,10 @@ pub struct AcpThreadView {
     pub recent_history_entries: Vec<AgentSessionInfo>,
     pub hovered_recent_history_item: Option<usize>,
     pub show_codex_windows_warning: bool,
-    pub history: Entity<AcpThreadHistory>,
+    pub history: Entity<ThreadHistory>,
     pub _history_subscription: Subscription,
 }
-impl Focusable for AcpThreadView {
+impl Focusable for ThreadView {
     fn focus_handle(&self, cx: &App) -> FocusHandle {
         if self.parent_id.is_some() {
             self.focus_handle.clone()
@@ -276,21 +287,21 @@ pub struct TurnFields {
     pub turn_tokens: Option<u64>,
 }
 
-impl AcpThreadView {
+impl ThreadView {
     pub(crate) fn new(
         parent_id: Option<acp::SessionId>,
         thread: Entity<AcpThread>,
         conversation: Entity<super::Conversation>,
-        login: Option<task::SpawnInTerminal>,
-        server_view: WeakEntity<AcpServerView>,
+        server_view: WeakEntity<ConnectionView>,
         agent_icon: IconName,
+        agent_icon_from_external_svg: Option<SharedString>,
         agent_name: SharedString,
         agent_display_name: SharedString,
         workspace: WeakEntity<Workspace>,
         entry_view_state: Entity<EntryViewState>,
         config_options_view: Option<Entity<ConfigOptionsView>>,
         mode_selector: Option<Entity<ModeSelector>>,
-        model_selector: Option<Entity<AcpModelSelectorPopover>>,
+        model_selector: Option<Entity<ModelSelectorPopover>>,
         profile_selector: Option<Entity<ProfileSelector>>,
         list_state: ListState,
         prompt_capabilities: Rc<RefCell<PromptCapabilities>>,
@@ -299,7 +310,7 @@ impl AcpThreadView {
         resume_thread_metadata: Option<AgentSessionInfo>,
         project: WeakEntity<Project>,
         thread_store: Option<Entity<ThreadStore>>,
-        history: Entity<AcpThreadHistory>,
+        history: Entity<ThreadHistory>,
         prompt_store: Option<Entity<PromptStore>>,
         initial_content: Option<AgentInitialContent>,
         mut subscriptions: Vec<Subscription>,
@@ -347,6 +358,8 @@ impl AcpThreadView {
                         editor.set_message(blocks, window, cx);
                     }
                 }
+            } else if let Some(draft) = thread.read(cx).draft_prompt() {
+                editor.set_message(draft.to_vec(), window, cx);
             }
             editor
         });
@@ -379,6 +392,30 @@ impl AcpThreadView {
             Self::handle_message_editor_event,
         ));
 
+        subscriptions.push(cx.observe(&message_editor, |this, editor, cx| {
+            let is_empty = editor.read(cx).text(cx).is_empty();
+            let draft_contents_task = if is_empty {
+                None
+            } else {
+                Some(editor.update(cx, |editor, cx| editor.draft_contents(cx)))
+            };
+            this._draft_resolve_task = Some(cx.spawn(async move |this, cx| {
+                let draft = if let Some(task) = draft_contents_task {
+                    let blocks = task.await.ok().filter(|b| !b.is_empty());
+                    blocks
+                } else {
+                    None
+                };
+                this.update(cx, |this, cx| {
+                    this.thread.update(cx, |thread, _cx| {
+                        thread.set_draft_prompt(draft);
+                    });
+                    this.schedule_save(cx);
+                })
+                .ok();
+            }));
+        }));
+
         let recent_history_entries = history.read(cx).get_recent_sessions(3);
 
         let mut this = Self {
@@ -387,9 +424,9 @@ impl AcpThreadView {
             focus_handle: cx.focus_handle(),
             thread,
             conversation,
-            login,
             server_view,
             agent_icon,
+            agent_icon_from_external_svg,
             agent_name,
             workspace,
             entry_view_state,
@@ -429,8 +466,9 @@ impl AcpThreadView {
             discarded_partial_edits: HashSet::default(),
             is_loading_contents: false,
             new_server_version_available: None,
-            selected_permission_granularity: HashMap::default(),
             _cancel_task: None,
+            _save_task: None,
+            _draft_resolve_task: None,
             skip_queue_processing_count: 0,
             user_interrupted_generation: false,
             can_fast_track_queue: false,
@@ -446,12 +484,50 @@ impl AcpThreadView {
             _history_subscription: history_subscription,
             show_codex_windows_warning,
         };
+        let list_state_for_scroll = this.list_state.clone();
+        let thread_view = cx.entity().downgrade();
+        this.list_state
+            .set_scroll_handler(move |_event, _window, cx| {
+                let list_state = list_state_for_scroll.clone();
+                let thread_view = thread_view.clone();
+                // N.B. We must defer because the scroll handler is called while the
+                // ListState's RefCell is mutably borrowed. Reading logical_scroll_top()
+                // directly would panic from a double borrow.
+                cx.defer(move |cx| {
+                    let scroll_top = list_state.logical_scroll_top();
+                    let _ = thread_view.update(cx, |this, cx| {
+                        if let Some(thread) = this.as_native_thread(cx) {
+                            thread.update(cx, |thread, _cx| {
+                                thread.set_ui_scroll_position(Some(scroll_top));
+                            });
+                        }
+                        this.schedule_save(cx);
+                    });
+                });
+            });
+
         if should_auto_submit {
             this.send(window, cx);
         }
         this
     }
 
+    /// Schedule a throttled save of the thread state (draft prompt, scroll position, etc.).
+    /// Multiple calls within `SERIALIZATION_THROTTLE_TIME` are coalesced into a single save.
+    fn schedule_save(&mut self, cx: &mut Context<Self>) {
+        self._save_task = Some(cx.spawn(async move |this, cx| {
+            cx.background_executor()
+                .timer(SERIALIZATION_THROTTLE_TIME)
+                .await;
+            this.update(cx, |this, cx| {
+                if let Some(thread) = this.as_native_thread(cx) {
+                    thread.update(cx, |_thread, cx| cx.notify());
+                }
+            })
+            .ok();
+        }));
+    }
+
     pub fn handle_message_editor_event(
         &mut self,
         _editor: &Entity<MessageEditor>,
@@ -467,6 +543,7 @@ impl AcpThreadView {
                 self.cancel_editing(&Default::default(), window, cx);
             }
             MessageEditorEvent::LostFocus => {}
+            MessageEditorEvent::InputAttempted(_) => {}
         }
     }
 
@@ -484,6 +561,24 @@ impl AcpThreadView {
             .thread(acp_thread.session_id(), cx)
     }
 
+    /// Resolves the message editor's contents into content blocks. For profiles
+    /// that do not enable any tools, directory mentions are expanded to inline
+    /// file contents since the agent can't read files on its own.
+    fn resolve_message_contents(
+        &self,
+        message_editor: &Entity<MessageEditor>,
+        cx: &mut App,
+    ) -> Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>> {
+        let expand = self.as_native_thread(cx).is_some_and(|thread| {
+            let thread = thread.read(cx);
+            AgentSettings::get_global(cx)
+                .profiles
+                .get(thread.profile())
+                .is_some_and(|profile| profile.tools.is_empty())
+        });
+        message_editor.update(cx, |message_editor, cx| message_editor.contents(expand, cx))
+    }
+
     pub fn current_model_id(&self, cx: &App) -> Option<String> {
         let selector = self.model_selector.as_ref()?;
         let model = selector.read(cx).active_model(cx)?;
@@ -581,9 +676,71 @@ impl AcpThreadView {
             ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Cancel) => {
                 self.cancel_editing(&Default::default(), window, cx);
             }
+            ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::InputAttempted(_)) => {}
+            ViewEvent::OpenDiffLocation {
+                path,
+                position,
+                split,
+            } => {
+                self.open_diff_location(path, *position, *split, window, cx);
+            }
         }
     }
 
+    fn open_diff_location(
+        &self,
+        path: &str,
+        position: Point,
+        split: bool,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(project) = self.project.upgrade() else {
+            return;
+        };
+        let Some(project_path) = project.read(cx).find_project_path(path, cx) else {
+            return;
+        };
+
+        let open_task = if split {
+            self.workspace
+                .update(cx, |workspace, cx| {
+                    workspace.split_path(project_path, window, cx)
+                })
+                .log_err()
+        } else {
+            self.workspace
+                .update(cx, |workspace, cx| {
+                    workspace.open_path(project_path, None, true, window, cx)
+                })
+                .log_err()
+        };
+
+        let Some(open_task) = open_task else {
+            return;
+        };
+
+        window
+            .spawn(cx, async move |cx| {
+                let item = open_task.await?;
+                let Some(editor) = item.downcast::<Editor>() else {
+                    return anyhow::Ok(());
+                };
+                editor.update_in(cx, |editor, window, cx| {
+                    editor.change_selections(
+                        SelectionEffects::scroll(Autoscroll::center()),
+                        window,
+                        cx,
+                        |selections| {
+                            selections.select_ranges([position..position]);
+                        },
+                    );
+                })?;
+                anyhow::Ok(())
+            })
+            .detach_and_log_err(cx);
+    }
+
     // turns
 
     pub fn start_turn(&mut self, cx: &mut Context<Self>) -> usize {
@@ -635,6 +792,46 @@ impl AcpThreadView {
         }
 
         let message_editor = self.message_editor.clone();
+
+        // Intercept the first send so the agent panel can capture the full
+        // content blocks — needed for "Start thread in New Worktree",
+        // which must create a workspace before sending the message there.
+        let intercept_first_send = self.thread.read(cx).entries().is_empty()
+            && !message_editor.read(cx).is_empty(cx)
+            && self
+                .workspace
+                .upgrade()
+                .and_then(|workspace| workspace.read(cx).panel::<AgentPanel>(cx))
+                .is_some_and(|panel| {
+                    panel.read(cx).start_thread_in() == &StartThreadIn::NewWorktree
+                });
+
+        if intercept_first_send {
+            let content_task = self.resolve_message_contents(&message_editor, cx);
+
+            cx.spawn(async move |this, cx| match content_task.await {
+                Ok((content, _tracked_buffers)) => {
+                    if content.is_empty() {
+                        return;
+                    }
+
+                    this.update(cx, |_, cx| {
+                        cx.emit(AcpThreadViewEvent::FirstSendRequested { content });
+                    })
+                    .ok();
+                }
+                Err(error) => {
+                    this.update(cx, |this, cx| {
+                        this.handle_thread_error(error, cx);
+                    })
+                    .ok();
+                }
+            })
+            .detach();
+
+            return;
+        }
+
         let is_editor_empty = message_editor.read(cx).is_empty(cx);
         let is_generating = thread.read(cx).status() != ThreadStatus::Idle;
 
@@ -658,7 +855,7 @@ impl AcpThreadView {
         let text = text.trim();
         if text == "/login" || text == "/logout" {
             let connection = thread.read(cx).connection().clone();
-            let can_login = !connection.auth_methods().is_empty() || self.login.is_some();
+            let can_login = !connection.auth_methods().is_empty();
             // Does the agent have a specific logout command? Prefer that in case they need to reset internal state.
             let logout_supported = text == "/logout"
                 && self
@@ -674,7 +871,7 @@ impl AcpThreadView {
                     let agent_name = self.agent_name.clone();
                     let server_view = self.server_view.clone();
                     move |window, cx| {
-                        AcpServerView::handle_auth_required(
+                        ConnectionView::handle_auth_required(
                             server_view.clone(),
                             AuthRequired::new(),
                             agent_name,
@@ -698,18 +895,7 @@ impl AcpThreadView {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| {
-            // Include full contents when using minimal profile
-            let thread = thread.read(cx);
-            AgentSettings::get_global(cx)
-                .profiles
-                .get(thread.profile())
-                .is_some_and(|profile| profile.tools.is_empty())
-        });
-
-        let contents = message_editor.update(cx, |message_editor, cx| {
-            message_editor.contents(full_mention_content, cx)
-        });
+        let contents = self.resolve_message_contents(&message_editor, cx);
 
         self.thread_error.take();
         self.thread_feedback.clear();
@@ -749,7 +935,9 @@ impl AcpThreadView {
         cx: &mut Context<Self>,
     ) {
         let session_id = self.thread.read(cx).session_id().clone();
+        let parent_session_id = self.thread.read(cx).parent_session_id().cloned();
         let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id();
+        let is_first_message = self.thread.read(cx).entries().is_empty();
         let thread = self.thread.downgrade();
 
         self.is_loading_contents = true;
@@ -790,6 +978,25 @@ impl AcpThreadView {
                     .ok();
                 }
             });
+            if is_first_message {
+                let text: String = contents
+                    .iter()
+                    .filter_map(|block| match block {
+                        acp::ContentBlock::Text(text_content) => Some(text_content.text.as_str()),
+                        _ => None,
+                    })
+                    .collect::<Vec<_>>()
+                    .join(" ");
+                let text = text.lines().next().unwrap_or("").trim();
+                if !text.is_empty() {
+                    let title: SharedString = util::truncate_and_trailoff(text, 20).into();
+                    thread
+                        .update(cx, |thread, cx| thread.set_title(title, cx))?
+                        .await
+                        .log_err();
+                }
+            }
+
             let turn_start_time = Instant::now();
             let send = thread.update(cx, |thread, cx| {
                 thread.action_log().update(cx, |action_log, cx| {
@@ -803,6 +1010,7 @@ impl AcpThreadView {
                     "Agent Message Sent",
                     agent = agent_telemetry_id,
                     session = session_id,
+                    parent_session_id = parent_session_id.as_ref().map(|id| id.to_string()),
                     model = model_id,
                     mode = mode_id
                 );
@@ -822,6 +1030,7 @@ impl AcpThreadView {
                 "Agent Turn Completed",
                 agent = agent_telemetry_id,
                 session = session_id,
+                parent_session_id = parent_session_id.as_ref().map(|id| id.to_string()),
                 model = model_id,
                 mode = mode_id,
                 status,
@@ -833,7 +1042,7 @@ impl AcpThreadView {
         cx.spawn(async move |this, cx| {
             if let Err(err) = task.await {
                 this.update(cx, |this, cx| {
-                    this.handle_any_thread_error(err, cx);
+                    this.handle_thread_error(err, cx);
                 })
                 .ok();
             } else {
@@ -891,12 +1100,12 @@ impl AcpThreadView {
         .detach();
     }
 
-    pub(crate) fn handle_any_thread_error(&mut self, error: anyhow::Error, cx: &mut Context<Self>) {
-        let error = ThreadError::from_err(error, &self.agent_name);
-        self.handle_thread_error(error, cx);
-    }
-
-    pub(crate) fn handle_thread_error(&mut self, error: ThreadError, cx: &mut Context<Self>) {
+    pub(crate) fn handle_thread_error(
+        &mut self,
+        error: impl Into<ThreadError>,
+        cx: &mut Context<Self>,
+    ) {
+        let error = error.into();
         self.emit_thread_error_telemetry(&error, cx);
         self.thread_error = Some(error);
         cx.notify();
@@ -930,11 +1139,17 @@ impl AcpThreadView {
 
         let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id();
         let session_id = self.thread.read(cx).session_id().clone();
+        let parent_session_id = self
+            .thread
+            .read(cx)
+            .parent_session_id()
+            .map(|id| id.to_string());
 
         telemetry::event!(
             "Agent Panel Error Shown",
             agent = agent_telemetry_id,
             session_id = session_id,
+            parent_session_id = parent_session_id,
             kind = error_kind,
             acp_error_code = acp_error_code,
             message = message,
@@ -964,7 +1179,7 @@ impl AcpThreadView {
 
             this.update(cx, |this, cx| {
                 if let Err(err) = result {
-                    this.handle_any_thread_error(err, cx);
+                    this.handle_thread_error(err, cx);
                 }
             })
         })
@@ -1035,21 +1250,11 @@ impl AcpThreadView {
         let is_idle = self.thread.read(cx).status() == acp_thread::ThreadStatus::Idle;
 
         if is_idle {
-            self.send_impl(message_editor.clone(), window, cx);
+            self.send_impl(message_editor, window, cx);
             return;
         }
 
-        let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| {
-            let thread = thread.read(cx);
-            AgentSettings::get_global(cx)
-                .profiles
-                .get(thread.profile())
-                .is_some_and(|profile| profile.tools.is_empty())
-        });
-
-        let contents = message_editor.update(cx, |message_editor, cx| {
-            message_editor.contents(full_mention_content, cx)
-        });
+        let contents = self.resolve_message_contents(&message_editor, cx);
 
         cx.spawn_in(window, async move |this, cx| {
             let (content, tracked_buffers) = contents.await?;
@@ -1151,6 +1356,44 @@ impl AcpThreadView {
         self.send_content(contents_task, window, cx);
     }
 
+    pub fn move_queued_message_to_main_editor(
+        &mut self,
+        index: usize,
+        inserted_text: Option<&str>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> bool {
+        let Some(queued_message) = self.remove_from_queue(index, cx) else {
+            return false;
+        };
+        let queued_content = queued_message.content;
+        let message_editor = self.message_editor.clone();
+        let inserted_text = inserted_text.map(ToOwned::to_owned);
+
+        window.focus(&message_editor.focus_handle(cx), cx);
+
+        if message_editor.read(cx).is_empty(cx) {
+            message_editor.update(cx, |editor, cx| {
+                editor.set_message(queued_content, window, cx);
+                if let Some(inserted_text) = inserted_text.as_deref() {
+                    editor.insert_text(inserted_text, window, cx);
+                }
+            });
+            cx.notify();
+            return true;
+        }
+
+        message_editor.update(cx, |editor, cx| {
+            editor.append_message(queued_content, Some("\n\n"), window, cx);
+            if let Some(inserted_text) = inserted_text.as_deref() {
+                editor.insert_text(inserted_text, window, cx);
+            }
+        });
+
+        cx.notify();
+        true
+    }
+
     // editor methods
 
     pub fn expand_message_editor(
@@ -1325,19 +1568,6 @@ impl AcpThreadView {
         );
     }
 
-    pub fn handle_select_permission_granularity(
-        &mut self,
-        action: &SelectPermissionGranularity,
-        _window: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        let tool_call_id = acp::ToolCallId::new(action.tool_call_id.clone());
-        self.selected_permission_granularity
-            .insert(tool_call_id, action.index);
-
-        cx.notify();
-    }
-
     fn authorize_pending_with_granularity(
         &mut self,
         is_allow: bool,
@@ -1357,9 +1587,9 @@ impl AcpThreadView {
 
         // Get selected index, defaulting to last option ("Only this time")
         let selected_index = self
-            .selected_permission_granularity
-            .get(&tool_call_id)
-            .copied()
+            .conversation
+            .read(cx)
+            .selected_permission_granularity(&session_id, &tool_call_id)
             .unwrap_or_else(|| choices.len().saturating_sub(1));
 
         let selected_choice = choices.get(selected_index).or(choices.last())?;
@@ -1507,7 +1737,7 @@ impl AcpThreadView {
     pub fn sync_thread(
         &mut self,
         project: Entity<Project>,
-        server_view: Entity<AcpServerView>,
+        server_view: Entity<ConnectionView>,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -1539,7 +1769,7 @@ impl AcpThreadView {
 
             thread_store
                 .update(&mut cx.clone(), |store, cx| {
-                    store.save_thread(session_id.clone(), db_thread, cx)
+                    store.save_thread(session_id.clone(), db_thread, Default::default(), cx)
                 })
                 .await?;
 
@@ -1757,23 +1987,26 @@ impl AcpThreadView {
             .when(!plan.is_empty() && !changed_buffers.is_empty(), |this| {
                 this.child(Divider::horizontal().color(DividerColor::Border))
             })
-            .when(!changed_buffers.is_empty(), |this| {
-                this.child(self.render_edits_summary(
-                    &changed_buffers,
-                    edits_expanded,
-                    pending_edits,
-                    cx,
-                ))
-                .when(edits_expanded, |parent| {
-                    parent.child(self.render_edited_files(
-                        action_log,
-                        telemetry.clone(),
+            .when(
+                !changed_buffers.is_empty() && thread.parent_session_id().is_none(),
+                |this| {
+                    this.child(self.render_edits_summary(
                         &changed_buffers,
+                        edits_expanded,
                         pending_edits,
                         cx,
                     ))
-                })
-            })
+                    .when(edits_expanded, |parent| {
+                        parent.child(self.render_edited_files(
+                            action_log,
+                            telemetry.clone(),
+                            &changed_buffers,
+                            pending_edits,
+                            cx,
+                        ))
+                    })
+                },
+            )
             .when(!queue_is_empty, |this| {
                 this.when(!plan.is_empty() || !changed_buffers.is_empty(), |this| {
                     this.child(Divider::horizontal().color(DividerColor::Border))
@@ -2529,6 +2762,7 @@ impl AcpThreadView {
                             .gap_0p5()
                             .child(self.render_add_context_button(cx))
                             .child(self.render_follow_toggle(cx))
+                            .children(self.render_fast_mode_control(cx))
                             .children(self.render_thinking_control(cx)),
                     )
                     .child(
@@ -2606,50 +2840,24 @@ impl AcpThreadView {
                             .child(if editor_focused {
                                 h_flex()
                                     .gap_1()
-                                    .min_w_40()
-                                    .child(
-                                        IconButton::new(("cancel_edit", index), IconName::Close)
-                                            .icon_size(IconSize::Small)
-                                            .icon_color(Color::Error)
-                                            .tooltip({
-                                                let focus_handle = editor.focus_handle(cx);
-                                                move |_window, cx| {
-                                                    Tooltip::for_action_in(
-                                                        "Cancel Edit",
-                                                        &editor::actions::Cancel,
-                                                        &focus_handle,
-                                                        cx,
-                                                    )
-                                                }
-                                            })
-                                            .on_click({
-                                                let main_editor = self.message_editor.clone();
-                                                cx.listener(move |_, _, window, cx| {
-                                                    window.focus(&main_editor.focus_handle(cx), cx);
-                                                })
-                                            }),
-                                    )
+                                    .min_w(rems_from_px(150.))
+                                    .justify_end()
                                     .child(
-                                        IconButton::new(("save_edit", index), IconName::Check)
+                                        IconButton::new(("edit", index), IconName::Pencil)
                                             .icon_size(IconSize::Small)
-                                            .icon_color(Color::Success)
-                                            .tooltip({
-                                                let focus_handle = editor.focus_handle(cx);
-                                                move |_window, cx| {
-                                                    Tooltip::for_action_in(
-                                                        "Save Edit",
-                                                        &Chat,
-                                                        &focus_handle,
-                                                        cx,
-                                                    )
-                                                }
+                                            .tooltip(|_window, cx| {
+                                                Tooltip::with_meta(
+                                                    "Edit Queued Message",
+                                                    None,
+                                                    "Type anything to edit",
+                                                    cx,
+                                                )
                                             })
-                                            .on_click({
-                                                let main_editor = self.message_editor.clone();
-                                                cx.listener(move |_, _, window, cx| {
-                                                    window.focus(&main_editor.focus_handle(cx), cx);
-                                                })
-                                            }),
+                                            .on_click(cx.listener(move |this, _, window, cx| {
+                                                this.move_queued_message_to_main_editor(
+                                                    index, None, window, cx,
+                                                );
+                                            })),
                                     )
                                     .child(
                                         Button::new(("send_now_focused", index), "Send Now")
@@ -2671,62 +2879,64 @@ impl AcpThreadView {
                                     )
                             } else {
                                 h_flex()
-                                    .gap_1()
                                     .when(!is_next, |this| this.visible_on_hover("queue_entry"))
+                                    .gap_1()
+                                    .min_w(rems_from_px(150.))
+                                    .justify_end()
                                     .child(
-                                        IconButton::new(("edit", index), IconName::Pencil)
+                                        IconButton::new(("delete", index), IconName::Trash)
                                             .icon_size(IconSize::Small)
                                             .tooltip({
                                                 let focus_handle = focus_handle.clone();
                                                 move |_window, cx| {
                                                     if is_next {
                                                         Tooltip::for_action_in(
-                                                            "Edit",
-                                                            &EditFirstQueuedMessage,
+                                                            "Remove Message from Queue",
+                                                            &RemoveFirstQueuedMessage,
                                                             &focus_handle,
                                                             cx,
                                                         )
                                                     } else {
-                                                        Tooltip::simple("Edit", cx)
+                                                        Tooltip::simple(
+                                                            "Remove Message from Queue",
+                                                            cx,
+                                                        )
                                                     }
                                                 }
                                             })
-                                            .on_click({
-                                                let editor = editor.clone();
-                                                cx.listener(move |_, _, window, cx| {
-                                                    window.focus(&editor.focus_handle(cx), cx);
-                                                })
-                                            }),
+                                            .on_click(cx.listener(move |this, _, _, cx| {
+                                                this.remove_from_queue(index, cx);
+                                                cx.notify();
+                                            })),
                                     )
                                     .child(
-                                        IconButton::new(("delete", index), IconName::Trash)
+                                        IconButton::new(("edit", index), IconName::Pencil)
                                             .icon_size(IconSize::Small)
                                             .tooltip({
                                                 let focus_handle = focus_handle.clone();
                                                 move |_window, cx| {
                                                     if is_next {
                                                         Tooltip::for_action_in(
-                                                            "Remove Message from Queue",
-                                                            &RemoveFirstQueuedMessage,
+                                                            "Edit",
+                                                            &EditFirstQueuedMessage,
                                                             &focus_handle,
                                                             cx,
                                                         )
                                                     } else {
-                                                        Tooltip::simple(
-                                                            "Remove Message from Queue",
-                                                            cx,
-                                                        )
+                                                        Tooltip::simple("Edit", cx)
                                                     }
                                                 }
                                             })
-                                            .on_click(cx.listener(move |this, _, _, cx| {
-                                                this.remove_from_queue(index, cx);
-                                                cx.notify();
+                                            .on_click(cx.listener(move |this, _, window, cx| {
+                                                this.move_queued_message_to_main_editor(
+                                                    index, None, window, cx,
+                                                );
                                             })),
                                     )
                                     .child(
                                         Button::new(("send_now", index), "Send Now")
                                             .label_size(LabelSize::Small)
+                                            .when(is_next, |this| this.style(ButtonStyle::Outlined))
                                             .when(is_next && message_editor.is_empty(cx), |this| {
                                                 let action: Box<dyn gpui::Action> =
                                                     if can_fast_track {
@@ -2735,7 +2945,7 @@ impl AcpThreadView {
                                                         Box::new(SendNextQueuedMessage)
                                                     };
 
-                                                this.style(ButtonStyle::Outlined).key_binding(
+                                                this.key_binding(
                                                     KeyBinding::for_action_in(
                                                         action.as_ref(),
                                                         &focus_handle.clone(),
@@ -2744,9 +2954,6 @@ impl AcpThreadView {
                                                     .map(|kb| kb.size(keybinding_size)),
                                                 )
                                             })
-                                            .when(is_next && !message_editor.is_empty(cx), |this| {
-                                                this.style(ButtonStyle::Outlined)
-                                            })
                                             .on_click(cx.listener(move |this, _, window, cx| {
                                                 this.send_queued_message_at_index(
                                                     index, true, window, cx,
@@ -2953,26 +3160,69 @@ impl AcpThreadView {
         }
     }
 
-    fn render_thinking_control(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
-        let thread = self.as_native_thread(cx)?.read(cx);
-        let model = thread.model()?;
+    fn fast_mode_available(&self, cx: &Context<Self>) -> bool {
+        if !cx.is_staff() {
+            return false;
+        }
+        self.as_native_thread(cx)
+            .and_then(|thread| thread.read(cx).model())
+            .map(|model| model.supports_fast_mode())
+            .unwrap_or(false)
+    }
 
-        let supports_thinking = model.supports_thinking();
-        if !supports_thinking {
+    fn render_fast_mode_control(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
+        if !self.fast_mode_available(cx) {
             return None;
         }
 
-        let thinking = thread.thinking_enabled();
+        let thread = self.as_native_thread(cx)?.read(cx);
 
-        let (tooltip_label, icon, color) = if thinking {
-            (
-                "Disable Thinking Mode",
-                IconName::ThinkingMode,
-                Color::Muted,
-            )
+        let (tooltip_label, color, icon) = if matches!(thread.speed(), Some(Speed::Fast)) {
+            ("Disable Fast Mode", Color::Muted, IconName::FastForward)
         } else {
             (
-                "Enable Thinking Mode",
+                "Enable Fast Mode",
+                Color::Custom(cx.theme().colors().icon_disabled.opacity(0.8)),
+                IconName::FastForwardOff,
+            )
+        };
+
+        let focus_handle = self.message_editor.focus_handle(cx);
+
+        Some(
+            IconButton::new("fast-mode", icon)
+                .icon_size(IconSize::Small)
+                .icon_color(color)
+                .tooltip(move |_, cx| {
+                    Tooltip::for_action_in(tooltip_label, &ToggleFastMode, &focus_handle, cx)
+                })
+                .on_click(cx.listener(move |this, _, _window, cx| {
+                    this.toggle_fast_mode(cx);
+                }))
+                .into_any_element(),
+        )
+    }
+
+    fn render_thinking_control(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
+        let thread = self.as_native_thread(cx)?.read(cx);
+        let model = thread.model()?;
+
+        let supports_thinking = model.supports_thinking();
+        if !supports_thinking {
+            return None;
+        }
+
+        let thinking = thread.thinking_enabled();
+
+        let (tooltip_label, icon, color) = if thinking {
+            (
+                "Disable Thinking Mode",
+                IconName::ThinkingMode,
+                Color::Muted,
+            )
+        } else {
+            (
+                "Enable Thinking Mode",
                 IconName::ThinkingModeOff,
                 Color::Custom(cx.theme().colors().icon_disabled.opacity(0.8)),
             )
@@ -3181,7 +3431,12 @@ impl AcpThreadView {
                 .on_click(cx.listener(|this, _event, _, cx| this.cancel_generation(cx)))
                 .into_any_element()
         } else {
-            IconButton::new("send-message", IconName::Send)
+            let send_icon = if is_generating {
+                IconName::QueueMessage
+            } else {
+                IconName::Send
+            };
+            IconButton::new("send-message", send_icon)
                 .style(ButtonStyle::Filled)
                 .map(|this| {
                     if is_editor_empty && !is_generating {
@@ -3432,7 +3687,7 @@ impl AcpThreadView {
     }
 }
 
-impl AcpThreadView {
+impl ThreadView {
     pub(crate) fn render_entries(&mut self, cx: &mut Context<Self>) -> List {
         list(
             self.list_state.clone(),
@@ -3670,6 +3925,7 @@ impl AcpThreadView {
             AgentThreadEntry::AssistantMessage(AssistantMessage {
                 chunks,
                 indented: _,
+                is_subagent_output: _,
             }) => {
                 let mut is_blank = true;
                 let is_last = entry_ix + 1 == total_entries;
@@ -3736,12 +3992,49 @@ impl AcpThreadView {
                     entry_ix,
                     tool_call,
                     &self.focus_handle(cx),
+                    false,
                     window,
                     cx,
                 )
                 .into_any(),
         };
 
+        let is_subagent_output = self.is_subagent()
+            && matches!(entry, AgentThreadEntry::AssistantMessage(msg) if msg.is_subagent_output);
+
+        let primary = if is_subagent_output {
+            v_flex()
+                .w_full()
+                .child(
+                    h_flex()
+                        .id("subagent_output")
+                        .px_5()
+                        .py_1()
+                        .gap_2()
+                        .child(Divider::horizontal())
+                        .child(
+                            h_flex()
+                                .gap_1()
+                                .child(
+                                    Icon::new(IconName::ForwardArrowUp)
+                                        .color(Color::Muted)
+                                        .size(IconSize::Small),
+                                )
+                                .child(
+                                    Label::new("Subagent Output")
+                                        .size(LabelSize::Custom(self.tool_name_font_size()))
+                                        .color(Color::Muted),
+                                ),
+                        )
+                        .child(Divider::horizontal())
+                        .tooltip(Tooltip::text("Everything below this line was sent as output from this subagent to the main agent.")),
+                )
+                .child(primary)
+                .into_any_element()
+        } else {
+            primary
+        };
+
         let primary = if is_indented {
             let line_top = if is_first_indented {
                 rems_from_px(-12.0)

crates/agent_ui/src/acp/entry_view_state.rs → crates/agent_ui/src/entry_view_state.rs 🔗

@@ -1,11 +1,11 @@
 use std::{cell::RefCell, ops::Range, rc::Rc};
 
-use super::thread_history::AcpThreadHistory;
+use super::thread_history::ThreadHistory;
 use acp_thread::{AcpThread, AgentThreadEntry};
 use agent::ThreadStore;
 use agent_client_protocol::{self as acp, ToolCallId};
 use collections::HashMap;
-use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior};
+use editor::{Editor, EditorEvent, EditorMode, MinimapVisibility, SizingBehavior};
 use gpui::{
     AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
     ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window,
@@ -13,19 +13,20 @@ use gpui::{
 use language::language_settings::SoftWrap;
 use project::Project;
 use prompt_store::PromptStore;
+use rope::Point;
 use settings::Settings as _;
 use terminal_view::TerminalView;
 use theme::ThemeSettings;
 use ui::{Context, TextSize};
 use workspace::Workspace;
 
-use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
+use crate::message_editor::{MessageEditor, MessageEditorEvent};
 
 pub struct EntryViewState {
     workspace: WeakEntity<Workspace>,
     project: WeakEntity<Project>,
     thread_store: Option<Entity<ThreadStore>>,
-    history: WeakEntity<AcpThreadHistory>,
+    history: WeakEntity<ThreadHistory>,
     prompt_store: Option<Entity<PromptStore>>,
     entries: Vec<Entry>,
     prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
@@ -38,7 +39,7 @@ impl EntryViewState {
         workspace: WeakEntity<Workspace>,
         project: WeakEntity<Project>,
         thread_store: Option<Entity<ThreadStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         prompt_store: Option<Entity<PromptStore>>,
         prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
         available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
@@ -113,7 +114,7 @@ impl EntryViewState {
                     cx.subscribe(&message_editor, move |_, editor, event, cx| {
                         cx.emit(EntryViewEvent {
                             entry_index: index,
-                            view_event: ViewEvent::MessageEditorEvent(editor, *event),
+                            view_event: ViewEvent::MessageEditorEvent(editor, event.clone()),
                         })
                     })
                     .detach();
@@ -125,14 +126,19 @@ impl EntryViewState {
                 let terminals = tool_call.terminals().cloned().collect::<Vec<_>>();
                 let diffs = tool_call.diffs().cloned().collect::<Vec<_>>();
 
-                let views = if let Some(Entry::Content(views)) = self.entries.get_mut(index) {
-                    views
+                let views = if let Some(Entry::ToolCall(tool_call)) = self.entries.get_mut(index) {
+                    &mut tool_call.content
                 } else {
-                    self.set_entry(index, Entry::empty());
-                    let Some(Entry::Content(views)) = self.entries.get_mut(index) else {
+                    self.set_entry(
+                        index,
+                        Entry::ToolCall(ToolCallEntry {
+                            content: HashMap::default(),
+                        }),
+                    );
+                    let Some(Entry::ToolCall(tool_call)) = self.entries.get_mut(index) else {
                         unreachable!()
                     };
-                    views
+                    &mut tool_call.content
                 };
 
                 let is_tool_call_completed =
@@ -168,12 +174,48 @@ impl EntryViewState {
 
                 for diff in diffs {
                     views.entry(diff.entity_id()).or_insert_with(|| {
-                        let element = create_editor_diff(diff.clone(), window, cx).into_any();
+                        let editor = create_editor_diff(diff.clone(), window, cx);
+                        cx.subscribe(&editor, {
+                            let diff = diff.clone();
+                            let entry_index = index;
+                            move |_this, _editor, event: &EditorEvent, cx| {
+                                if let EditorEvent::OpenExcerptsRequested {
+                                    selections_by_buffer,
+                                    split,
+                                } = event
+                                {
+                                    let multibuffer = diff.read(cx).multibuffer();
+                                    if let Some((buffer_id, (ranges, _))) =
+                                        selections_by_buffer.iter().next()
+                                    {
+                                        if let Some(buffer) =
+                                            multibuffer.read(cx).buffer(*buffer_id)
+                                        {
+                                            if let Some(range) = ranges.first() {
+                                                let point =
+                                                    buffer.read(cx).offset_to_point(range.start.0);
+                                                if let Some(path) = diff.read(cx).file_path(cx) {
+                                                    cx.emit(EntryViewEvent {
+                                                        entry_index,
+                                                        view_event: ViewEvent::OpenDiffLocation {
+                                                            path,
+                                                            position: point,
+                                                            split: *split,
+                                                        },
+                                                    });
+                                                }
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        })
+                        .detach();
                         cx.emit(EntryViewEvent {
                             entry_index: index,
                             view_event: ViewEvent::NewDiff(id.clone()),
                         });
-                        element
+                        editor.into_any()
                     });
                 }
             }
@@ -213,8 +255,8 @@ impl EntryViewState {
         for entry in self.entries.iter() {
             match entry {
                 Entry::UserMessage { .. } | Entry::AssistantMessage { .. } => {}
-                Entry::Content(response_views) => {
-                    for view in response_views.values() {
+                Entry::ToolCall(ToolCallEntry { content }) => {
+                    for view in content.values() {
                         if let Ok(diff_editor) = view.clone().downcast::<Editor>() {
                             diff_editor.update(cx, |diff_editor, cx| {
                                 diff_editor.set_text_style_refinement(
@@ -242,6 +284,11 @@ pub enum ViewEvent {
     NewTerminal(ToolCallId),
     TerminalMovedToBackground(ToolCallId),
     MessageEditorEvent(Entity<MessageEditor>, MessageEditorEvent),
+    OpenDiffLocation {
+        path: String,
+        position: Point,
+        split: bool,
+    },
 }
 
 #[derive(Default, Debug)]
@@ -263,25 +310,30 @@ impl AssistantMessageEntry {
     }
 }
 
+#[derive(Debug)]
+pub struct ToolCallEntry {
+    content: HashMap<EntityId, AnyEntity>,
+}
+
 #[derive(Debug)]
 pub enum Entry {
     UserMessage(Entity<MessageEditor>),
     AssistantMessage(AssistantMessageEntry),
-    Content(HashMap<EntityId, AnyEntity>),
+    ToolCall(ToolCallEntry),
 }
 
 impl Entry {
     pub fn focus_handle(&self, cx: &App) -> Option<FocusHandle> {
         match self {
             Self::UserMessage(editor) => Some(editor.read(cx).focus_handle(cx)),
-            Self::AssistantMessage(_) | Self::Content(_) => None,
+            Self::AssistantMessage(_) | Self::ToolCall(_) => None,
         }
     }
 
     pub fn message_editor(&self) -> Option<&Entity<MessageEditor>> {
         match self {
             Self::UserMessage(editor) => Some(editor),
-            Self::AssistantMessage(_) | Self::Content(_) => None,
+            Self::AssistantMessage(_) | Self::ToolCall(_) => None,
         }
     }
 
@@ -308,25 +360,21 @@ impl Entry {
     ) -> Option<ScrollHandle> {
         match self {
             Self::AssistantMessage(message) => message.scroll_handle_for_chunk(chunk_ix),
-            Self::UserMessage(_) | Self::Content(_) => None,
+            Self::UserMessage(_) | Self::ToolCall(_) => None,
         }
     }
 
     fn content_map(&self) -> Option<&HashMap<EntityId, AnyEntity>> {
         match self {
-            Self::Content(map) => Some(map),
+            Self::ToolCall(ToolCallEntry { content }) => Some(content),
             _ => None,
         }
     }
 
-    fn empty() -> Self {
-        Self::Content(HashMap::default())
-    }
-
     #[cfg(test)]
     pub fn has_content(&self) -> bool {
         match self {
-            Self::Content(map) => !map.is_empty(),
+            Self::ToolCall(ToolCallEntry { content }) => !content.is_empty(),
             Self::UserMessage(_) | Self::AssistantMessage(_) => false,
         }
     }
@@ -379,6 +427,7 @@ fn create_editor_diff(
         editor.scroll_manager.set_forbid_vertical_scroll(true);
         editor.set_show_indent_guides(false, cx);
         editor.set_read_only(true);
+        editor.set_delegate_open_excerpts(true);
         editor.set_show_breakpoints(false, cx);
         editor.set_show_code_actions(false, cx);
         editor.set_show_git_diff_gutter(false, cx);
@@ -412,7 +461,7 @@ mod tests {
     use fs::FakeFs;
     use gpui::{AppContext as _, TestAppContext};
 
-    use crate::acp::entry_view_state::EntryViewState;
+    use crate::entry_view_state::EntryViewState;
     use multi_buffer::MultiBufferRow;
     use pretty_assertions::assert_matches;
     use project::Project;
@@ -459,8 +508,8 @@ mod tests {
         });
 
         let thread_store = None;
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let view_state = cx.new(|_cx| {
             EntryViewState::new(

crates/agent_ui/src/inline_assistant.rs 🔗

@@ -7,7 +7,7 @@ use std::rc::Rc;
 use std::sync::Arc;
 use uuid::Uuid;
 
-use crate::acp::AcpThreadHistory;
+use crate::ThreadHistory;
 use crate::context::load_context;
 use crate::mention_set::MentionSet;
 use crate::{
@@ -26,8 +26,8 @@ use editor::RowExt;
 use editor::SelectionEffects;
 use editor::scroll::ScrollOffset;
 use editor::{
-    Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange,
-    HighlightKey, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
+    Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey,
+    MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
     actions::SelectAll,
     display_map::{
         BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins,
@@ -487,7 +487,7 @@ impl InlineAssistant {
         project: WeakEntity<Project>,
         thread_store: Entity<ThreadStore>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         initial_prompt: Option<String>,
         window: &mut Window,
         codegen_ranges: &[Range<Anchor>],
@@ -626,7 +626,7 @@ impl InlineAssistant {
         project: WeakEntity<Project>,
         thread_store: Entity<ThreadStore>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         initial_prompt: Option<String>,
         window: &mut Window,
         cx: &mut App,
@@ -671,7 +671,7 @@ impl InlineAssistant {
         workspace: Entity<Workspace>,
         thread_store: Entity<ThreadStore>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         window: &mut Window,
         cx: &mut App,
     ) -> InlineAssistId {
@@ -1495,11 +1495,11 @@ impl InlineAssistant {
 
             let mut new_blocks = Vec::new();
             for (new_row, old_row_range) in deleted_row_ranges {
-                let (_, buffer_start) = old_snapshot
-                    .point_to_buffer_offset(Point::new(*old_row_range.start(), 0))
+                let (_, start, _) = old_snapshot
+                    .point_to_buffer_point(Point::new(*old_row_range.start(), 0))
                     .unwrap();
-                let (_, buffer_end) = old_snapshot
-                    .point_to_buffer_offset(Point::new(
+                let (_, end, _) = old_snapshot
+                    .point_to_buffer_point(Point::new(
                         *old_row_range.end(),
                         old_snapshot.line_len(MultiBufferRow(*old_row_range.end())),
                     ))
@@ -1509,10 +1509,11 @@ impl InlineAssistant {
                     let multi_buffer =
                         cx.new(|_| MultiBuffer::without_headers(language::Capability::ReadOnly));
                     multi_buffer.update(cx, |multi_buffer, cx| {
-                        multi_buffer.push_excerpts(
+                        multi_buffer.set_excerpts_for_buffer(
                             old_buffer.clone(),
-                            // todo(lw): buffer_start and buffer_end might come from different snapshots!
-                            Some(ExcerptRange::new(buffer_start..buffer_end)),
+                            // todo(lw): start and end might come from different snapshots!
+                            [start..end],
+                            0,
                             cx,
                         );
                     });
@@ -2154,7 +2155,7 @@ pub mod test {
             });
 
             let thread_store = cx.new(|cx| ThreadStore::new(cx));
-            let history = cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx));
+            let history = cx.new(|cx| crate::ThreadHistory::new(None, window, cx));
 
             // Add editor to workspace
             workspace.update(cx, |workspace, cx| {

crates/agent_ui/src/inline_prompt_editor.rs 🔗

@@ -1,4 +1,4 @@
-use crate::acp::AcpThreadHistory;
+use crate::ThreadHistory;
 use agent::ThreadStore;
 use agent_settings::AgentSettings;
 use collections::{HashMap, VecDeque};
@@ -64,7 +64,7 @@ pub struct PromptEditor<T> {
     pub editor: Entity<Editor>,
     mode: PromptEditorMode,
     mention_set: Entity<MentionSet>,
-    history: WeakEntity<AcpThreadHistory>,
+    history: WeakEntity<ThreadHistory>,
     prompt_store: Option<Entity<PromptStore>>,
     workspace: WeakEntity<Workspace>,
     model_selector: Entity<AgentModelSelector>,
@@ -1225,7 +1225,7 @@ impl PromptEditor<BufferCodegen> {
         fs: Arc<dyn Fs>,
         thread_store: Entity<ThreadStore>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         project: WeakEntity<Project>,
         workspace: WeakEntity<Workspace>,
         window: &mut Window,
@@ -1384,7 +1384,7 @@ impl PromptEditor<TerminalCodegen> {
         fs: Arc<dyn Fs>,
         thread_store: Entity<ThreadStore>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         project: WeakEntity<Project>,
         workspace: WeakEntity<Workspace>,
         window: &mut Window,
@@ -1632,6 +1632,7 @@ fn insert_message_creases(
             crease_for_mention(
                 crease.label.clone(),
                 crease.icon_path.clone(),
+                None,
                 start..end,
                 cx.weak_entity(),
             )

crates/agent_ui/src/language_model_selector.rs 🔗

@@ -455,12 +455,7 @@ impl PickerDelegate for LanguageModelPickerDelegate {
         cx.notify();
     }
 
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
         match self.filtered_entries.get(ix) {
             Some(LanguageModelPickerEntry::Model(_)) => true,
             Some(LanguageModelPickerEntry::Separator(_)) | None => false,

crates/agent_ui/src/mention_set.rs 🔗

@@ -233,6 +233,9 @@ impl MentionSet {
                 content_len,
                 mention_uri.name().into(),
                 IconName::Image.path().into(),
+                mention_uri.tooltip_text(),
+                Some(mention_uri.clone()),
+                Some(workspace.downgrade()),
                 Some(image),
                 editor.clone(),
                 window,
@@ -245,6 +248,9 @@ impl MentionSet {
                 content_len,
                 crease_text,
                 mention_uri.icon_path(cx),
+                mention_uri.tooltip_text(),
+                Some(mention_uri.clone()),
+                Some(workspace.downgrade()),
                 None,
                 editor.clone(),
                 window,
@@ -485,6 +491,7 @@ impl MentionSet {
             let crease = crease_for_mention(
                 selection_name(abs_path.as_deref(), &line_range).into(),
                 uri.icon_path(cx),
+                uri.tooltip_text(),
                 range,
                 editor.downgrade(),
             );
@@ -547,9 +554,9 @@ impl MentionSet {
             None,
             None,
         );
-        let connection = server.connect(None, delegate, cx);
+        let connection = server.connect(delegate, cx);
         cx.spawn(async move |_, cx| {
-            let (agent, _) = connection.await?;
+            let agent = connection.await?;
             let agent = agent.downcast::<agent::NativeAgentConnection>().unwrap();
             let summary = agent
                 .0
@@ -695,6 +702,9 @@ pub(crate) async fn insert_images_as_context(
                 content_len,
                 MentionUri::PastedImage.name().into(),
                 IconName::Image.path().into(),
+                None,
+                None,
+                None,
                 Some(Task::ready(Ok(image.clone())).shared()),
                 editor.clone(),
                 window,
@@ -805,7 +815,9 @@ pub(crate) fn insert_crease_for_mention(
     content_len: usize,
     crease_label: SharedString,
     crease_icon: SharedString,
-    // abs_path: Option<Arc<Path>>,
+    crease_tooltip: Option<SharedString>,
+    mention_uri: Option<MentionUri>,
+    workspace: Option<WeakEntity<Workspace>>,
     image: Option<Shared<Task<Result<Arc<Image>, String>>>>,
     editor: Entity<Editor>,
     window: &mut Window,
@@ -825,6 +837,9 @@ pub(crate) fn insert_crease_for_mention(
             render: render_mention_fold_button(
                 crease_label.clone(),
                 crease_icon.clone(),
+                crease_tooltip,
+                mention_uri.clone(),
+                workspace.clone(),
                 start..end,
                 rx,
                 image,
@@ -858,11 +873,12 @@ pub(crate) fn insert_crease_for_mention(
 pub(crate) fn crease_for_mention(
     label: SharedString,
     icon_path: SharedString,
+    tooltip: Option<SharedString>,
     range: Range<Anchor>,
     editor_entity: WeakEntity<Editor>,
 ) -> Crease<Anchor> {
     let placeholder = FoldPlaceholder {
-        render: render_fold_icon_button(icon_path.clone(), label.clone(), editor_entity),
+        render: render_fold_icon_button(icon_path.clone(), label.clone(), tooltip, editor_entity),
         merge_adjacent: false,
         ..Default::default()
     };
@@ -876,6 +892,7 @@ pub(crate) fn crease_for_mention(
 fn render_fold_icon_button(
     icon_path: SharedString,
     label: SharedString,
+    tooltip: Option<SharedString>,
     editor: WeakEntity<Editor>,
 ) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
     Arc::new({
@@ -886,6 +903,9 @@ fn render_fold_icon_button(
 
             MentionCrease::new(fold_id, icon_path.clone(), label.clone())
                 .is_toggled(is_in_text_selection)
+                .when_some(tooltip.clone(), |this, tooltip_text| {
+                    this.tooltip(tooltip_text)
+                })
                 .into_any_element()
         }
     })
@@ -1018,6 +1038,9 @@ fn render_directory_contents(entries: Vec<(Arc<RelPath>, String, String)>) -> St
 fn render_mention_fold_button(
     label: SharedString,
     icon: SharedString,
+    tooltip: Option<SharedString>,
+    mention_uri: Option<MentionUri>,
+    workspace: Option<WeakEntity<Workspace>>,
     range: Range<Anchor>,
     mut loading_finished: postage::barrier::Receiver,
     image_task: Option<Shared<Task<Result<Arc<Image>, String>>>>,
@@ -1037,6 +1060,9 @@ fn render_mention_fold_button(
             id: cx.entity_id(),
             label,
             icon,
+            tooltip,
+            mention_uri: mention_uri.clone(),
+            workspace: workspace.clone(),
             range,
             editor,
             loading: Some(loading),
@@ -1050,6 +1076,9 @@ struct LoadingContext {
     id: EntityId,
     label: SharedString,
     icon: SharedString,
+    tooltip: Option<SharedString>,
+    mention_uri: Option<MentionUri>,
+    workspace: Option<WeakEntity<Workspace>>,
     range: Range<Anchor>,
     editor: WeakEntity<Editor>,
     loading: Option<Task<()>>,
@@ -1066,8 +1095,13 @@ impl Render for LoadingContext {
         let id = ElementId::from(("loading_context", self.id));
 
         MentionCrease::new(id, self.icon.clone(), self.label.clone())
+            .mention_uri(self.mention_uri.clone())
+            .workspace(self.workspace.clone())
             .is_toggled(is_in_text_selection)
             .is_loading(self.loading.is_some())
+            .when_some(self.tooltip.clone(), |this, tooltip_text| {
+                this.tooltip(tooltip_text)
+            })
             .when_some(self.image.clone(), |this, image_task| {
                 this.image_preview(move |_, cx| {
                     let image = image_task.peek().cloned().transpose().ok().flatten();

crates/agent_ui/src/acp/message_editor.rs → crates/agent_ui/src/message_editor.rs 🔗

@@ -1,5 +1,5 @@
 use crate::SendImmediately;
-use crate::acp::AcpThreadHistory;
+use crate::ThreadHistory;
 use crate::{
     ChatWithFollow,
     completion_provider::{
@@ -51,13 +51,14 @@ pub struct MessageEditor {
     _parse_slash_command_task: Task<()>,
 }
 
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Debug)]
 pub enum MessageEditorEvent {
     Send,
     SendImmediately,
     Cancel,
     Focus,
     LostFocus,
+    InputAttempted(Arc<str>),
 }
 
 impl EventEmitter<MessageEditorEvent> for MessageEditor {}
@@ -107,7 +108,7 @@ impl MessageEditor {
         workspace: WeakEntity<Workspace>,
         project: WeakEntity<Project>,
         thread_store: Option<Entity<ThreadStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         prompt_store: Option<Entity<PromptStore>>,
         prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
         available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
@@ -153,6 +154,7 @@ impl MessageEditor {
                             Box::new(editor::actions::Copy),
                         )
                         .action("Paste", Box::new(editor::actions::Paste))
+                        .action("Paste as Plain Text", Box::new(PasteRaw))
                 }))
             });
 
@@ -186,6 +188,18 @@ impl MessageEditor {
 
         subscriptions.push(cx.subscribe_in(&editor, window, {
             move |this, editor, event, window, cx| {
+                let input_attempted_text = match event {
+                    EditorEvent::InputHandled { text, .. } => Some(text),
+                    EditorEvent::InputIgnored { text } => Some(text),
+                    _ => None,
+                };
+                if let Some(text) = input_attempted_text
+                    && editor.read(cx).read_only(cx)
+                    && !text.is_empty()
+                {
+                    cx.emit(MessageEditorEvent::InputAttempted(text.clone()));
+                }
+
                 if let EditorEvent::Edited { .. } = event
                     && !editor.read(cx).read_only(cx)
                 {
@@ -403,7 +417,27 @@ impl MessageEditor {
         let text = self.editor.read(cx).text(cx);
         let available_commands = self.available_commands.borrow().clone();
         let agent_name = self.agent_name.clone();
+        let build_task = self.build_content_blocks(full_mention_content, cx);
+
+        cx.spawn(async move |_, _cx| {
+            Self::validate_slash_commands(&text, &available_commands, &agent_name)?;
+            build_task.await
+        })
+    }
+
+    pub fn draft_contents(&self, cx: &mut Context<Self>) -> Task<Result<Vec<acp::ContentBlock>>> {
+        let build_task = self.build_content_blocks(false, cx);
+        cx.spawn(async move |_, _cx| {
+            let (blocks, _tracked_buffers) = build_task.await?;
+            Ok(blocks)
+        })
+    }
 
+    fn build_content_blocks(
+        &self,
+        full_mention_content: bool,
+        cx: &mut Context<Self>,
+    ) -> Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>> {
         let contents = self
             .mention_set
             .update(cx, |store, cx| store.contents(full_mention_content, cx));
@@ -411,18 +445,16 @@ impl MessageEditor {
         let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context;
 
         cx.spawn(async move |_, cx| {
-            Self::validate_slash_commands(&text, &available_commands, &agent_name)?;
-
             let contents = contents.await?;
             let mut all_tracked_buffers = Vec::new();
 
             let result = editor.update(cx, |editor, cx| {
+                let text = editor.text(cx);
                 let (mut ix, _) = text
                     .char_indices()
                     .find(|(_, c)| !c.is_whitespace())
                     .unwrap_or((0, '\0'));
                 let mut chunks: Vec<acp::ContentBlock> = Vec::new();
-                let text = editor.text(cx);
                 editor.display_map.update(cx, |map, cx| {
                     let snapshot = map.snapshot(cx);
                     for (crease_id, crease) in snapshot.crease_snapshot.creases() {
@@ -690,6 +722,9 @@ impl MessageEditor {
                         content_len,
                         crease_text.into(),
                         mention_uri.icon_path(cx),
+                        mention_uri.tooltip_text(),
+                        Some(mention_uri.clone()),
+                        Some(self.workspace.clone()),
                         None,
                         self.editor.clone(),
                         window,
@@ -747,70 +782,93 @@ impl MessageEditor {
                 _ => None,
             })
         {
-            let path_style = workspace.read(cx).project().read(cx).path_style(cx);
-
-            // Parse markdown mention links in format: [@name](uri)
-            let parsed_mentions = parse_mention_links(&clipboard_text, path_style);
-
-            if !parsed_mentions.is_empty() {
+            if clipboard_text.contains("[@") {
                 cx.stop_propagation();
-
-                let insertion_offset = self.editor.update(cx, |editor, cx| {
+                let selections_before = self.editor.update(cx, |editor, cx| {
                     let snapshot = editor.buffer().read(cx).snapshot(cx);
-                    editor.selections.newest_anchor().start.to_offset(&snapshot)
+                    editor
+                        .selections
+                        .disjoint_anchors()
+                        .iter()
+                        .map(|selection| {
+                            (
+                                selection.start.bias_left(&snapshot),
+                                selection.end.bias_right(&snapshot),
+                            )
+                        })
+                        .collect::<Vec<_>>()
                 });
 
-                // Insert the raw text first
                 self.editor.update(cx, |editor, cx| {
                     editor.insert(&clipboard_text, window, cx);
                 });
 
-                let supports_images = self.prompt_capabilities.borrow().image;
-                let http_client = workspace.read(cx).client().http_client();
-
-                // Now create creases for each mention and load their content
                 let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx);
-                for (range, mention_uri) in parsed_mentions {
-                    let start_offset = insertion_offset.0 + range.start;
-                    let anchor = snapshot.anchor_before(MultiBufferOffset(start_offset));
-                    let content_len = range.end - range.start;
-
-                    let Some((crease_id, tx)) = insert_crease_for_mention(
-                        anchor.excerpt_id,
-                        anchor.text_anchor,
-                        content_len,
-                        mention_uri.name().into(),
-                        mention_uri.icon_path(cx),
-                        None,
-                        self.editor.clone(),
-                        window,
-                        cx,
-                    ) else {
-                        continue;
-                    };
+                let path_style = workspace.read(cx).project().read(cx).path_style(cx);
+
+                let mut all_mentions = Vec::new();
+                for (start_anchor, end_anchor) in selections_before {
+                    let start_offset = start_anchor.to_offset(&snapshot);
+                    let end_offset = end_anchor.to_offset(&snapshot);
+
+                    // Get the actual inserted text from the buffer (may differ due to auto-indent)
+                    let inserted_text: String =
+                        snapshot.text_for_range(start_offset..end_offset).collect();
+
+                    let parsed_mentions = parse_mention_links(&inserted_text, path_style);
+                    for (range, mention_uri) in parsed_mentions {
+                        let mention_start_offset = MultiBufferOffset(start_offset.0 + range.start);
+                        let anchor = snapshot.anchor_before(mention_start_offset);
+                        let content_len = range.end - range.start;
+                        all_mentions.push((anchor, content_len, mention_uri));
+                    }
+                }
 
-                    // Create the confirmation task based on the mention URI type.
-                    // This properly loads file content, fetches URLs, etc.
-                    let task = self.mention_set.update(cx, |mention_set, cx| {
-                        mention_set.confirm_mention_for_uri(
-                            mention_uri.clone(),
-                            supports_images,
-                            http_client.clone(),
+                if !all_mentions.is_empty() {
+                    let supports_images = self.prompt_capabilities.borrow().image;
+                    let http_client = workspace.read(cx).client().http_client();
+
+                    for (anchor, content_len, mention_uri) in all_mentions {
+                        let Some((crease_id, tx)) = insert_crease_for_mention(
+                            anchor.excerpt_id,
+                            anchor.text_anchor,
+                            content_len,
+                            mention_uri.name().into(),
+                            mention_uri.icon_path(cx),
+                            mention_uri.tooltip_text(),
+                            Some(mention_uri.clone()),
+                            Some(self.workspace.clone()),
+                            None,
+                            self.editor.clone(),
+                            window,
                             cx,
-                        )
-                    });
-                    let task = cx
-                        .spawn(async move |_, _| task.await.map_err(|e| e.to_string()))
-                        .shared();
+                        ) else {
+                            continue;
+                        };
 
-                    self.mention_set.update(cx, |mention_set, _cx| {
-                        mention_set.insert_mention(crease_id, mention_uri.clone(), task.clone())
-                    });
+                        // Create the confirmation task based on the mention URI type.
+                        // This properly loads file content, fetches URLs, etc.
+                        let task = self.mention_set.update(cx, |mention_set, cx| {
+                            mention_set.confirm_mention_for_uri(
+                                mention_uri.clone(),
+                                supports_images,
+                                http_client.clone(),
+                                cx,
+                            )
+                        });
+                        let task = cx
+                            .spawn(async move |_, _| task.await.map_err(|e| e.to_string()))
+                            .shared();
 
-                    // Drop the tx after inserting to signal the crease is ready
-                    drop(tx);
+                        self.mention_set.update(cx, |mention_set, _cx| {
+                            mention_set.insert_mention(crease_id, mention_uri.clone(), task.clone())
+                        });
+
+                        // Drop the tx after inserting to signal the crease is ready
+                        drop(tx);
+                    }
+                    return;
                 }
-                return;
             }
         }
 
@@ -960,6 +1018,9 @@ impl MessageEditor {
             content_len,
             mention_uri.name().into(),
             mention_uri.icon_path(cx),
+            mention_uri.tooltip_text(),
+            Some(mention_uri.clone()),
+            Some(self.workspace.clone()),
             None,
             self.editor.clone(),
             window,
@@ -1172,13 +1233,45 @@ impl MessageEditor {
         message: Vec<acp::ContentBlock>,
         window: &mut Window,
         cx: &mut Context<Self>,
+    ) {
+        self.clear(window, cx);
+        self.insert_message_blocks(message, false, window, cx);
+    }
+
+    pub fn append_message(
+        &mut self,
+        message: Vec<acp::ContentBlock>,
+        separator: Option<&str>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if message.is_empty() {
+            return;
+        }
+
+        if let Some(separator) = separator
+            && !separator.is_empty()
+            && !self.is_empty(cx)
+        {
+            self.editor.update(cx, |editor, cx| {
+                editor.insert(separator, window, cx);
+            });
+        }
+
+        self.insert_message_blocks(message, true, window, cx);
+    }
+
+    fn insert_message_blocks(
+        &mut self,
+        message: Vec<acp::ContentBlock>,
+        append_to_existing: bool,
+        window: &mut Window,
+        cx: &mut Context<Self>,
     ) {
         let Some(workspace) = self.workspace.upgrade() else {
             return;
         };
 
-        self.clear(window, cx);
-
         let path_style = workspace.read(cx).project().read(cx).path_style(cx);
         let mut text = String::new();
         let mut mentions = Vec::new();
@@ -1252,19 +1345,40 @@ impl MessageEditor {
             }
         }
 
-        let snapshot = self.editor.update(cx, |editor, cx| {
-            editor.set_text(text, window, cx);
-            editor.buffer().read(cx).snapshot(cx)
-        });
+        if text.is_empty() && mentions.is_empty() {
+            return;
+        }
+
+        let insertion_start = if append_to_existing {
+            self.editor.read(cx).text(cx).len()
+        } else {
+            0
+        };
+
+        let snapshot = if append_to_existing {
+            self.editor.update(cx, |editor, cx| {
+                editor.insert(&text, window, cx);
+                editor.buffer().read(cx).snapshot(cx)
+            })
+        } else {
+            self.editor.update(cx, |editor, cx| {
+                editor.set_text(text, window, cx);
+                editor.buffer().read(cx).snapshot(cx)
+            })
+        };
 
         for (range, mention_uri, mention) in mentions {
-            let anchor = snapshot.anchor_before(MultiBufferOffset(range.start));
+            let adjusted_start = insertion_start + range.start;
+            let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start));
             let Some((crease_id, tx)) = insert_crease_for_mention(
                 anchor.excerpt_id,
                 anchor.text_anchor,
                 range.end - range.start,
                 mention_uri.name().into(),
                 mention_uri.icon_path(cx),
+                mention_uri.tooltip_text(),
+                Some(mention_uri.clone()),
+                Some(self.workspace.clone()),
                 None,
                 self.editor.clone(),
                 window,
@@ -1282,6 +1396,7 @@ impl MessageEditor {
                 )
             });
         }
+
         cx.notify();
     }
 
@@ -1289,6 +1404,16 @@ impl MessageEditor {
         self.editor.read(cx).text(cx)
     }
 
+    pub fn insert_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
+        if text.is_empty() {
+            return;
+        }
+
+        self.editor.update(cx, |editor, cx| {
+            editor.insert(text, window, cx);
+        });
+    }
+
     pub fn set_placeholder_text(
         &mut self,
         placeholder: &str,
@@ -1300,7 +1425,7 @@ impl MessageEditor {
         });
     }
 
-    #[cfg(test)]
+    #[cfg(any(test, feature = "test-support"))]
     pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
         self.editor.update(cx, |editor, cx| {
             editor.set_text(text, window, cx);
@@ -1449,12 +1574,16 @@ mod tests {
     use acp_thread::{AgentSessionInfo, MentionUri};
     use agent::{ThreadStore, outline};
     use agent_client_protocol as acp;
-    use editor::{AnchorRangeExt as _, Editor, EditorMode, MultiBufferOffset};
+    use editor::{
+        AnchorRangeExt as _, Editor, EditorMode, MultiBufferOffset, SelectionEffects,
+        actions::Paste,
+    };
 
     use fs::FakeFs;
     use futures::StreamExt as _;
     use gpui::{
-        AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext,
+        AppContext, ClipboardItem, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext,
+        VisualTestContext,
     };
     use language_model::LanguageModelRegistry;
     use lsp::{CompletionContext, CompletionTriggerKind};
@@ -1466,11 +1595,11 @@ mod tests {
     use util::{path, paths::PathStyle, rel_path::rel_path};
     use workspace::{AppState, Item, MultiWorkspace};
 
-    use crate::acp::{
+    use crate::completion_provider::{PromptCompletionProviderDelegate, PromptContextType};
+    use crate::{
+        connection_view::tests::init_test,
         message_editor::{Mention, MessageEditor, parse_mention_links},
-        thread_view::tests::init_test,
     };
-    use crate::completion_provider::{PromptCompletionProviderDelegate, PromptContextType};
 
     #[test]
     fn test_parse_mention_links() {
@@ -1577,8 +1706,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = None;
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let message_editor = cx.update(|window, cx| {
             cx.new(|cx| {
@@ -1691,8 +1820,8 @@ mod tests {
         let (multi_workspace, cx) =
             cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
         let workspace_handle = workspace.downgrade();
         let message_editor = workspace.update_in(cx, |_, window, cx| {
             cx.new(|cx| {
@@ -1847,8 +1976,8 @@ mod tests {
         let mut cx = VisualTestContext::from_window(window.into(), cx);
 
         let thread_store = None;
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
         let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
         let available_commands = Rc::new(RefCell::new(vec![
             acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"),
@@ -2082,8 +2211,8 @@ mod tests {
         }
 
         let thread_store = cx.new(|cx| ThreadStore::new(cx));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
         let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
 
         let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
@@ -2578,8 +2707,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let message_editor = cx.update(|window, cx| {
             cx.new(|cx| {
@@ -2679,8 +2808,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         // Create a thread metadata to insert as summary
         let thread_metadata = AgentSessionInfo {
@@ -2761,8 +2890,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = None;
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let thread_metadata = AgentSessionInfo {
             session_id: acp::SessionId::new("thread-123"),
@@ -2821,8 +2950,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = None;
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let message_editor = cx.update(|window, cx| {
             cx.new(|cx| {
@@ -2876,8 +3005,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let message_editor = cx.update(|window, cx| {
             cx.new(|cx| {
@@ -2932,8 +3061,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let message_editor = cx.update(|window, cx| {
             cx.new(|cx| {
@@ -2997,8 +3126,8 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let (message_editor, editor) = workspace.update_in(cx, |workspace, window, cx| {
             let workspace_handle = cx.weak_entity();
@@ -3157,8 +3286,8 @@ mod tests {
         });
 
         let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         // Create a new `MessageEditor`. The `EditorMode::full()` has to be used
         // to ensure we have a fixed viewport, so we can eventually actually
@@ -3278,8 +3407,8 @@ mod tests {
         let mut cx = VisualTestContext::from_window(window.into(), cx);
 
         let thread_store = cx.new(|cx| ThreadStore::new(cx));
-        let history = cx
-            .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
 
         let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
             let workspace_handle = cx.weak_entity();
@@ -3333,4 +3462,341 @@ mod tests {
             assert_eq!(editor.text(cx), "😄😄@file");
         });
     }
+
+    #[gpui::test]
+    async fn test_paste_mention_link_with_multiple_selections(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let app_state = cx.update(AppState::test);
+
+        cx.update(|cx| {
+            editor::init(cx);
+            workspace::init(app_state.clone(), cx);
+        });
+
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(path!("/project"), json!({"file.txt": "content"}))
+            .await;
+
+        let project = Project::test(app_state.fs.clone(), [path!("/project").as_ref()], cx).await;
+        let window =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = window
+            .read_with(cx, |mw, _| mw.workspace().clone())
+            .unwrap();
+
+        let mut cx = VisualTestContext::from_window(window.into(), cx);
+
+        let thread_store = cx.new(|cx| ThreadStore::new(cx));
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+
+        let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
+            let workspace_handle = cx.weak_entity();
+            let message_editor = cx.new(|cx| {
+                MessageEditor::new(
+                    workspace_handle,
+                    project.downgrade(),
+                    Some(thread_store),
+                    history.downgrade(),
+                    None,
+                    Default::default(),
+                    Default::default(),
+                    "Test Agent".into(),
+                    "Test",
+                    EditorMode::AutoHeight {
+                        max_lines: None,
+                        min_lines: 1,
+                    },
+                    window,
+                    cx,
+                )
+            });
+            workspace.active_pane().update(cx, |pane, cx| {
+                pane.add_item(
+                    Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
+                    true,
+                    true,
+                    None,
+                    window,
+                    cx,
+                );
+            });
+            message_editor.read(cx).focus_handle(cx).focus(window, cx);
+            let editor = message_editor.read(cx).editor().clone();
+            (message_editor, editor)
+        });
+
+        editor.update_in(&mut cx, |editor, window, cx| {
+            editor.set_text(
+                "AAAAAAAAAAAAAAAAAAAAAAAAA     AAAAAAAAAAAAAAAAAAAAAAAAA",
+                window,
+                cx,
+            );
+        });
+
+        cx.run_until_parked();
+
+        editor.update_in(&mut cx, |editor, window, cx| {
+            editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+                s.select_ranges([
+                    MultiBufferOffset(0)..MultiBufferOffset(25), // First selection (large)
+                    MultiBufferOffset(30)..MultiBufferOffset(55), // Second selection (newest)
+                ]);
+            });
+        });
+
+        let mention_link = "[@f](file:///test.txt)";
+        cx.write_to_clipboard(ClipboardItem::new_string(mention_link.into()));
+
+        message_editor.update_in(&mut cx, |message_editor, window, cx| {
+            message_editor.paste(&Paste, window, cx);
+        });
+
+        let text = editor.update(&mut cx, |editor, cx| editor.text(cx));
+        assert!(
+            text.contains("[@f](file:///test.txt)"),
+            "Expected mention link to be pasted, got: {}",
+            text
+        );
+    }
+
+    // Helper that creates a minimal MessageEditor inside a window, returning both
+    // the entity and the underlying VisualTestContext so callers can drive updates.
+    async fn setup_message_editor(
+        cx: &mut TestAppContext,
+    ) -> (Entity<MessageEditor>, &mut VisualTestContext) {
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree("/project", json!({"file.txt": ""})).await;
+        let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
+
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+
+        let message_editor = cx.update(|window, cx| {
+            cx.new(|cx| {
+                MessageEditor::new(
+                    workspace.downgrade(),
+                    project.downgrade(),
+                    None,
+                    history.downgrade(),
+                    None,
+                    Default::default(),
+                    Default::default(),
+                    "Test Agent".into(),
+                    "Test",
+                    EditorMode::AutoHeight {
+                        min_lines: 1,
+                        max_lines: None,
+                    },
+                    window,
+                    cx,
+                )
+            })
+        });
+
+        cx.run_until_parked();
+        (message_editor, cx)
+    }
+
+    #[gpui::test]
+    async fn test_set_message_plain_text(cx: &mut TestAppContext) {
+        init_test(cx);
+        let (message_editor, cx) = setup_message_editor(cx).await;
+
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "hello world".to_string(),
+                ))],
+                window,
+                cx,
+            );
+        });
+
+        let text = message_editor.update(cx, |editor, cx| editor.text(cx));
+        assert_eq!(text, "hello world");
+        assert!(!message_editor.update(cx, |editor, cx| editor.is_empty(cx)));
+    }
+
+    #[gpui::test]
+    async fn test_set_message_replaces_existing_content(cx: &mut TestAppContext) {
+        init_test(cx);
+        let (message_editor, cx) = setup_message_editor(cx).await;
+
+        // Set initial content.
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "old content".to_string(),
+                ))],
+                window,
+                cx,
+            );
+        });
+
+        // Replace with new content.
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "new content".to_string(),
+                ))],
+                window,
+                cx,
+            );
+        });
+
+        let text = message_editor.update(cx, |editor, cx| editor.text(cx));
+        assert_eq!(
+            text, "new content",
+            "set_message should replace old content"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_append_message_to_empty_editor(cx: &mut TestAppContext) {
+        init_test(cx);
+        let (message_editor, cx) = setup_message_editor(cx).await;
+
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.append_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "appended".to_string(),
+                ))],
+                Some("\n\n"),
+                window,
+                cx,
+            );
+        });
+
+        let text = message_editor.update(cx, |editor, cx| editor.text(cx));
+        assert_eq!(
+            text, "appended",
+            "No separator should be inserted when the editor is empty"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_append_message_to_non_empty_editor(cx: &mut TestAppContext) {
+        init_test(cx);
+        let (message_editor, cx) = setup_message_editor(cx).await;
+
+        // Seed initial content.
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "initial".to_string(),
+                ))],
+                window,
+                cx,
+            );
+        });
+
+        // Append with separator.
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.append_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "appended".to_string(),
+                ))],
+                Some("\n\n"),
+                window,
+                cx,
+            );
+        });
+
+        let text = message_editor.update(cx, |editor, cx| editor.text(cx));
+        assert_eq!(
+            text, "initial\n\nappended",
+            "Separator should appear between existing and appended content"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_append_message_preserves_mention_offset(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree("/project", json!({"file.txt": "content"}))
+            .await;
+        let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
+
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+        let history =
+            cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+
+        let message_editor = cx.update(|window, cx| {
+            cx.new(|cx| {
+                MessageEditor::new(
+                    workspace.downgrade(),
+                    project.downgrade(),
+                    None,
+                    history.downgrade(),
+                    None,
+                    Default::default(),
+                    Default::default(),
+                    "Test Agent".into(),
+                    "Test",
+                    EditorMode::AutoHeight {
+                        min_lines: 1,
+                        max_lines: None,
+                    },
+                    window,
+                    cx,
+                )
+            })
+        });
+
+        cx.run_until_parked();
+
+        // Seed plain-text prefix so the editor is non-empty before appending.
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.set_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "prefix text".to_string(),
+                ))],
+                window,
+                cx,
+            );
+        });
+
+        // Append a message that contains a ResourceLink mention.
+        message_editor.update_in(cx, |editor, window, cx| {
+            editor.append_message(
+                vec![acp::ContentBlock::ResourceLink(acp::ResourceLink::new(
+                    "file.txt",
+                    "file:///project/file.txt",
+                ))],
+                Some("\n\n"),
+                window,
+                cx,
+            );
+        });
+
+        cx.run_until_parked();
+
+        // The mention should be registered in the mention_set so that contents()
+        // will emit it as a structured block rather than plain text.
+        let mention_uris =
+            message_editor.update(cx, |editor, cx| editor.mention_set.read(cx).mentions());
+        assert_eq!(
+            mention_uris.len(),
+            1,
+            "Expected exactly one mention in the mention_set after append, got: {mention_uris:?}"
+        );
+
+        // The editor text should start with the prefix, then the separator, then
+        // the mention placeholder — confirming the offset was computed correctly.
+        let text = message_editor.update(cx, |editor, cx| editor.text(cx));
+        assert!(
+            text.starts_with("prefix text\n\n"),
+            "Expected text to start with 'prefix text\\n\\n', got: {text:?}"
+        );
+    }
 }

crates/agent_ui/src/acp/model_selector.rs → crates/agent_ui/src/model_selector.rs 🔗

@@ -23,7 +23,7 @@ use zed_actions::agent::OpenSettings;
 
 use crate::ui::{HoldForDefault, ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem};
 
-pub type AcpModelSelector = Picker<AcpModelPickerDelegate>;
+pub type ModelSelector = Picker<ModelPickerDelegate>;
 
 pub fn acp_model_selector(
     selector: Rc<dyn AgentModelSelector>,
@@ -31,26 +31,25 @@ pub fn acp_model_selector(
     fs: Arc<dyn Fs>,
     focus_handle: FocusHandle,
     window: &mut Window,
-    cx: &mut Context<AcpModelSelector>,
-) -> AcpModelSelector {
-    let delegate =
-        AcpModelPickerDelegate::new(selector, agent_server, fs, focus_handle, window, cx);
+    cx: &mut Context<ModelSelector>,
+) -> ModelSelector {
+    let delegate = ModelPickerDelegate::new(selector, agent_server, fs, focus_handle, window, cx);
     Picker::list(delegate, window, cx)
         .show_scrollbar(true)
         .width(rems(20.))
         .max_height(Some(rems(20.).into()))
 }
 
-enum AcpModelPickerEntry {
+enum ModelPickerEntry {
     Separator(SharedString),
     Model(AgentModelInfo, bool),
 }
 
-pub struct AcpModelPickerDelegate {
+pub struct ModelPickerDelegate {
     selector: Rc<dyn AgentModelSelector>,
     agent_server: Rc<dyn AgentServer>,
     fs: Arc<dyn Fs>,
-    filtered_entries: Vec<AcpModelPickerEntry>,
+    filtered_entries: Vec<ModelPickerEntry>,
     models: Option<AgentModelList>,
     selected_index: usize,
     selected_description: Option<(usize, SharedString, bool)>,
@@ -61,21 +60,21 @@ pub struct AcpModelPickerDelegate {
     focus_handle: FocusHandle,
 }
 
-impl AcpModelPickerDelegate {
+impl ModelPickerDelegate {
     fn new(
         selector: Rc<dyn AgentModelSelector>,
         agent_server: Rc<dyn AgentServer>,
         fs: Arc<dyn Fs>,
         focus_handle: FocusHandle,
         window: &mut Window,
-        cx: &mut Context<AcpModelSelector>,
+        cx: &mut Context<ModelSelector>,
     ) -> Self {
         let rx = selector.watch(cx);
         let refresh_models_task = {
             cx.spawn_in(window, {
                 async move |this, cx| {
                     async fn refresh(
-                        this: &WeakEntity<Picker<AcpModelPickerDelegate>>,
+                        this: &WeakEntity<Picker<ModelPickerDelegate>>,
                         cx: &mut AsyncWindowContext,
                     ) -> Result<()> {
                         let (models_task, selected_model_task) = this.update(cx, |this, cx| {
@@ -188,7 +187,7 @@ impl AcpModelPickerDelegate {
 
         // Keep the picker selection aligned with the newly-selected model
         if let Some(new_index) = self.filtered_entries.iter().position(|entry| {
-            matches!(entry, AcpModelPickerEntry::Model(model_info, _) if self.selected_model.as_ref().is_some_and(|selected| model_info.id == selected.id))
+            matches!(entry, ModelPickerEntry::Model(model_info, _) if self.selected_model.as_ref().is_some_and(|selected| model_info.id == selected.id))
         }) {
             self.set_selected_index(new_index, window, cx);
         } else {
@@ -197,7 +196,7 @@ impl AcpModelPickerDelegate {
     }
 }
 
-impl PickerDelegate for AcpModelPickerDelegate {
+impl PickerDelegate for ModelPickerDelegate {
     type ListItem = AnyElement;
 
     fn match_count(&self) -> usize {
@@ -213,15 +212,10 @@ impl PickerDelegate for AcpModelPickerDelegate {
         cx.notify();
     }
 
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
         match self.filtered_entries.get(ix) {
-            Some(AcpModelPickerEntry::Model(_, _)) => true,
-            Some(AcpModelPickerEntry::Separator(_)) | None => false,
+            Some(ModelPickerEntry::Model(_, _)) => true,
+            Some(ModelPickerEntry::Separator(_)) | None => false,
         }
     }
 
@@ -261,7 +255,7 @@ impl PickerDelegate for AcpModelPickerDelegate {
                     .as_ref()
                     .and_then(|selected| {
                         this.delegate.filtered_entries.iter().position(|entry| {
-                            if let AcpModelPickerEntry::Model(model_info, _) = entry {
+                            if let ModelPickerEntry::Model(model_info, _) = entry {
                                 model_info.id == selected.id
                             } else {
                                 false
@@ -277,7 +271,7 @@ impl PickerDelegate for AcpModelPickerDelegate {
     }
 
     fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
-        if let Some(AcpModelPickerEntry::Model(model_info, _)) =
+        if let Some(ModelPickerEntry::Model(model_info, _)) =
             self.filtered_entries.get(self.selected_index)
         {
             if window.modifiers().secondary() {
@@ -320,10 +314,10 @@ impl PickerDelegate for AcpModelPickerDelegate {
         cx: &mut Context<Picker<Self>>,
     ) -> Option<Self::ListItem> {
         match self.filtered_entries.get(ix)? {
-            AcpModelPickerEntry::Separator(title) => {
+            ModelPickerEntry::Separator(title) => {
                 Some(ModelSelectorHeader::new(title, ix > 1).into_any_element())
             }
-            AcpModelPickerEntry::Model(model_info, is_favorite) => {
+            ModelPickerEntry::Model(model_info, is_favorite) => {
                 let is_selected = Some(model_info) == self.selected_model.as_ref();
                 let default_model = self.agent_server.default_model(cx);
                 let is_default = default_model.as_ref() == Some(&model_info.id);
@@ -434,7 +428,7 @@ impl PickerDelegate for AcpModelPickerDelegate {
 fn info_list_to_picker_entries(
     model_list: AgentModelList,
     favorites: &HashSet<ModelId>,
-) -> Vec<AcpModelPickerEntry> {
+) -> Vec<ModelPickerEntry> {
     let mut entries = Vec::new();
 
     let all_models: Vec<_> = match &model_list {
@@ -450,28 +444,28 @@ fn info_list_to_picker_entries(
 
     let has_favorites = !favorite_models.is_empty();
     if has_favorites {
-        entries.push(AcpModelPickerEntry::Separator("Favorite".into()));
+        entries.push(ModelPickerEntry::Separator("Favorite".into()));
         for model in favorite_models {
-            entries.push(AcpModelPickerEntry::Model((*model).clone(), true));
+            entries.push(ModelPickerEntry::Model((*model).clone(), true));
         }
     }
 
     match model_list {
         AgentModelList::Flat(list) => {
             if has_favorites {
-                entries.push(AcpModelPickerEntry::Separator("All".into()));
+                entries.push(ModelPickerEntry::Separator("All".into()));
             }
             for model in list {
                 let is_favorite = favorites.contains(&model.id);
-                entries.push(AcpModelPickerEntry::Model(model, is_favorite));
+                entries.push(ModelPickerEntry::Model(model, is_favorite));
             }
         }
         AgentModelList::Grouped(index_map) => {
             for (group_name, models) in index_map {
-                entries.push(AcpModelPickerEntry::Separator(group_name.0));
+                entries.push(ModelPickerEntry::Separator(group_name.0));
                 for model in models {
                     let is_favorite = favorites.contains(&model.id);
-                    entries.push(AcpModelPickerEntry::Model(model, is_favorite));
+                    entries.push(ModelPickerEntry::Model(model, is_favorite));
                 }
             }
         }
@@ -608,22 +602,22 @@ mod tests {
             .collect()
     }
 
-    fn get_entry_model_ids(entries: &[AcpModelPickerEntry]) -> Vec<&str> {
+    fn get_entry_model_ids(entries: &[ModelPickerEntry]) -> Vec<&str> {
         entries
             .iter()
             .filter_map(|entry| match entry {
-                AcpModelPickerEntry::Model(info, _) => Some(info.id.0.as_ref()),
+                ModelPickerEntry::Model(info, _) => Some(info.id.0.as_ref()),
                 _ => None,
             })
             .collect()
     }
 
-    fn get_entry_labels(entries: &[AcpModelPickerEntry]) -> Vec<&str> {
+    fn get_entry_labels(entries: &[ModelPickerEntry]) -> Vec<&str> {
         entries
             .iter()
             .map(|entry| match entry {
-                AcpModelPickerEntry::Model(info, _) => info.id.0.as_ref(),
-                AcpModelPickerEntry::Separator(s) => &s,
+                ModelPickerEntry::Model(info, _) => info.id.0.as_ref(),
+                ModelPickerEntry::Separator(s) => &s,
             })
             .collect()
     }
@@ -671,7 +665,7 @@ mod tests {
 
         assert!(matches!(
             entries.first(),
-            Some(AcpModelPickerEntry::Separator(s)) if s == "Favorite"
+            Some(ModelPickerEntry::Separator(s)) if s == "Favorite"
         ));
 
         let model_ids = get_entry_model_ids(&entries);
@@ -687,7 +681,7 @@ mod tests {
 
         assert!(matches!(
             entries.first(),
-            Some(AcpModelPickerEntry::Separator(s)) if s == "zed"
+            Some(ModelPickerEntry::Separator(s)) if s == "zed"
         ));
     }
 
@@ -702,7 +696,7 @@ mod tests {
         let entries = info_list_to_picker_entries(models, &favorites);
 
         for entry in &entries {
-            if let AcpModelPickerEntry::Model(info, is_favorite) = entry {
+            if let ModelPickerEntry::Model(info, is_favorite) = entry {
                 if info.id.0.as_ref() == "zed/claude" {
                     assert!(is_favorite, "zed/claude should be a favorite");
                 } else {
@@ -789,12 +783,12 @@ mod tests {
 
         assert!(matches!(
             entries.first(),
-            Some(AcpModelPickerEntry::Separator(s)) if s == "Favorite"
+            Some(ModelPickerEntry::Separator(s)) if s == "Favorite"
         ));
 
         assert!(entries.iter().any(|e| matches!(
             e,
-            AcpModelPickerEntry::Separator(s) if s == "All"
+            ModelPickerEntry::Separator(s) if s == "All"
         )));
     }
 
@@ -838,7 +832,7 @@ mod tests {
         let entries = info_list_to_picker_entries(models, &favorites);
 
         for entry in &entries {
-            if let AcpModelPickerEntry::Model(info, is_favorite) = entry {
+            if let ModelPickerEntry::Model(info, is_favorite) = entry {
                 if info.id.0.as_ref() == "favorite-model" {
                     assert!(*is_favorite, "favorite-model should have is_favorite=true");
                 } else if info.id.0.as_ref() == "regular-model" {

crates/agent_ui/src/acp/model_selector_popover.rs → crates/agent_ui/src/model_selector_popover.rs 🔗

@@ -7,20 +7,20 @@ use gpui::{Entity, FocusHandle};
 use picker::popover_menu::PickerPopoverMenu;
 use ui::{ButtonLike, PopoverMenuHandle, TintColor, Tooltip, prelude::*};
 
-use crate::acp::{AcpModelSelector, model_selector::acp_model_selector};
 use crate::ui::ModelSelectorTooltip;
+use crate::{ModelSelector, model_selector::acp_model_selector};
 
-pub struct AcpModelSelectorPopover {
-    selector: Entity<AcpModelSelector>,
-    menu_handle: PopoverMenuHandle<AcpModelSelector>,
+pub struct ModelSelectorPopover {
+    selector: Entity<ModelSelector>,
+    menu_handle: PopoverMenuHandle<ModelSelector>,
 }
 
-impl AcpModelSelectorPopover {
+impl ModelSelectorPopover {
     pub(crate) fn new(
         selector: Rc<dyn AgentModelSelector>,
         agent_server: Rc<dyn agent_servers::AgentServer>,
         fs: Arc<dyn Fs>,
-        menu_handle: PopoverMenuHandle<AcpModelSelector>,
+        menu_handle: PopoverMenuHandle<ModelSelector>,
         focus_handle: FocusHandle,
         window: &mut Window,
         cx: &mut Context<Self>,
@@ -48,7 +48,7 @@ impl AcpModelSelectorPopover {
     }
 }
 
-impl Render for AcpModelSelectorPopover {
+impl Render for ModelSelectorPopover {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let selector = self.selector.read(cx);
         let model = selector.delegate.active_model();

crates/agent_ui/src/profile_selector.rs 🔗

@@ -443,12 +443,7 @@ impl PickerDelegate for ProfilePickerDelegate {
         cx.notify();
     }
 
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
         match self.filtered_entries.get(ix) {
             Some(ProfilePickerEntry::Profile(_)) => true,
             Some(ProfilePickerEntry::Header(_)) | None => false,

crates/agent_ui/src/terminal_inline_assistant.rs 🔗

@@ -1,5 +1,5 @@
 use crate::{
-    acp::AcpThreadHistory,
+    ThreadHistory,
     context::load_context,
     inline_prompt_editor::{
         CodegenStatus, PromptEditor, PromptEditorEvent, TerminalInlineAssistId,
@@ -64,7 +64,7 @@ impl TerminalInlineAssistant {
         project: WeakEntity<Project>,
         thread_store: Entity<ThreadStore>,
         prompt_store: Option<Entity<PromptStore>>,
-        history: WeakEntity<AcpThreadHistory>,
+        history: WeakEntity<ThreadHistory>,
         initial_prompt: Option<String>,
         window: &mut Window,
         cx: &mut App,
@@ -276,6 +276,7 @@ impl TerminalInlineAssistant {
                 temperature,
                 thinking_allowed: false,
                 thinking_effort: None,
+                speed: None,
             }
         }))
     }

crates/agent_ui/src/test_support.rs 🔗

@@ -0,0 +1,98 @@
+use acp_thread::{AgentConnection, StubAgentConnection};
+use agent_client_protocol as acp;
+use agent_servers::{AgentServer, AgentServerDelegate};
+use gpui::{Entity, SharedString, Task, TestAppContext, VisualTestContext};
+use settings::SettingsStore;
+use std::any::Any;
+use std::rc::Rc;
+
+use crate::AgentPanel;
+use crate::agent_panel;
+
+pub struct StubAgentServer<C> {
+    connection: C,
+}
+
+impl<C> StubAgentServer<C> {
+    pub fn new(connection: C) -> Self {
+        Self { connection }
+    }
+}
+
+impl StubAgentServer<StubAgentConnection> {
+    pub fn default_response() -> Self {
+        let conn = StubAgentConnection::new();
+        conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
+            acp::ContentChunk::new("Default response".into()),
+        )]);
+        Self::new(conn)
+    }
+}
+
+impl<C> AgentServer for StubAgentServer<C>
+where
+    C: 'static + AgentConnection + Send + Clone,
+{
+    fn logo(&self) -> ui::IconName {
+        ui::IconName::Ai
+    }
+
+    fn name(&self) -> SharedString {
+        "Test".into()
+    }
+
+    fn connect(
+        &self,
+        _delegate: AgentServerDelegate,
+        _cx: &mut gpui::App,
+    ) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
+        Task::ready(Ok(Rc::new(self.connection.clone())))
+    }
+
+    fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+        self
+    }
+}
+
+pub fn init_test(cx: &mut TestAppContext) {
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        theme::init(theme::LoadThemes::JustBase, cx);
+        editor::init(cx);
+        release_channel::init("0.0.0".parse().unwrap(), cx);
+        agent_panel::init(cx);
+    });
+}
+
+pub fn open_thread_with_connection(
+    panel: &Entity<AgentPanel>,
+    connection: StubAgentConnection,
+    cx: &mut VisualTestContext,
+) {
+    panel.update_in(cx, |panel, window, cx| {
+        panel.open_external_thread_with_server(
+            Rc::new(StubAgentServer::new(connection)),
+            window,
+            cx,
+        );
+    });
+    cx.run_until_parked();
+}
+
+pub fn send_message(panel: &Entity<AgentPanel>, cx: &mut VisualTestContext) {
+    let thread_view = panel.read_with(cx, |panel, cx| panel.as_active_thread_view(cx).unwrap());
+    let message_editor = thread_view.read_with(cx, |view, _cx| view.message_editor.clone());
+    message_editor.update_in(cx, |editor, window, cx| {
+        editor.set_text("Hello", window, cx);
+    });
+    thread_view.update_in(cx, |view, window, cx| view.send(window, cx));
+    cx.run_until_parked();
+}
+
+pub fn active_session_id(panel: &Entity<AgentPanel>, cx: &VisualTestContext) -> acp::SessionId {
+    panel.read_with(cx, |panel, cx| {
+        let thread = panel.active_agent_thread(cx).unwrap();
+        thread.read(cx).session_id().clone()
+    })
+}

crates/agent_ui/src/text_thread_editor.rs 🔗

@@ -1495,7 +1495,7 @@ impl TextThreadEditor {
             return;
         };
 
-        // Get buffer info for the delegate call (even if empty, AcpThreadView ignores these
+        // Get buffer info for the delegate call (even if empty, ThreadView ignores these
         // params and calls insert_selections which handles both terminal and buffer)
         if let Some((selections, buffer)) = maybe!({
             let editor = workspace

crates/agent_ui/src/acp/thread_history.rs → crates/agent_ui/src/thread_history.rs 🔗

@@ -1,4 +1,4 @@
-use crate::acp::AcpServerView;
+use crate::ConnectionView;
 use crate::{AgentPanel, RemoveHistory, RemoveSelectedThread};
 use acp_thread::{AgentSessionInfo, AgentSessionList, AgentSessionListRequest, SessionListUpdate};
 use agent_client_protocol as acp;
@@ -27,7 +27,7 @@ fn thread_title(entry: &AgentSessionInfo) -> &SharedString {
         .unwrap_or(DEFAULT_TITLE)
 }
 
-pub struct AcpThreadHistory {
+pub struct ThreadHistory {
     session_list: Option<Rc<dyn AgentSessionList>>,
     sessions: Vec<AgentSessionInfo>,
     scroll_handle: UniformListScrollHandle,
@@ -70,9 +70,9 @@ pub enum ThreadHistoryEvent {
     Open(AgentSessionInfo),
 }
 
-impl EventEmitter<ThreadHistoryEvent> for AcpThreadHistory {}
+impl EventEmitter<ThreadHistoryEvent> for ThreadHistory {}
 
-impl AcpThreadHistory {
+impl ThreadHistory {
     pub fn new(
         session_list: Option<Rc<dyn AgentSessionList>>,
         window: &mut Window,
@@ -720,13 +720,13 @@ impl AcpThreadHistory {
     }
 }
 
-impl Focusable for AcpThreadHistory {
+impl Focusable for ThreadHistory {
     fn focus_handle(&self, cx: &App) -> FocusHandle {
         self.search_editor.focus_handle(cx)
     }
 }
 
-impl Render for AcpThreadHistory {
+impl Render for ThreadHistory {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let has_no_history = self.is_empty();
 
@@ -860,17 +860,17 @@ impl Render for AcpThreadHistory {
 }
 
 #[derive(IntoElement)]
-pub struct AcpHistoryEntryElement {
+pub struct HistoryEntryElement {
     entry: AgentSessionInfo,
-    thread_view: WeakEntity<AcpServerView>,
+    thread_view: WeakEntity<ConnectionView>,
     selected: bool,
     hovered: bool,
     supports_delete: bool,
     on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
 }
 
-impl AcpHistoryEntryElement {
-    pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity<AcpServerView>) -> Self {
+impl HistoryEntryElement {
+    pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity<ConnectionView>) -> Self {
         Self {
             entry,
             thread_view,
@@ -897,7 +897,7 @@ impl AcpHistoryEntryElement {
     }
 }
 
-impl RenderOnce for AcpHistoryEntryElement {
+impl RenderOnce for HistoryEntryElement {
     fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
         let id = ElementId::Name(self.entry.session_id.0.clone().into());
         let title = thread_title(&self.entry).clone();
@@ -1240,7 +1240,7 @@ mod tests {
         ));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1264,7 +1264,7 @@ mod tests {
         ));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
         session_list.clear_requested_cursors();
@@ -1301,7 +1301,7 @@ mod tests {
         ));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1334,7 +1334,7 @@ mod tests {
         ));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1365,7 +1365,7 @@ mod tests {
         ));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1410,7 +1410,7 @@ mod tests {
         );
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
         session_list.clear_requested_cursors();
@@ -1442,7 +1442,7 @@ mod tests {
         let session_list = Rc::new(TestSessionList::new(sessions));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1478,7 +1478,7 @@ mod tests {
         let session_list = Rc::new(TestSessionList::new(sessions));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1511,7 +1511,7 @@ mod tests {
         let session_list = Rc::new(TestSessionList::new(sessions));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1547,7 +1547,7 @@ mod tests {
         let session_list = Rc::new(TestSessionList::new(sessions));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1587,7 +1587,7 @@ mod tests {
         let session_list = Rc::new(TestSessionList::new(sessions));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 
@@ -1624,7 +1624,7 @@ mod tests {
         let session_list = Rc::new(TestSessionList::new(sessions));
 
         let (history, cx) = cx.add_window_view(|window, cx| {
-            AcpThreadHistory::new(Some(session_list.clone()), window, cx)
+            ThreadHistory::new(Some(session_list.clone()), window, cx)
         });
         cx.run_until_parked();
 

crates/agent_ui/src/ui/acp_onboarding_modal.rs 🔗

@@ -1,8 +1,8 @@
-use client::zed_urls;
 use gpui::{
     ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render,
     linear_color_stop, linear_gradient,
 };
+use project::agent_server_store::GEMINI_NAME;
 use ui::{TintColor, Vector, VectorName, prelude::*};
 use workspace::{ModalView, Workspace};
 
@@ -37,7 +37,13 @@ impl AcpOnboardingModal {
 
             if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
                 panel.update(cx, |panel, cx| {
-                    panel.new_agent_thread(AgentType::Gemini, window, cx);
+                    panel.new_agent_thread(
+                        AgentType::Custom {
+                            name: GEMINI_NAME.into(),
+                        },
+                        window,
+                        cx,
+                    );
                 });
             }
         });
@@ -47,11 +53,11 @@ impl AcpOnboardingModal {
         acp_onboarding_event!("Open Panel Clicked");
     }
 
-    fn view_docs(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
-        cx.open_url(&zed_urls::external_agents_docs(cx));
+    fn open_agent_registry(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context<Self>) {
+        window.dispatch_action(Box::new(zed_actions::AcpRegistry), cx);
         cx.notify();
 
-        acp_onboarding_event!("Documentation Link Clicked");
+        acp_onboarding_event!("Open Agent Registry Clicked");
     }
 
     fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
@@ -197,7 +203,7 @@ impl Render for AcpOnboardingModal {
             .icon_size(IconSize::Indicator)
             .icon_color(Color::Muted)
             .full_width()
-            .on_click(cx.listener(Self::view_docs));
+            .on_click(cx.listener(Self::open_agent_registry));
 
         let close_button = h_flex().absolute().top_2().right_2().child(
             IconButton::new("cancel", IconName::Close).on_click(cx.listener(

crates/agent_ui/src/ui/claude_agent_onboarding_modal.rs 🔗

@@ -1,8 +1,8 @@
-use client::zed_urls;
 use gpui::{
     ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render,
     linear_color_stop, linear_gradient,
 };
+use project::agent_server_store::CLAUDE_AGENT_NAME;
 use ui::{TintColor, Vector, VectorName, prelude::*};
 use workspace::{ModalView, Workspace};
 
@@ -37,7 +37,13 @@ impl ClaudeCodeOnboardingModal {
 
             if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
                 panel.update(cx, |panel, cx| {
-                    panel.new_agent_thread(AgentType::ClaudeAgent, window, cx);
+                    panel.new_agent_thread(
+                        AgentType::Custom {
+                            name: CLAUDE_AGENT_NAME.into(),
+                        },
+                        window,
+                        cx,
+                    );
                 });
             }
         });
@@ -47,8 +53,8 @@ impl ClaudeCodeOnboardingModal {
         claude_agent_onboarding_event!("Open Panel Clicked");
     }
 
-    fn view_docs(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
-        cx.open_url(&zed_urls::external_agents_docs(cx));
+    fn view_docs(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context<Self>) {
+        window.dispatch_action(Box::new(zed_actions::AcpRegistry), cx);
         cx.notify();
 
         claude_agent_onboarding_event!("Documentation Link Clicked");

crates/agent_ui/src/ui/mention_crease.rs 🔗

@@ -1,17 +1,28 @@
-use std::time::Duration;
+use std::{ops::RangeInclusive, path::PathBuf, time::Duration};
 
-use gpui::{Animation, AnimationExt, AnyView, IntoElement, Window, pulsating_between};
+use acp_thread::MentionUri;
+use agent_client_protocol as acp;
+use editor::{Editor, SelectionEffects, scroll::Autoscroll};
+use gpui::{
+    Animation, AnimationExt, AnyView, Context, IntoElement, WeakEntity, Window, pulsating_between,
+};
+use prompt_store::PromptId;
+use rope::Point;
 use settings::Settings;
 use theme::ThemeSettings;
-use ui::{ButtonLike, TintColor, prelude::*};
+use ui::{ButtonLike, TintColor, Tooltip, prelude::*};
+use workspace::{OpenOptions, Workspace};
 
 #[derive(IntoElement)]
 pub struct MentionCrease {
     id: ElementId,
     icon: SharedString,
     label: SharedString,
+    mention_uri: Option<MentionUri>,
+    workspace: Option<WeakEntity<Workspace>>,
     is_toggled: bool,
     is_loading: bool,
+    tooltip: Option<SharedString>,
     image_preview: Option<Box<dyn Fn(&mut Window, &mut App) -> AnyView + 'static>>,
 }
 
@@ -25,12 +36,25 @@ impl MentionCrease {
             id: id.into(),
             icon: icon.into(),
             label: label.into(),
+            mention_uri: None,
+            workspace: None,
             is_toggled: false,
             is_loading: false,
+            tooltip: None,
             image_preview: None,
         }
     }
 
+    pub fn mention_uri(mut self, mention_uri: Option<MentionUri>) -> Self {
+        self.mention_uri = mention_uri;
+        self
+    }
+
+    pub fn workspace(mut self, workspace: Option<WeakEntity<Workspace>>) -> Self {
+        self.workspace = workspace;
+        self
+    }
+
     pub fn is_toggled(mut self, is_toggled: bool) -> Self {
         self.is_toggled = is_toggled;
         self
@@ -41,6 +65,11 @@ impl MentionCrease {
         self
     }
 
+    pub fn tooltip(mut self, tooltip: impl Into<SharedString>) -> Self {
+        self.tooltip = Some(tooltip.into());
+        self
+    }
+
     pub fn image_preview(
         mut self,
         builder: impl Fn(&mut Window, &mut App) -> AnyView + 'static,
@@ -55,6 +84,9 @@ impl RenderOnce for MentionCrease {
         let settings = ThemeSettings::get_global(cx);
         let font_size = settings.agent_buffer_font_size(cx);
         let buffer_font = settings.buffer_font.clone();
+        let is_loading = self.is_loading;
+        let tooltip = self.tooltip;
+        let image_preview = self.image_preview;
 
         let button_height = DefiniteLength::Absolute(AbsoluteLength::Pixels(
             px(window.line_height().into()) - px(1.),
@@ -66,9 +98,14 @@ impl RenderOnce for MentionCrease {
             .height(button_height)
             .selected_style(ButtonStyle::Tinted(TintColor::Accent))
             .toggle_state(self.is_toggled)
-            .when_some(self.image_preview, |this, image_preview| {
-                this.hoverable_tooltip(image_preview)
-            })
+            .when_some(
+                self.mention_uri.clone().zip(self.workspace.clone()),
+                |this, (mention_uri, workspace)| {
+                    this.on_click(move |_event, window, cx| {
+                        open_mention_uri(mention_uri.clone(), &workspace, window, cx);
+                    })
+                },
+            )
             .child(
                 h_flex()
                     .pb_px()
@@ -82,7 +119,7 @@ impl RenderOnce for MentionCrease {
                     )
                     .child(self.label.clone())
                     .map(|this| {
-                        if self.is_loading {
+                        if is_loading {
                             this.with_animation(
                                 "loading-context-crease",
                                 Animation::new(Duration::from_secs(2))
@@ -96,5 +133,179 @@ impl RenderOnce for MentionCrease {
                         }
                     }),
             )
+            .map(|button| {
+                if let Some(image_preview) = image_preview {
+                    button.hoverable_tooltip(image_preview)
+                } else {
+                    button.when_some(tooltip, |this, tooltip_text| {
+                        this.tooltip(Tooltip::text(tooltip_text))
+                    })
+                }
+            })
     }
 }
+
+fn open_mention_uri(
+    mention_uri: MentionUri,
+    workspace: &WeakEntity<Workspace>,
+    window: &mut Window,
+    cx: &mut App,
+) {
+    let Some(workspace) = workspace.upgrade() else {
+        return;
+    };
+
+    workspace.update(cx, |workspace, cx| match mention_uri {
+        MentionUri::File { abs_path } => {
+            open_file(workspace, abs_path, None, window, cx);
+        }
+        MentionUri::Symbol {
+            abs_path,
+            line_range,
+            ..
+        }
+        | MentionUri::Selection {
+            abs_path: Some(abs_path),
+            line_range,
+        } => {
+            open_file(workspace, abs_path, Some(line_range), window, cx);
+        }
+        MentionUri::Directory { abs_path } => {
+            reveal_in_project_panel(workspace, abs_path, cx);
+        }
+        MentionUri::Thread { id, name } => {
+            open_thread(workspace, id, name, window, cx);
+        }
+        MentionUri::TextThread { .. } => {}
+        MentionUri::Rule { id, .. } => {
+            open_rule(workspace, id, window, cx);
+        }
+        MentionUri::Fetch { url } => {
+            cx.open_url(url.as_str());
+        }
+        MentionUri::PastedImage
+        | MentionUri::Selection { abs_path: None, .. }
+        | MentionUri::Diagnostics { .. }
+        | MentionUri::TerminalSelection { .. }
+        | MentionUri::GitDiff { .. } => {}
+    });
+}
+
+fn open_file(
+    workspace: &mut Workspace,
+    abs_path: PathBuf,
+    line_range: Option<RangeInclusive<u32>>,
+    window: &mut Window,
+    cx: &mut Context<Workspace>,
+) {
+    let project = workspace.project();
+
+    if let Some(project_path) =
+        project.update(cx, |project, cx| project.find_project_path(&abs_path, cx))
+    {
+        let item = workspace.open_path(project_path, None, true, window, cx);
+        if let Some(line_range) = line_range {
+            window
+                .spawn(cx, async move |cx| {
+                    let Some(editor) = item.await?.downcast::<Editor>() else {
+                        return Ok(());
+                    };
+                    editor
+                        .update_in(cx, |editor, window, cx| {
+                            let range = Point::new(*line_range.start(), 0)
+                                ..Point::new(*line_range.start(), 0);
+                            editor.change_selections(
+                                SelectionEffects::scroll(Autoscroll::center()),
+                                window,
+                                cx,
+                                |selections| selections.select_ranges(vec![range]),
+                            );
+                        })
+                        .ok();
+                    anyhow::Ok(())
+                })
+                .detach_and_log_err(cx);
+        } else {
+            item.detach_and_log_err(cx);
+        }
+    } else if abs_path.exists() {
+        workspace
+            .open_abs_path(
+                abs_path,
+                OpenOptions {
+                    focus: Some(true),
+                    ..Default::default()
+                },
+                window,
+                cx,
+            )
+            .detach_and_log_err(cx);
+    }
+}
+
+fn reveal_in_project_panel(
+    workspace: &mut Workspace,
+    abs_path: PathBuf,
+    cx: &mut Context<Workspace>,
+) {
+    let project = workspace.project();
+    let Some(entry_id) = project.update(cx, |project, cx| {
+        let path = project.find_project_path(&abs_path, cx)?;
+        project.entry_for_path(&path, cx).map(|entry| entry.id)
+    }) else {
+        return;
+    };
+
+    project.update(cx, |_, cx| {
+        cx.emit(project::Event::RevealInProjectPanel(entry_id));
+    });
+}
+
+fn open_thread(
+    workspace: &mut Workspace,
+    id: acp::SessionId,
+    name: String,
+    window: &mut Window,
+    cx: &mut Context<Workspace>,
+) {
+    use crate::AgentPanel;
+    use acp_thread::AgentSessionInfo;
+
+    let Some(panel) = workspace.panel::<AgentPanel>(cx) else {
+        return;
+    };
+
+    panel.update(cx, |panel, cx| {
+        panel.load_agent_thread(
+            AgentSessionInfo {
+                session_id: id,
+                cwd: None,
+                title: Some(name.into()),
+                updated_at: None,
+                meta: None,
+            },
+            window,
+            cx,
+        )
+    });
+}
+
+fn open_rule(
+    _workspace: &mut Workspace,
+    id: PromptId,
+    window: &mut Window,
+    cx: &mut Context<Workspace>,
+) {
+    use zed_actions::assistant::OpenRulesLibrary;
+
+    let PromptId::User { uuid } = id else {
+        return;
+    };
+
+    window.dispatch_action(
+        Box::new(OpenRulesLibrary {
+            prompt_to_select: Some(uuid.0),
+        }),
+        cx,
+    );
+}

crates/anthropic/src/anthropic.rs 🔗

@@ -906,11 +906,17 @@ pub struct ImageSource {
     pub data: String,
 }
 
+fn is_false(value: &bool) -> bool {
+    !value
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 pub struct Tool {
     pub name: String,
     pub description: String,
     pub input_schema: serde_json::Value,
+    #[serde(default, skip_serializing_if = "is_false")]
+    pub eager_input_streaming: bool,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -971,6 +977,8 @@ pub struct Request {
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
     pub stop_sequences: Vec<String>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub speed: Option<Speed>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
     pub temperature: Option<f32>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub top_k: Option<u32>,
@@ -978,6 +986,14 @@ pub struct Request {
     pub top_p: Option<f32>,
 }
 
+#[derive(Debug, Default, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum Speed {
+    #[default]
+    Standard,
+    Fast,
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 struct StreamingRequest {
     #[serde(flatten)]

crates/assistant_text_thread/src/text_thread.rs 🔗

@@ -2275,6 +2275,7 @@ impl TextThread {
             temperature: model.and_then(|model| AgentSettings::temperature_for_model(model, cx)),
             thinking_allowed: true,
             thinking_effort: None,
+            speed: None,
         };
         for message in self.messages(cx) {
             if message.status != MessageStatus::Done {

crates/audio/Cargo.toml 🔗

@@ -30,4 +30,4 @@ thiserror.workspace = true
 util.workspace = true
 
 [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies]
-libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" }
+libwebrtc.workspace = true

crates/auto_update/src/auto_update.rs 🔗

@@ -990,7 +990,7 @@ async fn install_release_macos(
     };
 
     let output = new_command("rsync")
-        .args(["-av", "--delete"])
+        .args(["-av", "--delete", "--exclude", "Icon?"])
         .arg(&mounted_app_path)
         .arg(&running_app_path)
         .output()

crates/auto_update_helper/Cargo.toml 🔗

@@ -19,6 +19,7 @@ log.workspace = true
 simplelog.workspace = true
 
 [target.'cfg(target_os = "windows")'.dependencies]
+scopeguard = "1.2"
 windows.workspace = true
 
 [target.'cfg(target_os = "windows")'.dev-dependencies]

crates/auto_update_helper/src/updater.rs 🔗

@@ -1,13 +1,22 @@
 use std::{
+    ffi::OsStr,
+    os::windows::ffi::OsStrExt,
     path::Path,
     sync::LazyLock,
     time::{Duration, Instant},
 };
 
 use anyhow::{Context as _, Result};
-use windows::Win32::{
-    Foundation::{HWND, LPARAM, WPARAM},
-    UI::WindowsAndMessaging::PostMessageW,
+use windows::{
+    Win32::{
+        Foundation::{HWND, LPARAM, WPARAM},
+        System::RestartManager::{
+            CCH_RM_SESSION_KEY, RmEndSession, RmGetList, RmRegisterResources, RmShutdown,
+            RmStartSession,
+        },
+        UI::WindowsAndMessaging::PostMessageW,
+    },
+    core::{PCWSTR, PWSTR},
 };
 
 use crate::windows_impl::WM_JOB_UPDATED;
@@ -262,9 +271,106 @@ pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| {
     ]
 });
 
+/// Attempts to use Windows Restart Manager to release file handles held by other processes
+/// (e.g., Explorer.exe) on the files we need to move during the update.
+///
+/// This is a best-effort operation - if it fails, we'll still try the update and rely on
+/// the retry logic.
+fn release_file_handles(app_dir: &Path) -> Result<()> {
+    // Files that commonly get locked by Explorer or other processes
+    let files_to_release = [
+        app_dir.join("Zed.exe"),
+        app_dir.join("bin\\Zed.exe"),
+        app_dir.join("bin\\zed"),
+        app_dir.join("conpty.dll"),
+    ];
+
+    log::info!("Attempting to release file handles using Restart Manager...");
+
+    let mut session: u32 = 0;
+    let mut session_key = [0u16; CCH_RM_SESSION_KEY as usize + 1];
+
+    // Start a Restart Manager session
+    let err = unsafe {
+        RmStartSession(
+            &mut session,
+            Some(0),
+            PWSTR::from_raw(session_key.as_mut_ptr()),
+        )
+    };
+    if err.is_err() {
+        anyhow::bail!("RmStartSession failed: {err:?}");
+    }
+
+    // Ensure we end the session when done
+    let _session_guard = scopeguard::guard(session, |s| {
+        let _ = unsafe { RmEndSession(s) };
+    });
+
+    // Convert paths to wide strings for Windows API
+    let wide_paths: Vec<Vec<u16>> = files_to_release
+        .iter()
+        .filter(|p| p.exists())
+        .map(|p| {
+            OsStr::new(p)
+                .encode_wide()
+                .chain(std::iter::once(0))
+                .collect()
+        })
+        .collect();
+
+    if wide_paths.is_empty() {
+        log::info!("No files to release handles for");
+        return Ok(());
+    }
+
+    let pcwstr_paths: Vec<PCWSTR> = wide_paths
+        .iter()
+        .map(|p| PCWSTR::from_raw(p.as_ptr()))
+        .collect();
+
+    // Register the files we want to modify
+    let err = unsafe { RmRegisterResources(session, Some(&pcwstr_paths), None, None) };
+    if err.is_err() {
+        anyhow::bail!("RmRegisterResources failed: {err:?}");
+    }
+
+    // Check if any processes are using these files
+    let mut needed: u32 = 0;
+    let mut count: u32 = 0;
+    let mut reboot_reasons: u32 = 0;
+    let _ = unsafe { RmGetList(session, &mut needed, &mut count, None, &mut reboot_reasons) };
+
+    if needed == 0 {
+        log::info!("No processes are holding handles to the files");
+        return Ok(());
+    }
+
+    log::info!(
+        "{} process(es) are holding handles to the files, requesting release...",
+        needed
+    );
+
+    // Request processes to release their handles
+    // RmShutdown with flags=0 asks applications to release handles gracefully
+    // For Explorer, this typically releases icon cache handles without closing Explorer
+    let err = unsafe { RmShutdown(session, 0, None) };
+    if err.is_err() {
+        anyhow::bail!("RmShutdown failed: {:?}", err);
+    }
+
+    log::info!("Successfully requested handle release");
+    Ok(())
+}
+
 pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool) -> Result<()> {
     let hwnd = hwnd.map(|ptr| HWND(ptr as _));
 
+    // Try to release file handles before starting the update
+    if let Err(e) = release_file_handles(app_dir) {
+        log::warn!("Restart Manager failed (will continue anyway): {}", e);
+    }
+
     let mut last_successful_job = None;
     'outer: for (i, job) in JOBS.iter().enumerate() {
         let start = Instant::now();
@@ -279,19 +385,22 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
                     unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
                     break;
                 }
-                Err(err) => {
-                    // Check if it's a "not found" error
-                    let io_err = err.downcast_ref::<std::io::Error>().unwrap();
-                    if io_err.kind() == std::io::ErrorKind::NotFound {
-                        log::warn!("File or folder not found.");
-                        last_successful_job = Some(i);
-                        unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
-                        break;
+                Err(err) => match err.downcast_ref::<std::io::Error>() {
+                    Some(io_err) => match io_err.kind() {
+                        std::io::ErrorKind::NotFound => {
+                            log::error!("Operation failed with file not found, aborting: {}", err);
+                            break 'outer;
+                        }
+                        _ => {
+                            log::error!("Operation failed (retrying): {}", err);
+                            std::thread::sleep(Duration::from_millis(50));
+                        }
+                    },
+                    None => {
+                        log::error!("Operation failed with unexpected error, aborting: {}", err);
+                        break 'outer;
                     }
-
-                    log::error!("Operation failed: {} ({:?})", err, io_err.kind());
-                    std::thread::sleep(Duration::from_millis(50));
-                }
+                },
             }
         }
     }

crates/buffer_diff/src/buffer_diff.rs 🔗

@@ -1721,7 +1721,7 @@ impl BufferDiff {
             if let Some(language_registry) = language_registry {
                 base_text.set_language_registry(language_registry);
             }
-            base_text.set_language(language, cx);
+            base_text.set_language_async(language, cx);
             base_text.parsing_idle()
         });
         cx.spawn(async move |this, cx| {
@@ -1753,6 +1753,7 @@ impl BufferDiff {
         let should_compare_hunks = update.base_text_edits.is_some() || !base_text_changed;
         let parsing_idle = if let Some(diff) = update.base_text_edits {
             state.base_text.update(cx, |base_text, cx| {
+                base_text.set_sync_parse_timeout(None);
                 base_text.set_capability(Capability::ReadWrite, cx);
                 base_text.apply_diff(diff, cx);
                 base_text.set_capability(Capability::ReadOnly, cx);
@@ -1760,6 +1761,7 @@ impl BufferDiff {
             })
         } else if update.base_text_changed {
             state.base_text.update(cx, |base_text, cx| {
+                base_text.set_sync_parse_timeout(None);
                 base_text.set_capability(Capability::ReadWrite, cx);
                 base_text.set_text(new_state.base_text.clone(), cx);
                 base_text.set_capability(Capability::ReadOnly, cx);

crates/channel/src/channel_buffer.rs 🔗

@@ -22,6 +22,7 @@ pub(crate) fn init(client: &AnyProtoClient) {
 pub struct ChannelBuffer {
     pub channel_id: ChannelId,
     connected: bool,
+    rejoining: bool,
     collaborators: HashMap<PeerId, Collaborator>,
     user_store: Entity<UserStore>,
     channel_store: Entity<ChannelStore>,
@@ -84,6 +85,7 @@ impl ChannelBuffer {
                 buffer_epoch: response.epoch,
                 client,
                 connected: true,
+                rejoining: false,
                 collaborators: Default::default(),
                 acknowledge_task: None,
                 channel_id: channel.id,
@@ -111,6 +113,7 @@ impl ChannelBuffer {
 
     pub fn connected(&mut self, cx: &mut Context<Self>) {
         self.connected = true;
+        self.rejoining = false;
         if self.subscription.is_none() {
             let Ok(subscription) = self.client.subscribe_to_entity(self.channel_id.0) else {
                 return;
@@ -120,6 +123,10 @@ impl ChannelBuffer {
         }
     }
 
+    pub(crate) fn set_rejoining(&mut self, rejoining: bool) {
+        self.rejoining = rejoining;
+    }
+
     pub fn remote_id(&self, cx: &App) -> BufferId {
         self.buffer.read(cx).remote_id()
     }
@@ -204,6 +211,9 @@ impl ChannelBuffer {
                     return;
                 }
                 let operation = language::proto::serialize_operation(operation);
+                if self.rejoining {
+                    return;
+                }
                 self.client
                     .send(proto::UpdateChannelBuffer {
                         channel_id: self.channel_id.0,
@@ -263,6 +273,7 @@ impl ChannelBuffer {
         log::info!("channel buffer {} disconnected", self.channel_id);
         if self.connected {
             self.connected = false;
+            self.rejoining = false;
             self.subscription.take();
             cx.emit(ChannelBufferEvent::Disconnected);
             cx.notify()

crates/channel/src/channel_store.rs 🔗

@@ -855,12 +855,18 @@ impl ChannelStore {
             if let OpenEntityHandle::Open(buffer) = buffer
                 && let Some(buffer) = buffer.upgrade()
             {
-                let channel_buffer = buffer.read(cx);
-                let buffer = channel_buffer.buffer().read(cx);
-                buffer_versions.push(proto::ChannelBufferVersion {
-                    channel_id: channel_buffer.channel_id.0,
-                    epoch: channel_buffer.epoch(),
-                    version: language::proto::serialize_version(&buffer.version()),
+                buffer.update(cx, |channel_buffer, cx| {
+                    // Block on_buffer_update from sending UpdateChannelBuffer messages
+                    // until the rejoin completes. This prevents a race condition where
+                    // edits made during the rejoin async gap could inflate the server
+                    // version, causing offline edits to be filtered out by serialize_ops.
+                    channel_buffer.set_rejoining(true);
+                    let inner_buffer = channel_buffer.buffer().read(cx);
+                    buffer_versions.push(proto::ChannelBufferVersion {
+                        channel_id: channel_buffer.channel_id.0,
+                        epoch: channel_buffer.epoch(),
+                        version: language::proto::serialize_version(&inner_buffer.version()),
+                    });
                 });
             }
         }
@@ -874,7 +880,26 @@ impl ChannelStore {
         });
 
         cx.spawn(async move |this, cx| {
-            let mut response = response.await?;
+            let response = match response.await {
+                Ok(response) => response,
+                Err(err) => {
+                    // Clear rejoining flag on all buffers since the rejoin failed
+                    this.update(cx, |this, cx| {
+                        for buffer in this.opened_buffers.values() {
+                            if let OpenEntityHandle::Open(buffer) = buffer {
+                                if let Some(buffer) = buffer.upgrade() {
+                                    buffer.update(cx, |channel_buffer, _| {
+                                        channel_buffer.set_rejoining(false);
+                                    });
+                                }
+                            }
+                        }
+                    })
+                    .ok();
+                    return Err(err);
+                }
+            };
+            let mut response = response;
 
             this.update(cx, |this, cx| {
                 this.opened_buffers.retain(|_, buffer| match buffer {
@@ -948,6 +973,22 @@ impl ChannelStore {
     fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut Context<Self>) {
         cx.notify();
         self.did_subscribe = false;
+
+        // If we're waiting for reconnect, set rejoining=true on all buffers immediately.
+        // This prevents operations from being sent during the reconnection window,
+        // before handle_connect has a chance to run and capture the version.
+        if wait_for_reconnect {
+            for buffer in self.opened_buffers.values() {
+                if let OpenEntityHandle::Open(buffer) = buffer {
+                    if let Some(buffer) = buffer.upgrade() {
+                        buffer.update(cx, |channel_buffer, _| {
+                            channel_buffer.set_rejoining(true);
+                        });
+                    }
+                }
+            }
+        }
+
         self.disconnect_channel_buffers_task.get_or_insert_with(|| {
             cx.spawn(async move |this, cx| {
                 if wait_for_reconnect {

crates/client/src/zed_urls.rs 🔗

@@ -44,22 +44,6 @@ pub fn ai_privacy_and_security(cx: &App) -> String {
     )
 }
 
-/// Returns the URL to Zed AI's external agents documentation.
-pub fn external_agents_docs(cx: &App) -> String {
-    format!(
-        "{server_url}/docs/ai/external-agents",
-        server_url = server_url(cx)
-    )
-}
-
-/// Returns the URL to Zed agent servers documentation.
-pub fn agent_server_docs(cx: &App) -> String {
-    format!(
-        "{server_url}/docs/extensions/agent-servers",
-        server_url = server_url(cx)
-    )
-}
-
 /// Returns the URL to Zed's edit prediction documentation.
 pub fn edit_prediction_docs(cx: &App) -> String {
     format!(

crates/clock/src/clock.rs 🔗

@@ -61,8 +61,8 @@ pub type Seq = u32;
 /// used to determine the ordering of events in the editor.
 #[derive(Clone, Copy, Eq, Hash, PartialEq, Serialize, Deserialize)]
 pub struct Lamport {
-    pub replica_id: ReplicaId,
     pub value: Seq,
+    pub replica_id: ReplicaId,
 }
 
 /// A [version vector](https://en.wikipedia.org/wiki/Version_vector).

crates/cloud_api_client/src/cloud_api_client.rs 🔗

@@ -9,7 +9,9 @@ use futures::AsyncReadExt as _;
 use gpui::{App, Task};
 use gpui_tokio::Tokio;
 use http_client::http::request;
-use http_client::{AsyncBody, HttpClientWithUrl, HttpRequestExt, Method, Request, StatusCode};
+use http_client::{
+    AsyncBody, HttpClientWithUrl, HttpRequestExt, Json, Method, Request, StatusCode,
+};
 use parking_lot::RwLock;
 use thiserror::Error;
 use yawc::WebSocket;
@@ -141,6 +143,7 @@ impl CloudApiClient {
     pub async fn create_llm_token(
         &self,
         system_id: Option<String>,
+        organization_id: Option<OrganizationId>,
     ) -> Result<CreateLlmTokenResponse, ClientApiError> {
         let request_builder = Request::builder()
             .method(Method::POST)
@@ -153,7 +156,10 @@ impl CloudApiClient {
                 builder.header(ZED_SYSTEM_ID_HEADER_NAME, system_id)
             });
 
-        let request = self.build_request(request_builder, AsyncBody::default())?;
+        let request = self.build_request(
+            request_builder,
+            Json(CreateLlmTokenBody { organization_id }),
+        )?;
 
         let mut response = self.http_client.send(request).await?;
 

crates/cloud_api_types/src/cloud_api_types.rs 🔗

@@ -52,6 +52,12 @@ pub struct AcceptTermsOfServiceResponse {
 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub struct LlmToken(pub String);
 
+#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)]
+pub struct CreateLlmTokenBody {
+    #[serde(default)]
+    pub organization_id: Option<OrganizationId>,
+}
+
 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub struct CreateLlmTokenResponse {
     pub token: LlmToken,
@@ -62,6 +68,7 @@ pub struct SubmitAgentThreadFeedbackBody {
     pub organization_id: Option<OrganizationId>,
     pub agent: String,
     pub session_id: String,
+    pub parent_session_id: Option<String>,
     pub rating: String,
     pub thread: serde_json::Value,
 }

crates/cloud_llm_client/src/cloud_llm_client.rs 🔗

@@ -142,6 +142,8 @@ pub struct PredictEditsResponse {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct AcceptEditPredictionBody {
     pub request_id: String,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub model_version: Option<String>,
 }
 
 #[derive(Debug, Clone, Deserialize)]
@@ -160,6 +162,8 @@ pub struct EditPredictionRejection {
     #[serde(default)]
     pub reason: EditPredictionRejectReason,
     pub was_shown: bool,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub model_version: Option<String>,
 }
 
 #[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
@@ -302,6 +306,8 @@ pub struct LanguageModel {
     pub supports_tools: bool,
     pub supports_images: bool,
     pub supports_thinking: bool,
+    #[serde(default)]
+    pub supports_fast_mode: bool,
     pub supported_effort_levels: Vec<SupportedEffortLevel>,
     #[serde(default)]
     pub supports_streaming_tools: bool,

crates/cloud_llm_client/src/predict_edits_v3.rs 🔗

@@ -1,6 +1,7 @@
 use crate::PredictEditsRequestTrigger;
 use serde::{Deserialize, Serialize};
 use std::borrow::Cow;
+use std::ops::Range;
 
 #[derive(Debug, Deserialize, Serialize)]
 pub struct RawCompletionRequest {
@@ -27,6 +28,13 @@ pub struct PredictEditsV3Request {
 pub struct PredictEditsV3Response {
     pub request_id: String,
     pub output: String,
+    /// The editable region byte range within `cursor_excerpt` that the
+    /// server used for this request. When present, the client should use
+    /// this range to extract the old text from its local excerpt for
+    /// diffing, rather than relying on its own format-derived range.
+    pub editable_range: Range<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub model_version: Option<String>,
 }
 
 #[derive(Debug, Deserialize, Serialize)]

crates/collab/migrations.sqlite/20221109000000_test_schema.sql 🔗

@@ -122,6 +122,8 @@ CREATE TABLE "project_repository_statuses" (
     "status_kind" INT4 NOT NULL,
     "first_status" INT4 NULL,
     "second_status" INT4 NULL,
+    "lines_added" INT4 NULL,
+    "lines_deleted" INT4 NULL,
     "scan_id" INT8 NOT NULL,
     "is_deleted" BOOL NOT NULL,
     PRIMARY KEY (project_id, repository_id, repo_path)

crates/collab/migrations/20251208000000_test_schema.sql 🔗

@@ -315,6 +315,8 @@ CREATE TABLE public.project_repository_statuses (
     status_kind integer NOT NULL,
     first_status integer,
     second_status integer,
+    lines_added integer,
+    lines_deleted integer,
     scan_id bigint NOT NULL,
     is_deleted boolean NOT NULL
 );

crates/collab/src/db.rs 🔗

@@ -732,6 +732,8 @@ fn db_status_to_proto(
         status: Some(proto::GitFileStatus {
             variant: Some(variant),
         }),
+        diff_stat_added: entry.lines_added.map(|v| v as u32),
+        diff_stat_deleted: entry.lines_deleted.map(|v| v as u32),
     })
 }
 

crates/collab/src/db/queries/projects.rs 🔗

@@ -334,147 +334,6 @@ impl Database {
                     .await?;
             }
 
-            // Backward-compatibility for old Zed clients.
-            //
-            // Remove this block when Zed 1.80 stable has been out for a week.
-            {
-                if !update.updated_repositories.is_empty() {
-                    project_repository::Entity::insert_many(
-                        update.updated_repositories.iter().map(|repository| {
-                            project_repository::ActiveModel {
-                                project_id: ActiveValue::set(project_id),
-                                legacy_worktree_id: ActiveValue::set(Some(worktree_id)),
-                                id: ActiveValue::set(repository.repository_id as i64),
-                                scan_id: ActiveValue::set(update.scan_id as i64),
-                                is_deleted: ActiveValue::set(false),
-                                branch_summary: ActiveValue::Set(
-                                    repository
-                                        .branch_summary
-                                        .as_ref()
-                                        .map(|summary| serde_json::to_string(summary).unwrap()),
-                                ),
-                                current_merge_conflicts: ActiveValue::Set(Some(
-                                    serde_json::to_string(&repository.current_merge_conflicts)
-                                        .unwrap(),
-                                )),
-                                // Old clients do not use abs path, entry ids, head_commit_details, or merge_message.
-                                abs_path: ActiveValue::set(String::new()),
-                                entry_ids: ActiveValue::set("[]".into()),
-                                head_commit_details: ActiveValue::set(None),
-                                merge_message: ActiveValue::set(None),
-                                remote_upstream_url: ActiveValue::set(None),
-                                remote_origin_url: ActiveValue::set(None),
-                            }
-                        }),
-                    )
-                    .on_conflict(
-                        OnConflict::columns([
-                            project_repository::Column::ProjectId,
-                            project_repository::Column::Id,
-                        ])
-                        .update_columns([
-                            project_repository::Column::ScanId,
-                            project_repository::Column::BranchSummary,
-                            project_repository::Column::CurrentMergeConflicts,
-                        ])
-                        .to_owned(),
-                    )
-                    .exec(&*tx)
-                    .await?;
-
-                    let has_any_statuses = update
-                        .updated_repositories
-                        .iter()
-                        .any(|repository| !repository.updated_statuses.is_empty());
-
-                    if has_any_statuses {
-                        project_repository_statuses::Entity::insert_many(
-                            update.updated_repositories.iter().flat_map(
-                                |repository: &proto::RepositoryEntry| {
-                                    repository.updated_statuses.iter().map(|status_entry| {
-                                        let (repo_path, status_kind, first_status, second_status) =
-                                            proto_status_to_db(status_entry.clone());
-                                        project_repository_statuses::ActiveModel {
-                                            project_id: ActiveValue::set(project_id),
-                                            repository_id: ActiveValue::set(
-                                                repository.repository_id as i64,
-                                            ),
-                                            scan_id: ActiveValue::set(update.scan_id as i64),
-                                            is_deleted: ActiveValue::set(false),
-                                            repo_path: ActiveValue::set(repo_path),
-                                            status: ActiveValue::set(0),
-                                            status_kind: ActiveValue::set(status_kind),
-                                            first_status: ActiveValue::set(first_status),
-                                            second_status: ActiveValue::set(second_status),
-                                        }
-                                    })
-                                },
-                            ),
-                        )
-                        .on_conflict(
-                            OnConflict::columns([
-                                project_repository_statuses::Column::ProjectId,
-                                project_repository_statuses::Column::RepositoryId,
-                                project_repository_statuses::Column::RepoPath,
-                            ])
-                            .update_columns([
-                                project_repository_statuses::Column::ScanId,
-                                project_repository_statuses::Column::StatusKind,
-                                project_repository_statuses::Column::FirstStatus,
-                                project_repository_statuses::Column::SecondStatus,
-                            ])
-                            .to_owned(),
-                        )
-                        .exec(&*tx)
-                        .await?;
-                    }
-
-                    for repo in &update.updated_repositories {
-                        if !repo.removed_statuses.is_empty() {
-                            project_repository_statuses::Entity::update_many()
-                                .filter(
-                                    project_repository_statuses::Column::ProjectId
-                                        .eq(project_id)
-                                        .and(
-                                            project_repository_statuses::Column::RepositoryId
-                                                .eq(repo.repository_id),
-                                        )
-                                        .and(
-                                            project_repository_statuses::Column::RepoPath
-                                                .is_in(repo.removed_statuses.iter()),
-                                        ),
-                                )
-                                .set(project_repository_statuses::ActiveModel {
-                                    is_deleted: ActiveValue::Set(true),
-                                    scan_id: ActiveValue::Set(update.scan_id as i64),
-                                    ..Default::default()
-                                })
-                                .exec(&*tx)
-                                .await?;
-                        }
-                    }
-                }
-
-                if !update.removed_repositories.is_empty() {
-                    project_repository::Entity::update_many()
-                        .filter(
-                            project_repository::Column::ProjectId
-                                .eq(project_id)
-                                .and(project_repository::Column::LegacyWorktreeId.eq(worktree_id))
-                                .and(project_repository::Column::Id.is_in(
-                                    update.removed_repositories.iter().map(|id| *id as i64),
-                                )),
-                        )
-                        .set(project_repository::ActiveModel {
-                            is_deleted: ActiveValue::Set(true),
-                            scan_id: ActiveValue::Set(update.scan_id as i64),
-                            ..Default::default()
-                        })
-                        .exec(&*tx)
-                        .await?;
-                }
-            }
-
             let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
             Ok(connection_ids)
         })
@@ -552,6 +411,12 @@ impl Database {
                             status_kind: ActiveValue::set(status_kind),
                             first_status: ActiveValue::set(first_status),
                             second_status: ActiveValue::set(second_status),
+                            lines_added: ActiveValue::set(
+                                status_entry.diff_stat_added.map(|v| v as i32),
+                            ),
+                            lines_deleted: ActiveValue::set(
+                                status_entry.diff_stat_deleted.map(|v| v as i32),
+                            ),
                         }
                     }),
                 )
@@ -566,6 +431,8 @@ impl Database {
                         project_repository_statuses::Column::StatusKind,
                         project_repository_statuses::Column::FirstStatus,
                         project_repository_statuses::Column::SecondStatus,
+                        project_repository_statuses::Column::LinesAdded,
+                        project_repository_statuses::Column::LinesDeleted,
                     ])
                     .to_owned(),
                 )
@@ -1002,7 +869,7 @@ impl Database {
                     repositories.push(proto::UpdateRepository {
                         project_id: db_repository_entry.project_id.0 as u64,
                         id: db_repository_entry.id as u64,
-                        abs_path: db_repository_entry.abs_path,
+                        abs_path: db_repository_entry.abs_path.clone(),
                         entry_ids,
                         updated_statuses,
                         removed_statuses: Vec::new(),
@@ -1015,6 +882,7 @@ impl Database {
                         stash_entries: Vec::new(),
                         remote_upstream_url: db_repository_entry.remote_upstream_url.clone(),
                         remote_origin_url: db_repository_entry.remote_origin_url.clone(),
+                        original_repo_abs_path: Some(db_repository_entry.abs_path),
                     });
                 }
             }

crates/collab/src/db/queries/rooms.rs 🔗

@@ -738,7 +738,7 @@ impl Database {
                     while let Some(db_status) = db_statuses.next().await {
                         let db_status: project_repository_statuses::Model = db_status?;
                         if db_status.is_deleted {
-                            removed_statuses.push(db_status.repo_path);
+                            removed_statuses.push(db_status.repo_path.clone());
                         } else {
                             updated_statuses.push(db_status_to_proto(db_status)?);
                         }
@@ -791,13 +791,14 @@ impl Database {
                             head_commit_details,
                             project_id: project_id.to_proto(),
                             id: db_repository.id as u64,
-                            abs_path: db_repository.abs_path,
+                            abs_path: db_repository.abs_path.clone(),
                             scan_id: db_repository.scan_id as u64,
                             is_last_update: true,
                             merge_message: db_repository.merge_message,
                             stash_entries: Vec::new(),
                             remote_upstream_url: db_repository.remote_upstream_url.clone(),
                             remote_origin_url: db_repository.remote_origin_url.clone(),
+                            original_repo_abs_path: Some(db_repository.abs_path),
                         });
                     }
                 }

crates/collab/src/db/tables/project_repository_statuses.rs 🔗

@@ -17,6 +17,8 @@ pub struct Model {
     pub first_status: Option<i32>,
     /// For unmerged entries, this is the `second_head` status. For tracked entries, this is the `worktree_status`.
     pub second_status: Option<i32>,
+    pub lines_added: Option<i32>,
+    pub lines_deleted: Option<i32>,
     pub scan_id: i64,
     pub is_deleted: bool,
 }

crates/collab/src/rpc.rs 🔗

@@ -437,6 +437,8 @@ impl Server {
             .add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
             .add_request_handler(forward_mutating_project_request::<proto::GitCreateRemote>)
             .add_request_handler(forward_mutating_project_request::<proto::GitRemoveRemote>)
+            .add_request_handler(forward_read_only_project_request::<proto::GitGetWorktrees>)
+            .add_request_handler(forward_mutating_project_request::<proto::GitCreateWorktree>)
             .add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)
             .add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
             .add_message_handler(update_context)

crates/collab/tests/integration/channel_buffer_tests.rs 🔗

@@ -3,6 +3,7 @@ use call::ActiveCall;
 use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL;
 use client::{Collaborator, ParticipantIndex, UserId};
 use collab::rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT};
+
 use collab_ui::channel_view::ChannelView;
 use collections::HashMap;
 use editor::{Anchor, Editor, MultiBufferOffset, ToOffset};
@@ -698,6 +699,165 @@ async fn test_channel_buffer_changes_persist(
     });
 }
 
+#[gpui::test]
+async fn test_channel_buffer_operations_lost_on_reconnect(
+    executor: BackgroundExecutor,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+) {
+    let mut server = TestServer::start(executor.clone()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+
+    let channel_id = server
+        .make_channel(
+            "the-channel",
+            None,
+            (&client_a, cx_a),
+            &mut [(&client_b, cx_b)],
+        )
+        .await;
+
+    // Both clients open the channel buffer.
+    let channel_buffer_a = client_a
+        .channel_store()
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+    let channel_buffer_b = client_b
+        .channel_store()
+        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+
+    // Step 1: Client A makes an initial edit that syncs to B.
+    channel_buffer_a.update(cx_a, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(0..0, "a")], None, cx);
+        })
+    });
+    executor.run_until_parked();
+
+    // Verify both clients see "a".
+    channel_buffer_a.read_with(cx_a, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "a");
+    });
+    channel_buffer_b.read_with(cx_b, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "a");
+    });
+
+    // Step 2: Disconnect client A. Do NOT advance past RECONNECT_TIMEOUT
+    // so that the buffer stays in `opened_buffers` for rejoin.
+    server.forbid_connections();
+    server.disconnect_client(client_a.peer_id().unwrap());
+    executor.run_until_parked();
+
+    // Step 3: While disconnected, client A makes an offline edit ("b").
+    // on_buffer_update fires but client.send() fails because transport is down.
+    channel_buffer_a.update(cx_a, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(1..1, "b")], None, cx);
+        })
+    });
+    executor.run_until_parked();
+
+    // Client A sees "ab" locally; B still sees "a".
+    channel_buffer_a.read_with(cx_a, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "ab");
+    });
+    channel_buffer_b.read_with(cx_b, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "a");
+    });
+
+    // Step 4: Reconnect and make a racing edit in parallel.
+    //
+    // The race condition occurs when:
+    // 1. Transport reconnects, handle_connect captures version V (with "b") and sends RejoinChannelBuffers
+    // 2. DURING the async gap (awaiting response), user makes edit "c"
+    // 3. on_buffer_update sends UpdateChannelBuffer (succeeds because transport is up)
+    // 4. Server receives BOTH messages concurrently (FuturesUnordered)
+    // 5. If UpdateChannelBuffer commits first, server version is inflated to include "c"
+    // 6. RejoinChannelBuffers reads inflated version and sends it back
+    // 7. Client's serialize_ops(inflated_version) filters out "b" (offline edit)
+    //    because the inflated version's timestamp covers "b"'s timestamp
+
+    // Get the buffer handle for spawning
+    let buffer_for_edit = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
+
+    // Spawn the edit task - it will wait for executor to run it
+    let edit_task = cx_a.spawn({
+        let buffer = buffer_for_edit;
+        async move |mut cx| {
+            let _ = buffer.update(&mut cx, |buffer, cx| {
+                buffer.edit([(2..2, "c")], None, cx);
+            });
+        }
+    });
+
+    // Allow connections so reconnect can succeed
+    server.allow_connections();
+
+    // Advance clock to trigger reconnection attempt
+    executor.advance_clock(RECEIVE_TIMEOUT);
+
+    // Run the edit task - this races with handle_connect
+    edit_task.detach();
+
+    // Let everything settle.
+    executor.run_until_parked();
+
+    // Step 5: Read final buffer text from both clients.
+    let text_a = channel_buffer_a.read_with(cx_a, |buffer, cx| buffer.buffer().read(cx).text());
+    let text_b = channel_buffer_b.read_with(cx_b, |buffer, cx| buffer.buffer().read(cx).text());
+
+    // Both clients must see the same text containing all three edits.
+    assert_eq!(
+        text_a, text_b,
+        "Client A and B diverged! A sees {:?}, B sees {:?}. \
+         Operations were lost during reconnection.",
+        text_a, text_b
+    );
+    assert!(
+        text_a.contains('a'),
+        "Initial edit 'a' missing from final text {:?}",
+        text_a
+    );
+    assert!(
+        text_a.contains('b'),
+        "Offline edit 'b' missing from final text {:?}. \
+         This is the reconnection race bug: the offline operation was \
+         filtered out by serialize_ops because the server_version was \
+         inflated by a racing UpdateChannelBuffer.",
+        text_a
+    );
+    assert!(
+        text_a.contains('c'),
+        "Racing edit 'c' missing from final text {:?}",
+        text_a
+    );
+
+    // Step 6: Verify the invariant directly — every operation known to
+    // client A must be observed by client B's version. If any operation
+    // in A's history is not covered by B's version, it was lost.
+    channel_buffer_a.read_with(cx_a, |buf_a, cx_a_inner| {
+        let buffer_a = buf_a.buffer().read(cx_a_inner);
+        let ops_a = buffer_a.operations();
+        channel_buffer_b.read_with(cx_b, |buf_b, cx_b_inner| {
+            let buffer_b = buf_b.buffer().read(cx_b_inner);
+            let version_b = buffer_b.version();
+            for (lamport, _op) in ops_a.iter() {
+                assert!(
+                    version_b.observed(*lamport),
+                    "Operation with lamport timestamp {:?} from client A \
+                     is NOT observed by client B's version. This operation \
+                     was lost during reconnection.",
+                    lamport
+                );
+            }
+        });
+    });
+}
+
 #[track_caller]
 fn assert_collaborators(collaborators: &HashMap<PeerId, Collaborator>, ids: &[Option<UserId>]) {
     let mut user_ids = collaborators

crates/collab/tests/integration/following_tests.rs 🔗

@@ -8,8 +8,8 @@ use collab_ui::{
 };
 use editor::{Editor, MultiBuffer, MultiBufferOffset, PathKey, SelectionEffects};
 use gpui::{
-    AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString, TestAppContext,
-    VisualContext, VisualTestContext, point,
+    Action, AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString,
+    TestAppContext, VisualContext, VisualTestContext, point,
 };
 use language::Capability;
 use rpc::proto::PeerId;
@@ -18,7 +18,7 @@ use settings::SettingsStore;
 use text::{Point, ToPoint};
 use util::{path, rel_path::rel_path, test::sample_text};
 use workspace::{
-    CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace,
+    CloseWindow, CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace,
     item::ItemHandle as _,
 };
 
@@ -259,8 +259,8 @@ async fn test_basic_following(
 
     // Client C closes the project.
     let weak_workspace_c = workspace_c.downgrade();
-    workspace_c.update_in(cx_c, |workspace, window, cx| {
-        workspace.close_window(&Default::default(), window, cx);
+    workspace_c.update_in(cx_c, |_, window, cx| {
+        window.dispatch_action(Box::new(CloseWindow) as Box<dyn Action>, cx);
     });
     executor.run_until_parked();
     // are you sure you want to leave the call?

crates/collab/tests/integration/git_tests.rs 🔗

@@ -1,17 +1,40 @@
-use std::path::Path;
+use std::path::{Path, PathBuf};
 
 use call::ActiveCall;
-use git::status::{FileStatus, StatusCode, TrackedStatus};
-use git_ui::project_diff::ProjectDiff;
-use gpui::{AppContext as _, TestAppContext, VisualTestContext};
+use collections::HashMap;
+use git::{
+    repository::RepoPath,
+    status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
+};
+use git_ui::{git_panel::GitPanel, project_diff::ProjectDiff};
+use gpui::{AppContext as _, BackgroundExecutor, TestAppContext, VisualTestContext};
 use project::ProjectPath;
 use serde_json::json;
+
 use util::{path, rel_path::rel_path};
 use workspace::{MultiWorkspace, Workspace};
 
-//
 use crate::TestServer;
 
+fn collect_diff_stats<C: gpui::AppContext>(
+    panel: &gpui::Entity<GitPanel>,
+    cx: &C,
+) -> HashMap<RepoPath, DiffStat> {
+    panel.read_with(cx, |panel, cx| {
+        let Some(repo) = panel.active_repository() else {
+            return HashMap::default();
+        };
+        let snapshot = repo.read(cx).snapshot();
+        let mut stats = HashMap::default();
+        for entry in snapshot.statuses_by_path.iter() {
+            if let Some(diff_stat) = entry.diff_stat {
+                stats.insert(entry.repo_path.clone(), diff_stat);
+            }
+        }
+        stats
+    })
+}
+
 #[gpui::test]
 async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
     let mut server = TestServer::start(cx_a.background_executor.clone()).await;
@@ -141,3 +164,337 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
         );
     });
 }
+
+#[gpui::test]
+async fn test_remote_git_worktrees(
+    executor: BackgroundExecutor,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+) {
+    let mut server = TestServer::start(executor.clone()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+        .await;
+    let active_call_a = cx_a.read(ActiveCall::global);
+
+    client_a
+        .fs()
+        .insert_tree(
+            path!("/project"),
+            json!({ ".git": {}, "file.txt": "content" }),
+        )
+        .await;
+
+    let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await;
+
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+        .await
+        .unwrap();
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
+
+    executor.run_until_parked();
+
+    let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap());
+
+    // Initially only the main worktree (the repo itself) should be present
+    let worktrees = cx_b
+        .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees()))
+        .await
+        .unwrap()
+        .unwrap();
+    assert_eq!(worktrees.len(), 1);
+    assert_eq!(worktrees[0].path, PathBuf::from(path!("/project")));
+
+    // Client B creates a git worktree via the remote project
+    let worktree_directory = PathBuf::from(path!("/project"));
+    cx_b.update(|cx| {
+        repo_b.update(cx, |repository, _| {
+            repository.create_worktree(
+                "feature-branch".to_string(),
+                worktree_directory.clone(),
+                Some("abc123".to_string()),
+            )
+        })
+    })
+    .await
+    .unwrap()
+    .unwrap();
+
+    executor.run_until_parked();
+
+    // Client B lists worktrees — should see main + the one just created
+    let worktrees = cx_b
+        .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees()))
+        .await
+        .unwrap()
+        .unwrap();
+    assert_eq!(worktrees.len(), 2);
+    assert_eq!(worktrees[0].path, PathBuf::from(path!("/project")));
+    assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch"));
+    assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+    assert_eq!(worktrees[1].sha.as_ref(), "abc123");
+
+    // Verify from the host side that the worktree was actually created
+    let host_worktrees = {
+        let repo_a = cx_a.update(|cx| {
+            project_a
+                .read(cx)
+                .repositories(cx)
+                .values()
+                .next()
+                .unwrap()
+                .clone()
+        });
+        cx_a.update(|cx| repo_a.update(cx, |repository, _| repository.worktrees()))
+            .await
+            .unwrap()
+            .unwrap()
+    };
+    assert_eq!(host_worktrees.len(), 2);
+    assert_eq!(host_worktrees[0].path, PathBuf::from(path!("/project")));
+    assert_eq!(
+        host_worktrees[1].path,
+        worktree_directory.join("feature-branch")
+    );
+
+    // Client B creates a second git worktree without an explicit commit
+    cx_b.update(|cx| {
+        repo_b.update(cx, |repository, _| {
+            repository.create_worktree(
+                "bugfix-branch".to_string(),
+                worktree_directory.clone(),
+                None,
+            )
+        })
+    })
+    .await
+    .unwrap()
+    .unwrap();
+
+    executor.run_until_parked();
+
+    // Client B lists worktrees — should now have main + two created
+    let worktrees = cx_b
+        .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees()))
+        .await
+        .unwrap()
+        .unwrap();
+    assert_eq!(worktrees.len(), 3);
+
+    let feature_worktree = worktrees
+        .iter()
+        .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch")
+        .expect("should find feature-branch worktree");
+    assert_eq!(
+        feature_worktree.path,
+        worktree_directory.join("feature-branch")
+    );
+
+    let bugfix_worktree = worktrees
+        .iter()
+        .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/bugfix-branch")
+        .expect("should find bugfix-branch worktree");
+    assert_eq!(
+        bugfix_worktree.path,
+        worktree_directory.join("bugfix-branch")
+    );
+    assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha");
+}
+
+#[gpui::test]
+async fn test_diff_stat_sync_between_host_and_downstream_client(
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+    cx_c: &mut TestAppContext,
+) {
+    let mut server = TestServer::start(cx_a.background_executor.clone()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    let client_c = server.create_client(cx_c, "user_c").await;
+
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
+        .await;
+
+    let fs = client_a.fs();
+    fs.insert_tree(
+        path!("/code"),
+        json!({
+            "project1": {
+                ".git": {},
+                "src": {
+                    "lib.rs": "line1\nline2\nline3\n",
+                    "new_file.rs": "added1\nadded2\n",
+                },
+                "README.md": "# project 1",
+            }
+        }),
+    )
+    .await;
+
+    let dot_git = Path::new(path!("/code/project1/.git"));
+    fs.set_head_for_repo(
+        dot_git,
+        &[
+            ("src/lib.rs", "line1\nold_line2\n".into()),
+            ("src/deleted.rs", "was_here\n".into()),
+        ],
+        "deadbeef",
+    );
+    fs.set_index_for_repo(
+        dot_git,
+        &[
+            ("src/lib.rs", "line1\nold_line2\nline3\nline4\n".into()),
+            ("src/staged_only.rs", "x\ny\n".into()),
+            ("src/new_file.rs", "added1\nadded2\n".into()),
+            ("README.md", "# project 1".into()),
+        ],
+    );
+
+    let (project_a, worktree_id) = client_a
+        .build_local_project(path!("/code/project1"), cx_a)
+        .await;
+    let active_call_a = cx_a.read(ActiveCall::global);
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+        .await
+        .unwrap();
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
+    let _project_c = client_c.join_remote_project(project_id, cx_c).await;
+    cx_a.run_until_parked();
+
+    let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
+    let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
+
+    let panel_a = workspace_a.update_in(cx_a, GitPanel::new_test);
+    workspace_a.update_in(cx_a, |workspace, window, cx| {
+        workspace.add_panel(panel_a.clone(), window, cx);
+    });
+
+    let panel_b = workspace_b.update_in(cx_b, GitPanel::new_test);
+    workspace_b.update_in(cx_b, |workspace, window, cx| {
+        workspace.add_panel(panel_b.clone(), window, cx);
+    });
+
+    cx_a.run_until_parked();
+
+    let stats_a = collect_diff_stats(&panel_a, cx_a);
+    let stats_b = collect_diff_stats(&panel_b, cx_b);
+
+    let mut expected: HashMap<RepoPath, DiffStat> = HashMap::default();
+    expected.insert(
+        RepoPath::new("src/lib.rs").unwrap(),
+        DiffStat {
+            added: 3,
+            deleted: 2,
+        },
+    );
+    expected.insert(
+        RepoPath::new("src/deleted.rs").unwrap(),
+        DiffStat {
+            added: 0,
+            deleted: 1,
+        },
+    );
+    expected.insert(
+        RepoPath::new("src/new_file.rs").unwrap(),
+        DiffStat {
+            added: 2,
+            deleted: 0,
+        },
+    );
+    expected.insert(
+        RepoPath::new("README.md").unwrap(),
+        DiffStat {
+            added: 1,
+            deleted: 0,
+        },
+    );
+    assert_eq!(stats_a, expected, "host diff stats should match expected");
+    assert_eq!(stats_a, stats_b, "host and remote should agree");
+
+    let buffer_a = project_a
+        .update(cx_a, |p, cx| {
+            p.open_buffer((worktree_id, rel_path("src/lib.rs")), cx)
+        })
+        .await
+        .unwrap();
+
+    let _buffer_b = project_b
+        .update(cx_b, |p, cx| {
+            p.open_buffer((worktree_id, rel_path("src/lib.rs")), cx)
+        })
+        .await
+        .unwrap();
+    cx_a.run_until_parked();
+
+    buffer_a.update(cx_a, |buf, cx| {
+        buf.edit([(buf.len()..buf.len(), "line4\n")], None, cx);
+    });
+    project_a
+        .update(cx_a, |project, cx| {
+            project.save_buffer(buffer_a.clone(), cx)
+        })
+        .await
+        .unwrap();
+    cx_a.run_until_parked();
+
+    let stats_a = collect_diff_stats(&panel_a, cx_a);
+    let stats_b = collect_diff_stats(&panel_b, cx_b);
+
+    let mut expected_after_edit = expected.clone();
+    expected_after_edit.insert(
+        RepoPath::new("src/lib.rs").unwrap(),
+        DiffStat {
+            added: 4,
+            deleted: 2,
+        },
+    );
+    assert_eq!(
+        stats_a, expected_after_edit,
+        "host diff stats should reflect the edit"
+    );
+    assert_eq!(
+        stats_b, expected_after_edit,
+        "remote diff stats should reflect the host's edit"
+    );
+
+    let active_call_b = cx_b.read(ActiveCall::global);
+    active_call_b
+        .update(cx_b, |call, cx| call.hang_up(cx))
+        .await
+        .unwrap();
+    cx_a.run_until_parked();
+
+    let user_id_b = client_b.current_user_id(cx_b).to_proto();
+    active_call_a
+        .update(cx_a, |call, cx| call.invite(user_id_b, None, cx))
+        .await
+        .unwrap();
+    cx_b.run_until_parked();
+    let active_call_b = cx_b.read(ActiveCall::global);
+    active_call_b
+        .update(cx_b, |call, cx| call.accept_incoming(cx))
+        .await
+        .unwrap();
+    cx_a.run_until_parked();
+
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
+    cx_a.run_until_parked();
+
+    let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
+    let panel_b = workspace_b.update_in(cx_b, GitPanel::new_test);
+    workspace_b.update_in(cx_b, |workspace, window, cx| {
+        workspace.add_panel(panel_b.clone(), window, cx);
+    });
+    cx_b.run_until_parked();
+
+    let stats_b = collect_diff_stats(&panel_b, cx_b);
+    assert_eq!(
+        stats_b, expected_after_edit,
+        "remote diff stats should be restored from the database after rejoining the call"
+    );
+}

crates/collab/tests/integration/integration_tests.rs 🔗

@@ -7205,3 +7205,89 @@ async fn test_remote_git_branches(
 
     assert_eq!(host_branch.name(), "totally-new-branch");
 }
+
+#[gpui::test]
+async fn test_guest_can_rejoin_shared_project_after_leaving_call(
+    executor: BackgroundExecutor,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+    cx_c: &mut TestAppContext,
+) {
+    let mut server = TestServer::start(executor.clone()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    let client_c = server.create_client(cx_c, "user_c").await;
+
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
+        .await;
+
+    client_a
+        .fs()
+        .insert_tree(
+            path!("/project"),
+            json!({
+                "file.txt": "hello\n",
+            }),
+        )
+        .await;
+
+    let (project_a, _worktree_id) = client_a.build_local_project(path!("/project"), cx_a).await;
+    let active_call_a = cx_a.read(ActiveCall::global);
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+        .await
+        .unwrap();
+
+    let _project_b = client_b.join_remote_project(project_id, cx_b).await;
+    executor.run_until_parked();
+
+    // third client joins call to prevent room from being torn down
+    let _project_c = client_c.join_remote_project(project_id, cx_c).await;
+    executor.run_until_parked();
+
+    let active_call_b = cx_b.read(ActiveCall::global);
+    active_call_b
+        .update(cx_b, |call, cx| call.hang_up(cx))
+        .await
+        .unwrap();
+    executor.run_until_parked();
+
+    let user_id_b = client_b.current_user_id(cx_b).to_proto();
+    let active_call_a = cx_a.read(ActiveCall::global);
+    active_call_a
+        .update(cx_a, |call, cx| call.invite(user_id_b, None, cx))
+        .await
+        .unwrap();
+    executor.run_until_parked();
+    let active_call_b = cx_b.read(ActiveCall::global);
+    active_call_b
+        .update(cx_b, |call, cx| call.accept_incoming(cx))
+        .await
+        .unwrap();
+    executor.run_until_parked();
+
+    let _project_b2 = client_b.join_remote_project(project_id, cx_b).await;
+    executor.run_until_parked();
+
+    project_a.read_with(cx_a, |project, _| {
+        let guest_count = project
+            .collaborators()
+            .values()
+            .filter(|c| !c.is_host)
+            .count();
+
+        assert_eq!(
+            guest_count, 2,
+            "host should have exactly two guest collaborators after rejoin"
+        );
+    });
+
+    _project_b.read_with(cx_b, |project, _| {
+        assert_eq!(
+            project.client_subscriptions().len(),
+            0,
+            "We should clear all host subscriptions after leaving the project"
+        );
+    })
+}

crates/collab/tests/integration/randomized_test_helpers.rs 🔗

@@ -180,6 +180,13 @@ pub async fn run_randomized_test<T: RandomizedTest>(
     T::on_quiesce(&mut server, &mut clients).await;
 
     for (client, cx) in clients {
+        cx.update(|cx| {
+            for window in cx.windows() {
+                window
+                    .update(cx, |_, window, _| window.remove_window())
+                    .ok();
+            }
+        });
         cx.update(|cx| {
             let settings = cx.remove_global::<SettingsStore>();
             cx.clear_globals();
@@ -187,8 +194,8 @@ pub async fn run_randomized_test<T: RandomizedTest>(
             theme::init(theme::LoadThemes::JustBase, cx);
             drop(client);
         });
+        executor.run_until_parked();
     }
-    executor.run_until_parked();
 
     if let Some(path) = plan_save_path() {
         eprintln!("saved test plan to path {:?}", path);
@@ -556,6 +563,13 @@ impl<T: RandomizedTest> TestPlan<T> {
 
                 log::info!("{} removed", client.username);
                 plan.lock().user(removed_user_id).online = false;
+                client_cx.update(|cx| {
+                    for window in cx.windows() {
+                        window
+                            .update(cx, |_, window, _| window.remove_window())
+                            .ok();
+                    }
+                });
                 client_cx.update(|cx| {
                     cx.clear_globals();
                     drop(client);

crates/collab/tests/integration/remote_editing_collaboration_tests.rs 🔗

@@ -33,7 +33,7 @@ use settings::{
     SettingsStore,
 };
 use std::{
-    path::Path,
+    path::{Path, PathBuf},
     sync::{
         Arc,
         atomic::{AtomicUsize, Ordering},
@@ -396,6 +396,130 @@ async fn test_ssh_collaboration_git_branches(
     });
 }
 
+#[gpui::test]
+async fn test_ssh_collaboration_git_worktrees(
+    executor: BackgroundExecutor,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+    server_cx: &mut TestAppContext,
+) {
+    cx_a.set_name("a");
+    cx_b.set_name("b");
+    server_cx.set_name("server");
+
+    cx_a.update(|cx| {
+        release_channel::init(semver::Version::new(0, 0, 0), cx);
+    });
+    server_cx.update(|cx| {
+        release_channel::init(semver::Version::new(0, 0, 0), cx);
+    });
+
+    let mut server = TestServer::start(executor.clone()).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    server
+        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+        .await;
+
+    let (opts, server_ssh, _) = RemoteClient::fake_server(cx_a, server_cx);
+    let remote_fs = FakeFs::new(server_cx.executor());
+    remote_fs
+        .insert_tree("/project", json!({ ".git": {}, "file.txt": "content" }))
+        .await;
+
+    server_cx.update(HeadlessProject::init);
+    let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
+    let headless_project = server_cx.new(|cx| {
+        HeadlessProject::new(
+            HeadlessAppState {
+                session: server_ssh,
+                fs: remote_fs.clone(),
+                http_client: Arc::new(BlockedHttpClient),
+                node_runtime: NodeRuntime::unavailable(),
+                languages,
+                extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
+                startup_time: std::time::Instant::now(),
+            },
+            false,
+            cx,
+        )
+    });
+
+    let client_ssh = RemoteClient::connect_mock(opts, cx_a).await;
+    let (project_a, _) = client_a
+        .build_ssh_project("/project", client_ssh, false, cx_a)
+        .await;
+
+    let active_call_a = cx_a.read(ActiveCall::global);
+    let project_id = active_call_a
+        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+        .await
+        .unwrap();
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
+
+    executor.run_until_parked();
+
+    let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap());
+
+    let worktrees = cx_b
+        .update(|cx| repo_b.update(cx, |repo, _| repo.worktrees()))
+        .await
+        .unwrap()
+        .unwrap();
+    assert_eq!(worktrees.len(), 1);
+
+    let worktree_directory = PathBuf::from("/project");
+    cx_b.update(|cx| {
+        repo_b.update(cx, |repo, _| {
+            repo.create_worktree(
+                "feature-branch".to_string(),
+                worktree_directory.clone(),
+                Some("abc123".to_string()),
+            )
+        })
+    })
+    .await
+    .unwrap()
+    .unwrap();
+
+    executor.run_until_parked();
+
+    let worktrees = cx_b
+        .update(|cx| repo_b.update(cx, |repo, _| repo.worktrees()))
+        .await
+        .unwrap()
+        .unwrap();
+    assert_eq!(worktrees.len(), 2);
+    assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch"));
+    assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+    assert_eq!(worktrees[1].sha.as_ref(), "abc123");
+
+    let server_worktrees = {
+        let server_repo = server_cx.update(|cx| {
+            headless_project.update(cx, |headless_project, cx| {
+                headless_project
+                    .git_store
+                    .read(cx)
+                    .repositories()
+                    .values()
+                    .next()
+                    .unwrap()
+                    .clone()
+            })
+        });
+        server_cx
+            .update(|cx| server_repo.update(cx, |repo, _| repo.worktrees()))
+            .await
+            .unwrap()
+            .unwrap()
+    };
+    assert_eq!(server_worktrees.len(), 2);
+    assert_eq!(
+        server_worktrees[1].path,
+        worktree_directory.join("feature-branch")
+    );
+}
+
 #[gpui::test]
 async fn test_ssh_collaboration_formatting_with_prettier(
     executor: BackgroundExecutor,

crates/command_palette/src/command_palette.rs 🔗

@@ -510,7 +510,7 @@ impl PickerDelegate for CommandPaletteDelegate {
                         .delegate
                         .matches_updated(query, commands, matches, intercept_result, cx)
                 })
-                .log_err();
+                .ok();
         })
     }
 
@@ -543,7 +543,7 @@ impl PickerDelegate for CommandPaletteDelegate {
     fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
         self.command_palette
             .update(cx, |_, cx| cx.emit(DismissEvent))
-            .log_err();
+            .ok();
     }
 
     fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {

crates/copilot/src/copilot.rs 🔗

@@ -1035,10 +1035,9 @@ impl Copilot {
         };
         let buffer_entity = buffer.clone();
         let lsp = server.lsp.clone();
-        let registered_buffer = server
-            .registered_buffers
-            .get_mut(&buffer.entity_id())
-            .unwrap();
+        let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) else {
+            return Task::ready(Err(anyhow::anyhow!("buffer not registered")));
+        };
         let pending_snapshot = registered_buffer.report_changes(buffer, cx);
         let buffer = buffer.read(cx);
         let uri = registered_buffer.uri.clone();

crates/copilot/src/copilot_edit_prediction_delegate.rs 🔗

@@ -233,8 +233,8 @@ mod tests {
     use super::*;
     use edit_prediction_types::EditPredictionGranularity;
     use editor::{
-        Editor, ExcerptRange, MultiBuffer, MultiBufferOffset, SelectionEffects,
-        test::editor_lsp_test_context::EditorLspTestContext,
+        Editor, MultiBuffer, MultiBufferOffset, PathKey, SelectionEffects,
+        test::{editor_content_with_blocks, editor_lsp_test_context::EditorLspTestContext},
     };
     use fs::FakeFs;
     use futures::StreamExt;
@@ -685,32 +685,32 @@ mod tests {
         let buffer_2 = cx.new(|cx| Buffer::local("c = 3\nd = 4\n", cx));
         let multibuffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_1.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
                 cx,
             );
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_2.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
                 cx,
             );
             multibuffer
         });
-        let editor =
-            cx.add_window(|window, cx| Editor::for_multibuffer(multibuffer, None, window, cx));
-        editor
-            .update(cx, |editor, window, cx| {
-                use gpui::Focusable;
-                window.focus(&editor.focus_handle(cx), cx);
-            })
-            .unwrap();
+        let (editor, cx) =
+            cx.add_window_view(|window, cx| Editor::for_multibuffer(multibuffer, None, window, cx));
+        editor.update_in(cx, |editor, window, cx| {
+            use gpui::Focusable;
+            window.focus(&editor.focus_handle(cx), cx);
+        });
         let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot));
-        editor
-            .update(cx, |editor, window, cx| {
-                editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
-            })
-            .unwrap();
+        editor.update_in(cx, |editor, window, cx| {
+            editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
+        });
 
         handle_copilot_completion_request(
             &copilot_lsp,
@@ -724,7 +724,7 @@ mod tests {
                 },
             }],
         );
-        _ = editor.update(cx, |editor, window, cx| {
+        _ = editor.update_in(cx, |editor, window, cx| {
             // Ensure copilot suggestions are shown for the first excerpt.
             editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
                 s.select_ranges([Point::new(1, 5)..Point::new(1, 5)])
@@ -732,14 +732,22 @@ mod tests {
             editor.show_edit_prediction(&Default::default(), window, cx);
         });
         executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
-        _ = editor.update(cx, |editor, _, cx| {
+        _ = editor.update_in(cx, |editor, _, _| {
             assert!(editor.has_active_edit_prediction());
-            assert_eq!(
-                editor.display_text(cx),
-                "\n\na = 1\nb = 2 + a\n\n\n\nc = 3\nd = 4\n"
-            );
-            assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n");
         });
+        pretty_assertions::assert_eq!(
+            editor_content_with_blocks(&editor, cx),
+            indoc! { "
+                § <no file>
+                § -----
+                a = 1
+                b = 2 + a
+                § <no file>
+                § -----
+                c = 3
+                d = 4"
+            }
+        );
 
         handle_copilot_completion_request(
             &copilot_lsp,
@@ -753,38 +761,61 @@ mod tests {
                 },
             }],
         );
-        _ = editor.update(cx, |editor, window, cx| {
+        _ = editor.update_in(cx, |editor, window, cx| {
             // Move to another excerpt, ensuring the suggestion gets cleared.
             editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
                 s.select_ranges([Point::new(4, 5)..Point::new(4, 5)])
             });
             assert!(!editor.has_active_edit_prediction());
-            assert_eq!(
-                editor.display_text(cx),
-                "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4\n"
-            );
-            assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n");
-
+        });
+        pretty_assertions::assert_eq!(
+            editor_content_with_blocks(&editor, cx),
+            indoc! { "
+                § <no file>
+                § -----
+                a = 1
+                b = 2
+                § <no file>
+                § -----
+                c = 3
+                d = 4"}
+        );
+        editor.update_in(cx, |editor, window, cx| {
             // Type a character, ensuring we don't even try to interpolate the previous suggestion.
             editor.handle_input(" ", window, cx);
             assert!(!editor.has_active_edit_prediction());
-            assert_eq!(
-                editor.display_text(cx),
-                "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 \n"
-            );
-            assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n");
         });
+        pretty_assertions::assert_eq!(
+            editor_content_with_blocks(&editor, cx),
+            indoc! {"
+                § <no file>
+                § -----
+                a = 1
+                b = 2
+                § <no file>
+                § -----
+                c = 3
+                d = 4\x20"
+            },
+        );
 
         // Ensure the new suggestion is displayed when the debounce timeout expires.
         executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
-        _ = editor.update(cx, |editor, _, cx| {
+        _ = editor.update(cx, |editor, _| {
             assert!(editor.has_active_edit_prediction());
-            assert_eq!(
-                editor.display_text(cx),
-                "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 + c\n"
-            );
-            assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n");
         });
+        assert_eq!(
+            editor_content_with_blocks(&editor, cx),
+            indoc! {"
+               § <no file>
+               § -----
+               a = 1
+               b = 2
+               § <no file>
+               § -----
+               c = 3
+               d = 4 + c"}
+        );
     }
 
     #[gpui::test]
@@ -947,14 +978,18 @@ mod tests {
 
         let multibuffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 private_buffer.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
                 cx,
             );
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 public_buffer.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(6, 0))],
+                [Point::new(0, 0)..Point::new(6, 0)],
+                0,
                 cx,
             );
             multibuffer

crates/crashes/Cargo.toml 🔗

@@ -6,13 +6,12 @@ edition.workspace = true
 license = "GPL-3.0-or-later"
 
 [dependencies]
-bincode.workspace = true
 cfg-if.workspace = true
 crash-handler.workspace = true
 futures.workspace = true
 log.workspace = true
 minidumper.workspace = true
-
+parking_lot.workspace = true
 paths.workspace = true
 release_channel.workspace = true
 smol.workspace = true

crates/crashes/src/crashes.rs 🔗

@@ -2,12 +2,14 @@ use crash_handler::{CrashEventResult, CrashHandler};
 use futures::future::BoxFuture;
 use log::info;
 use minidumper::{Client, LoopAction, MinidumpBinary};
+use parking_lot::Mutex;
 use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
 use serde::{Deserialize, Serialize};
 use std::mem;
 
 #[cfg(not(target_os = "windows"))]
 use smol::process::Command;
+use system_specs::GpuSpecs;
 
 #[cfg(target_os = "macos")]
 use std::sync::atomic::AtomicU32;
@@ -27,12 +29,14 @@ use std::{
 };
 
 // set once the crash handler has initialized and the client has connected to it
-pub static CRASH_HANDLER: OnceLock<Arc<Client>> = OnceLock::new();
+static CRASH_HANDLER: OnceLock<Arc<Client>> = OnceLock::new();
 // set when the first minidump request is made to avoid generating duplicate crash reports
 pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false);
 const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60);
 const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
 
+static PENDING_CRASH_SERVER_MESSAGES: Mutex<Vec<CrashServerMessage>> = Mutex::new(Vec::new());
+
 #[cfg(target_os = "macos")]
 static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0);
 
@@ -118,6 +122,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl
     spawn_crash_handler_windows(&exe, &socket_name);
 
     info!("spawning crash handler process");
+    send_crash_server_message(CrashServerMessage::Init(crash_init));
 
     let mut elapsed = Duration::ZERO;
     let retry_frequency = Duration::from_millis(100);
@@ -134,10 +139,6 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl
         smol::Timer::after(retry_frequency).await;
     }
     let client = maybe_client.unwrap();
-    client
-        .send_message(1, serde_json::to_vec(&crash_init).unwrap())
-        .unwrap();
-
     let client = Arc::new(client);
 
     #[cfg(target_os = "linux")]
@@ -146,6 +147,10 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl
     // Publishing the client to the OnceLock makes it visible to the signal
     // handler callback installed earlier.
     CRASH_HANDLER.set(client.clone()).ok();
+    let messages: Vec<_> = mem::take(PENDING_CRASH_SERVER_MESSAGES.lock().as_mut());
+    for message in messages.into_iter() {
+        send_crash_server_message(message);
+    }
     // mem::forget so that the drop is not called
     mem::forget(handler);
     info!("crash handler registered");
@@ -177,9 +182,10 @@ unsafe fn suspend_all_other_threads() {
 }
 
 pub struct CrashServer {
-    initialization_params: OnceLock<InitCrashHandler>,
-    panic_info: OnceLock<CrashPanic>,
-    active_gpu: OnceLock<system_specs::GpuSpecs>,
+    initialization_params: Mutex<Option<InitCrashHandler>>,
+    panic_info: Mutex<Option<CrashPanic>>,
+    active_gpu: Mutex<Option<system_specs::GpuSpecs>>,
+    user_info: Mutex<Option<UserInfo>>,
     has_connection: Arc<AtomicBool>,
 }
 
@@ -190,6 +196,7 @@ pub struct CrashInfo {
     pub minidump_error: Option<String>,
     pub gpus: Vec<system_specs::GpuInfo>,
     pub active_gpu: Option<system_specs::GpuSpecs>,
+    pub user_info: Option<UserInfo>,
 }
 
 #[derive(Debug, Deserialize, Serialize, Clone)]
@@ -207,15 +214,55 @@ pub struct CrashPanic {
     pub span: String,
 }
 
+#[derive(Deserialize, Serialize, Debug, Clone)]
+pub struct UserInfo {
+    pub metrics_id: Option<String>,
+    pub is_staff: Option<bool>,
+}
+
+fn send_crash_server_message(message: CrashServerMessage) {
+    let Some(crash_server) = CRASH_HANDLER.get() else {
+        PENDING_CRASH_SERVER_MESSAGES.lock().push(message);
+        return;
+    };
+    let data = match serde_json::to_vec(&message) {
+        Ok(data) => data,
+        Err(err) => {
+            log::warn!("Failed to serialize crash server message: {:?}", err);
+            return;
+        }
+    };
+
+    if let Err(err) = crash_server.send_message(0, data) {
+        log::warn!("Failed to send data to crash server {:?}", err);
+    }
+}
+
+pub fn set_gpu_info(specs: GpuSpecs) {
+    send_crash_server_message(CrashServerMessage::GPUInfo(specs));
+}
+
+pub fn set_user_info(info: UserInfo) {
+    send_crash_server_message(CrashServerMessage::UserInfo(info));
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+enum CrashServerMessage {
+    Init(InitCrashHandler),
+    Panic(CrashPanic),
+    GPUInfo(GpuSpecs),
+    UserInfo(UserInfo),
+}
+
 impl minidumper::ServerHandler for CrashServer {
     fn create_minidump_file(&self) -> Result<(File, PathBuf), io::Error> {
-        let err_message = "Missing initialization data";
         let dump_path = paths::logs_dir()
             .join(
                 &self
                     .initialization_params
-                    .get()
-                    .expect(err_message)
+                    .lock()
+                    .as_ref()
+                    .expect("Missing initialization data")
                     .session_id,
             )
             .with_extension("dmp");
@@ -255,13 +302,14 @@ impl minidumper::ServerHandler for CrashServer {
         let crash_info = CrashInfo {
             init: self
                 .initialization_params
-                .get()
-                .expect("not initialized")
-                .clone(),
-            panic: self.panic_info.get().cloned(),
+                .lock()
+                .clone()
+                .expect("not initialized"),
+            panic: self.panic_info.lock().clone(),
             minidump_error,
-            active_gpu: self.active_gpu.get().cloned(),
+            active_gpu: self.active_gpu.lock().clone(),
             gpus,
+            user_info: self.user_info.lock().clone(),
         };
 
         let crash_data_path = paths::logs_dir()
@@ -273,30 +321,21 @@ impl minidumper::ServerHandler for CrashServer {
         LoopAction::Exit
     }
 
-    fn on_message(&self, kind: u32, buffer: Vec<u8>) {
-        match kind {
-            1 => {
-                let init_data =
-                    serde_json::from_slice::<InitCrashHandler>(&buffer).expect("invalid init data");
-                self.initialization_params
-                    .set(init_data)
-                    .expect("already initialized");
+    fn on_message(&self, _: u32, buffer: Vec<u8>) {
+        let message: CrashServerMessage =
+            serde_json::from_slice(&buffer).expect("invalid init data");
+        match message {
+            CrashServerMessage::Init(init_data) => {
+                self.initialization_params.lock().replace(init_data);
             }
-            2 => {
-                let panic_data =
-                    serde_json::from_slice::<CrashPanic>(&buffer).expect("invalid panic data");
-                self.panic_info.set(panic_data).expect("already panicked");
+            CrashServerMessage::Panic(crash_panic) => {
+                self.panic_info.lock().replace(crash_panic);
             }
-            3 => {
-                let gpu_specs: system_specs::GpuSpecs =
-                    bincode::deserialize(&buffer).expect("gpu specs");
-                // we ignore the case where it was already set because this message is sent
-                // on each new window. in theory all zed windows should be using the same
-                // GPU so this is fine.
-                self.active_gpu.set(gpu_specs).ok();
+            CrashServerMessage::GPUInfo(gpu_specs) => {
+                self.active_gpu.lock().replace(gpu_specs);
             }
-            _ => {
-                panic!("invalid message kind");
+            CrashServerMessage::UserInfo(user_info) => {
+                self.user_info.lock().replace(user_info);
             }
         }
     }
@@ -326,37 +365,33 @@ pub fn panic_hook(info: &PanicHookInfo) {
     // if it's still not there just write panic info and no minidump
     let retry_frequency = Duration::from_millis(100);
     for _ in 0..5 {
-        if let Some(client) = CRASH_HANDLER.get() {
-            let location = info
-                .location()
-                .map_or_else(|| "<unknown>".to_owned(), |location| location.to_string());
-            log::error!("thread '{thread_name}' panicked at {location}:\n{message}...");
-            client
-                .send_message(
-                    2,
-                    serde_json::to_vec(&CrashPanic { message, span }).unwrap(),
-                )
-                .ok();
-            log::error!("triggering a crash to generate a minidump...");
-
-            #[cfg(target_os = "macos")]
-            PANIC_THREAD_ID.store(
-                unsafe { mach2::mach_init::mach_thread_self() },
-                Ordering::SeqCst,
-            );
-
-            cfg_if::cfg_if! {
-                if #[cfg(target_os = "windows")] {
-                    // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
-                    CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE)
-                    break;
-                } else {
-                    std::process::abort();
-                }
-            }
+        if CRASH_HANDLER.get().is_some() {
+            break;
         }
         thread::sleep(retry_frequency);
     }
+    let location = info
+        .location()
+        .map_or_else(|| "<unknown>".to_owned(), |location| location.to_string());
+    log::error!("thread '{thread_name}' panicked at {location}:\n{message}...");
+
+    send_crash_server_message(CrashServerMessage::Panic(CrashPanic { message, span }));
+    log::error!("triggering a crash to generate a minidump...");
+
+    #[cfg(target_os = "macos")]
+    PANIC_THREAD_ID.store(
+        unsafe { mach2::mach_init::mach_thread_self() },
+        Ordering::SeqCst,
+    );
+
+    cfg_if::cfg_if! {
+        if #[cfg(target_os = "windows")] {
+            // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
+            CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE)
+        } else {
+            std::process::abort();
+        }
+    }
 }
 
 #[cfg(target_os = "windows")]
@@ -436,10 +471,11 @@ pub fn crash_server(socket: &Path) {
     server
         .run(
             Box::new(CrashServer {
-                initialization_params: OnceLock::new(),
-                panic_info: OnceLock::new(),
+                initialization_params: Mutex::default(),
+                panic_info: Mutex::default(),
+                user_info: Mutex::default(),
                 has_connection,
-                active_gpu: OnceLock::new(),
+                active_gpu: Mutex::default(),
             }),
             &shutdown,
             Some(CRASH_HANDLER_PING_TIMEOUT),

crates/csv_preview/Cargo.toml 🔗

@@ -0,0 +1,21 @@
+[package]
+name = "csv_preview"
+version = "0.1.0"
+publish.workspace = true
+edition.workspace = true
+
+[lib]
+path = "src/csv_preview.rs"
+
+[dependencies]
+anyhow.workspace = true
+feature_flags.workspace = true
+gpui.workspace = true
+editor.workspace = true
+ui.workspace = true
+workspace.workspace = true
+log.workspace = true
+text.workspace = true
+
+[lints]
+workspace = true

crates/csv_preview/src/csv_preview.rs 🔗

@@ -0,0 +1,302 @@
+use editor::{Editor, EditorEvent};
+use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
+use gpui::{
+    AppContext, Entity, EventEmitter, FocusHandle, Focusable, ListAlignment, Task, actions,
+};
+use std::{
+    collections::HashMap,
+    time::{Duration, Instant},
+};
+
+use crate::table_data_engine::TableDataEngine;
+use ui::{SharedString, TableColumnWidths, TableInteractionState, prelude::*};
+use workspace::{Item, SplitDirection, Workspace};
+
+use crate::{parser::EditorState, settings::CsvPreviewSettings, types::TableLikeContent};
+
+mod parser;
+mod renderer;
+mod settings;
+mod table_data_engine;
+mod types;
+
+actions!(csv, [OpenPreview, OpenPreviewToTheSide]);
+
+pub struct TabularDataPreviewFeatureFlag;
+
+impl FeatureFlag for TabularDataPreviewFeatureFlag {
+    const NAME: &'static str = "tabular-data-preview";
+}
+
+pub struct CsvPreviewView {
+    pub(crate) engine: TableDataEngine,
+
+    pub(crate) focus_handle: FocusHandle,
+    active_editor_state: EditorState,
+    pub(crate) table_interaction_state: Entity<TableInteractionState>,
+    pub(crate) column_widths: ColumnWidths,
+    pub(crate) parsing_task: Option<Task<anyhow::Result<()>>>,
+    pub(crate) settings: CsvPreviewSettings,
+    /// Performance metrics for debugging and monitoring CSV operations.
+    pub(crate) performance_metrics: PerformanceMetrics,
+    pub(crate) list_state: gpui::ListState,
+    /// Time when the last parsing operation ended, used for smart debouncing
+    pub(crate) last_parse_end_time: Option<std::time::Instant>,
+}
+
+pub fn init(cx: &mut App) {
+    cx.observe_new(|workspace: &mut Workspace, _, _| {
+        CsvPreviewView::register(workspace);
+    })
+    .detach()
+}
+
+impl CsvPreviewView {
+    pub fn register(workspace: &mut Workspace) {
+        workspace.register_action_renderer(|div, _, _, cx| {
+            div.when(cx.has_flag::<TabularDataPreviewFeatureFlag>(), |div| {
+                div.on_action(cx.listener(|workspace, _: &OpenPreview, window, cx| {
+                    if let Some(editor) = workspace
+                        .active_item(cx)
+                        .and_then(|item| item.act_as::<Editor>(cx))
+                        .filter(|editor| Self::is_csv_file(editor, cx))
+                    {
+                        let csv_preview = Self::new(&editor, cx);
+                        workspace.active_pane().update(cx, |pane, cx| {
+                            let existing = pane
+                                .items_of_type::<CsvPreviewView>()
+                                .find(|view| view.read(cx).active_editor_state.editor == editor);
+                            if let Some(idx) = existing.and_then(|e| pane.index_for_item(&e)) {
+                                pane.activate_item(idx, true, true, window, cx);
+                            } else {
+                                pane.add_item(Box::new(csv_preview), true, true, None, window, cx);
+                            }
+                        });
+                        cx.notify();
+                    }
+                }))
+                .on_action(cx.listener(
+                    |workspace, _: &OpenPreviewToTheSide, window, cx| {
+                        if let Some(editor) = workspace
+                            .active_item(cx)
+                            .and_then(|item| item.act_as::<Editor>(cx))
+                            .filter(|editor| Self::is_csv_file(editor, cx))
+                        {
+                            let csv_preview = Self::new(&editor, cx);
+                            let pane = workspace
+                                .find_pane_in_direction(SplitDirection::Right, cx)
+                                .unwrap_or_else(|| {
+                                    workspace.split_pane(
+                                        workspace.active_pane().clone(),
+                                        SplitDirection::Right,
+                                        window,
+                                        cx,
+                                    )
+                                });
+                            pane.update(cx, |pane, cx| {
+                                let existing =
+                                    pane.items_of_type::<CsvPreviewView>().find(|view| {
+                                        view.read(cx).active_editor_state.editor == editor
+                                    });
+                                if let Some(idx) = existing.and_then(|e| pane.index_for_item(&e)) {
+                                    pane.activate_item(idx, true, true, window, cx);
+                                } else {
+                                    pane.add_item(
+                                        Box::new(csv_preview),
+                                        false,
+                                        false,
+                                        None,
+                                        window,
+                                        cx,
+                                    );
+                                }
+                            });
+                            cx.notify();
+                        }
+                    },
+                ))
+            })
+        });
+    }
+
+    fn new(editor: &Entity<Editor>, cx: &mut Context<Workspace>) -> Entity<Self> {
+        let contents = TableLikeContent::default();
+        let table_interaction_state = cx.new(|cx| {
+            TableInteractionState::new(cx)
+                .with_custom_scrollbar(ui::Scrollbars::for_settings::<editor::EditorSettings>())
+        });
+
+        cx.new(|cx| {
+            let subscription = cx.subscribe(
+                editor,
+                |this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| {
+                    match event {
+                        EditorEvent::Edited { .. }
+                        | EditorEvent::DirtyChanged
+                        | EditorEvent::ExcerptsEdited { .. } => {
+                            this.parse_csv_from_active_editor(true, cx);
+                        }
+                        _ => {}
+                    };
+                },
+            );
+
+            let mut view = CsvPreviewView {
+                focus_handle: cx.focus_handle(),
+                active_editor_state: EditorState {
+                    editor: editor.clone(),
+                    _subscription: subscription,
+                },
+                table_interaction_state,
+                column_widths: ColumnWidths::new(cx, 1),
+                parsing_task: None,
+                performance_metrics: PerformanceMetrics::default(),
+                list_state: gpui::ListState::new(contents.rows.len(), ListAlignment::Top, px(1.)),
+                settings: CsvPreviewSettings::default(),
+                last_parse_end_time: None,
+                engine: TableDataEngine::default(),
+            };
+
+            view.parse_csv_from_active_editor(false, cx);
+            view
+        })
+    }
+
+    pub(crate) fn editor_state(&self) -> &EditorState {
+        &self.active_editor_state
+    }
+    pub(crate) fn apply_sort(&mut self) {
+        self.performance_metrics.record("Sort", || {
+            self.engine.apply_sort();
+        });
+    }
+
+    /// Update ordered indices when ordering or content changes
+    pub(crate) fn apply_filter_sort(&mut self) {
+        self.performance_metrics.record("Filter&sort", || {
+            self.engine.calculate_d2d_mapping();
+        });
+
+        // Update list state with filtered row count
+        let visible_rows = self.engine.d2d_mapping().visible_row_count();
+        self.list_state = gpui::ListState::new(visible_rows, ListAlignment::Top, px(1.));
+    }
+
+    pub fn resolve_active_item_as_csv_editor(
+        workspace: &Workspace,
+        cx: &mut Context<Workspace>,
+    ) -> Option<Entity<Editor>> {
+        let editor = workspace
+            .active_item(cx)
+            .and_then(|item| item.act_as::<Editor>(cx))?;
+        Self::is_csv_file(&editor, cx).then_some(editor)
+    }
+
+    fn is_csv_file(editor: &Entity<Editor>, cx: &App) -> bool {
+        editor
+            .read(cx)
+            .buffer()
+            .read(cx)
+            .as_singleton()
+            .and_then(|buffer| {
+                buffer
+                    .read(cx)
+                    .file()
+                    .and_then(|file| file.path().extension())
+                    .map(|ext| ext.eq_ignore_ascii_case("csv"))
+            })
+            .unwrap_or(false)
+    }
+}
+
+impl Focusable for CsvPreviewView {
+    fn focus_handle(&self, _cx: &App) -> FocusHandle {
+        self.focus_handle.clone()
+    }
+}
+
+impl EventEmitter<()> for CsvPreviewView {}
+
+impl Item for CsvPreviewView {
+    type Event = ();
+
+    fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
+        Some(Icon::new(IconName::FileDoc))
+    }
+
+    fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString {
+        self.editor_state()
+            .editor
+            .read(cx)
+            .buffer()
+            .read(cx)
+            .as_singleton()
+            .and_then(|b| {
+                let file = b.read(cx).file()?;
+                let local_file = file.as_local()?;
+                local_file
+                    .abs_path(cx)
+                    .file_name()
+                    .map(|name| format!("Preview {}", name.to_string_lossy()).into())
+            })
+            .unwrap_or_else(|| SharedString::from("CSV Preview"))
+    }
+}
+
+#[derive(Debug, Default)]
+pub struct PerformanceMetrics {
+    /// Map of timing metrics with their duration and measurement time.
+    pub timings: HashMap<&'static str, (Duration, Instant)>,
+    /// List of display indices that were rendered in the current frame.
+    pub rendered_indices: Vec<usize>,
+}
+impl PerformanceMetrics {
+    pub fn record<F, R>(&mut self, name: &'static str, mut f: F) -> R
+    where
+        F: FnMut() -> R,
+    {
+        let start_time = Instant::now();
+        let ret = f();
+        let duration = start_time.elapsed();
+        self.timings.insert(name, (duration, Instant::now()));
+        ret
+    }
+
+    /// Displays all metrics sorted A-Z in format: `{name}: {took}ms {ago}s ago`
+    pub fn display(&self) -> String {
+        let mut metrics = self.timings.iter().collect::<Vec<_>>();
+        metrics.sort_by_key(|&(name, _)| *name);
+        metrics
+            .iter()
+            .map(|(name, (duration, time))| {
+                let took = duration.as_secs_f32() * 1000.;
+                let ago = time.elapsed().as_secs();
+                format!("{name}: {took:.2}ms {ago}s ago")
+            })
+            .collect::<Vec<_>>()
+            .join("\n")
+    }
+
+    /// Get timing for a specific metric
+    pub fn get_timing(&self, name: &str) -> Option<Duration> {
+        self.timings.get(name).map(|(duration, _)| *duration)
+    }
+}
+
+/// Holds state of column widths for a table component in CSV preview.
+pub(crate) struct ColumnWidths {
+    pub widths: Entity<TableColumnWidths>,
+}
+
+impl ColumnWidths {
+    pub(crate) fn new(cx: &mut Context<CsvPreviewView>, cols: usize) -> Self {
+        Self {
+            widths: cx.new(|cx| TableColumnWidths::new(cols, cx)),
+        }
+    }
+    /// Replace the current `TableColumnWidths` entity with a new one for the given column count.
+    pub(crate) fn replace(&self, cx: &mut Context<CsvPreviewView>, cols: usize) {
+        self.widths
+            .update(cx, |entity, cx| *entity = TableColumnWidths::new(cols, cx));
+    }
+}

crates/csv_preview/src/parser.rs 🔗

@@ -0,0 +1,513 @@
+use crate::{
+    CsvPreviewView,
+    types::TableLikeContent,
+    types::{LineNumber, TableCell},
+};
+use editor::Editor;
+use gpui::{AppContext, Context, Entity, Subscription, Task};
+use std::time::{Duration, Instant};
+use text::BufferSnapshot;
+use ui::{SharedString, table_row::TableRow};
+
+pub(crate) const REPARSE_DEBOUNCE: Duration = Duration::from_millis(200);
+
+/// Tracks the editor currently backing the CSV preview.
+pub(crate) struct EditorState {
+    // The editor whose singleton buffer is parsed for the preview.
+    pub editor: Entity<Editor>,
+    // Held only to keep the editor-event subscription alive for the
+    // lifetime of this state.
+    pub _subscription: Subscription,
+}
+
+impl CsvPreviewView {
+    /// Schedules a (possibly debounced) re-parse of the active editor's buffer.
+    ///
+    /// Overwrites `parsing_task` with the new task; NOTE(review): this appears
+    /// to rely on dropping the previous gpui `Task` to cancel it — confirm.
+    pub(crate) fn parse_csv_from_active_editor(
+        &mut self,
+        wait_for_debounce: bool,
+        cx: &mut Context<Self>,
+    ) {
+        let editor = self.active_editor_state.editor.clone();
+        self.parsing_task = Some(self.parse_csv_in_background(wait_for_debounce, editor, cx));
+    }
+
+    /// Parses the editor's buffer on the background executor and, on success,
+    /// installs the parsed contents, resets column widths, records parse
+    /// timing, and re-applies filtering/sorting.
+    fn parse_csv_in_background(
+        &mut self,
+        wait_for_debounce: bool,
+        editor: Entity<Editor>,
+        cx: &mut Context<Self>,
+    ) -> Task<anyhow::Result<()>> {
+        cx.spawn(async move |view, cx| {
+            if wait_for_debounce {
+                // Smart debouncing: check if cooldown period has already passed
+                let now = Instant::now();
+                let should_wait = view.update(cx, |view, _| {
+                    if let Some(last_end) = view.last_parse_end_time {
+                        let cooldown_until = last_end + REPARSE_DEBOUNCE;
+                        if now < cooldown_until {
+                            Some(cooldown_until - now)
+                        } else {
+                            None // Cooldown already passed, parse immediately
+                        }
+                    } else {
+                        None // First parse, no debounce
+                    }
+                })?;
+
+                if let Some(wait_duration) = should_wait {
+                    cx.background_executor().timer(wait_duration).await;
+                }
+            }
+
+            // Only single-buffer editors are supported: `as_singleton` yields
+            // `None` for multibuffers, in which case we bail out silently.
+            let buffer_snapshot = view.update(cx, |_, cx| {
+                editor
+                    .read(cx)
+                    .buffer()
+                    .read(cx)
+                    .as_singleton()
+                    .map(|b| b.read(cx).text_snapshot())
+            })?;
+
+            let Some(buffer_snapshot) = buffer_snapshot else {
+                return Ok(());
+            };
+
+            let instant = Instant::now();
+            let parsed_csv = cx
+                .background_spawn(async move { from_buffer(&buffer_snapshot) })
+                .await;
+            let parse_duration = instant.elapsed();
+            let parse_end_time: Instant = Instant::now();
+            log::debug!("Parsed CSV in {}ms", parse_duration.as_millis());
+            view.update(cx, move |view, cx| {
+                view.performance_metrics
+                    .timings
+                    .insert("Parsing", (parse_duration, Instant::now()));
+
+                log::debug!("Parsed {} rows", parsed_csv.rows.len());
+                // Update table width so it can be rendered properly
+                let cols = parsed_csv.headers.cols();
+                view.column_widths.replace(cx, cols + 1); // Add 1 for the line number column
+
+                view.engine.contents = parsed_csv;
+                // Remember when this parse finished so the next debounce
+                // window is measured from the end of the previous parse.
+                view.last_parse_end_time = Some(parse_end_time);
+
+                view.apply_filter_sort();
+                cx.notify();
+            })
+        })
+    }
+}
+
+/// Builds table contents (header row, data rows, per-row line numbers) from a
+/// buffer snapshot. Returns an empty `TableLikeContent` for blank input or
+/// when nothing parses.
+pub fn from_buffer(buffer_snapshot: &BufferSnapshot) -> TableLikeContent {
+    let text = buffer_snapshot.text();
+
+    if text.trim().is_empty() {
+        return TableLikeContent::default();
+    }
+
+    let (parsed_cells_with_positions, line_numbers) = parse_csv_with_positions(&text);
+    if parsed_cells_with_positions.is_empty() {
+        return TableLikeContent::default();
+    }
+    // The first parsed row is treated as the header row.
+    let raw_headers = parsed_cells_with_positions[0].clone();
+
+    // Calculating the longest row, as CSV might have less headers than max row width
+    let Some(max_number_of_cols) = parsed_cells_with_positions.iter().map(|r| r.len()).max() else {
+        return TableLikeContent::default();
+    };
+
+    // Convert to TableCell objects with buffer positions.
+    // (`buffer_snapshot` is already `&BufferSnapshot`; no extra borrow needed.)
+    let headers = create_table_row(buffer_snapshot, max_number_of_cols, raw_headers);
+
+    let rows = parsed_cells_with_positions
+        .into_iter()
+        .skip(1)
+        .map(|row| create_table_row(buffer_snapshot, max_number_of_cols, row))
+        .collect();
+
+    // Drop the header's line-number entry so `line_numbers` aligns 1:1 with `rows`.
+    let row_line_numbers = line_numbers.into_iter().skip(1).collect();
+
+    TableLikeContent {
+        headers,
+        rows,
+        line_numbers: row_line_numbers,
+        number_of_cols: max_number_of_cols,
+    }
+}
+
+/// Parse CSV and track byte positions for each cell
+/// Parse CSV and track byte positions for each cell.
+///
+/// Returns one `Vec` of `(content, byte_range)` cells per row, plus a
+/// parallel list of source-line info per row. For quoted fields the range
+/// includes the surrounding quotes while the content has them stripped
+/// (escaped `""` becomes `"`). Rows whose fields are all blank are skipped.
+fn parse_csv_with_positions(
+    text: &str,
+) -> (
+    Vec<Vec<(SharedString, std::ops::Range<usize>)>>,
+    Vec<LineNumber>,
+) {
+    // Hand-rolled state machine: `in_quotes` toggles quoted-field mode,
+    // `current_offset` tracks byte position, `field_start_offset` marks where
+    // the in-progress field began, and `row_start_line`/`current_line` track
+    // 1-based source lines (quoted fields may span multiple lines).
+    let mut rows = Vec::new();
+    let mut line_numbers = Vec::new();
+    let mut current_row: Vec<(SharedString, std::ops::Range<usize>)> = Vec::new();
+    let mut current_field = String::new();
+    let mut field_start_offset = 0;
+    let mut current_offset = 0;
+    let mut in_quotes = false;
+    let mut current_line = 1; // 1-based line numbering
+    let mut row_start_line = 1;
+    let mut chars = text.chars().peekable();
+
+    while let Some(ch) = chars.next() {
+        let char_byte_len = ch.len_utf8();
+
+        match ch {
+            '"' => {
+                if in_quotes {
+                    if chars.peek() == Some(&'"') {
+                        // Escaped quote
+                        chars.next();
+                        current_field.push('"');
+                        current_offset += 1; // Skip the second quote
+                    } else {
+                        // End of quoted field
+                        in_quotes = false;
+                    }
+                } else {
+                    // Start of quoted field
+                    in_quotes = true;
+                    if current_field.is_empty() {
+                        // Include the opening quote in the range
+                        field_start_offset = current_offset;
+                    }
+                }
+            }
+            ',' if !in_quotes => {
+                // Field separator. The match guard already guarantees we are
+                // outside quotes here, so no additional `in_quotes` check is
+                // needed when anchoring an empty field's start offset.
+                let field_end_offset = current_offset;
+                if current_field.is_empty() {
+                    field_start_offset = current_offset;
+                }
+                current_row.push((
+                    current_field.clone().into(),
+                    field_start_offset..field_end_offset,
+                ));
+                current_field.clear();
+                field_start_offset = current_offset + char_byte_len;
+            }
+            '\n' => {
+                current_line += 1;
+                if !in_quotes {
+                    // Row separator (only when not inside quotes)
+                    let field_end_offset = current_offset;
+                    if current_field.is_empty() && current_row.is_empty() {
+                        field_start_offset = 0;
+                    }
+                    current_row.push((
+                        current_field.clone().into(),
+                        field_start_offset..field_end_offset,
+                    ));
+                    current_field.clear();
+
+                    // Only add non-empty rows
+                    if !current_row.is_empty()
+                        && !current_row.iter().all(|(field, _)| field.trim().is_empty())
+                    {
+                        rows.push(current_row);
+                        // Add line number info for this row
+                        let line_info = if row_start_line == current_line - 1 {
+                            LineNumber::Line(row_start_line)
+                        } else {
+                            LineNumber::LineRange(row_start_line, current_line - 1)
+                        };
+                        line_numbers.push(line_info);
+                    }
+                    current_row = Vec::new();
+                    row_start_line = current_line;
+                    field_start_offset = current_offset + char_byte_len;
+                } else {
+                    // Newline inside quotes - preserve it
+                    current_field.push(ch);
+                }
+            }
+            '\r' => {
+                if chars.peek() == Some(&'\n') {
+                    // Handle Windows line endings (\r\n): account for \r byte, let \n be handled next
+                    current_offset += char_byte_len;
+                    continue;
+                } else {
+                    // Standalone \r (classic Mac line ending)
+                    current_line += 1;
+                    if !in_quotes {
+                        // Row separator (only when not inside quotes)
+                        let field_end_offset = current_offset;
+                        current_row.push((
+                            current_field.clone().into(),
+                            field_start_offset..field_end_offset,
+                        ));
+                        current_field.clear();
+
+                        // Only add non-empty rows
+                        if !current_row.is_empty()
+                            && !current_row.iter().all(|(field, _)| field.trim().is_empty())
+                        {
+                            rows.push(current_row);
+                            // Add line number info for this row
+                            let line_info = if row_start_line == current_line - 1 {
+                                LineNumber::Line(row_start_line)
+                            } else {
+                                LineNumber::LineRange(row_start_line, current_line - 1)
+                            };
+                            line_numbers.push(line_info);
+                        }
+                        current_row = Vec::new();
+                        row_start_line = current_line;
+                        field_start_offset = current_offset + char_byte_len;
+                    } else {
+                        // \r inside quotes - preserve it
+                        current_field.push(ch);
+                    }
+                }
+            }
+            _ => {
+                if current_field.is_empty() && !in_quotes {
+                    field_start_offset = current_offset;
+                }
+                current_field.push(ch);
+            }
+        }
+
+        current_offset += char_byte_len;
+    }
+
+    // Add the last field and row if not empty
+    if !current_field.is_empty() || !current_row.is_empty() {
+        let field_end_offset = current_offset;
+        current_row.push((
+            current_field.clone().into(),
+            field_start_offset..field_end_offset,
+        ));
+    }
+    if !current_row.is_empty() && !current_row.iter().all(|(field, _)| field.trim().is_empty()) {
+        rows.push(current_row);
+        // Add line number info for the last row
+        let line_info = if row_start_line == current_line {
+            LineNumber::Line(row_start_line)
+        } else {
+            LineNumber::LineRange(row_start_line, current_line)
+        };
+        line_numbers.push(line_info);
+    }
+
+    (rows, line_numbers)
+}
+
+/// Converts one parsed row into a `TableRow`, anchoring each cell to its
+/// buffer range and padding short rows with virtual cells so every row is
+/// `max_number_of_cols` wide.
+fn create_table_row(
+    buffer_snapshot: &BufferSnapshot,
+    max_number_of_cols: usize,
+    row: Vec<(SharedString, std::ops::Range<usize>)>,
+) -> TableRow<TableCell> {
+    let mut raw_row = row
+        .into_iter()
+        .map(|(content, range)| {
+            // `buffer_snapshot` is already a reference; pass it through directly.
+            TableCell::from_buffer_position(content, range.start, range.end, buffer_snapshot)
+        })
+        .collect::<Vec<_>>();
+
+    // Pad to the full width with virtual cells. `resize_with` is a no-op when
+    // the row is already full width, and (unlike subtracting lengths) cannot
+    // underflow if a caller ever passes an over-long row.
+    raw_row.resize_with(max_number_of_cols, || TableCell::Virtual);
+
+    TableRow::from_vec(raw_row, max_number_of_cols)
+}
+
+#[cfg(test)]
+mod tests {
+    // These tests double as the specification for the position-tracking
+    // parser: cell contents have quotes stripped, while byte ranges include
+    // the surrounding quotes of quoted fields.
+    use super::*;
+
+    #[test]
+    fn test_csv_parsing_basic() {
+        let csv_data = "Name,Age,City\nJohn,30,New York\nJane,25,Los Angeles";
+        let parsed = TableLikeContent::from_str(csv_data.to_string());
+
+        assert_eq!(parsed.headers.cols(), 3);
+        assert_eq!(parsed.headers[0].display_value().unwrap().as_ref(), "Name");
+        assert_eq!(parsed.headers[1].display_value().unwrap().as_ref(), "Age");
+        assert_eq!(parsed.headers[2].display_value().unwrap().as_ref(), "City");
+
+        assert_eq!(parsed.rows.len(), 2);
+        assert_eq!(parsed.rows[0][0].display_value().unwrap().as_ref(), "John");
+        assert_eq!(parsed.rows[0][1].display_value().unwrap().as_ref(), "30");
+        assert_eq!(
+            parsed.rows[0][2].display_value().unwrap().as_ref(),
+            "New York"
+        );
+    }
+
+    #[test]
+    fn test_csv_parsing_with_quotes() {
+        // Doubled quotes ("") inside a quoted field unescape to a single quote.
+        let csv_data = r#"Name,Description
+"John Doe","A person with ""special"" characters"
+Jane,"Simple name""#;
+        let parsed = TableLikeContent::from_str(csv_data.to_string());
+
+        assert_eq!(parsed.headers.cols(), 2);
+        assert_eq!(parsed.rows.len(), 2);
+        assert_eq!(
+            parsed.rows[0][1].display_value().unwrap().as_ref(),
+            r#"A person with "special" characters"#
+        );
+    }
+
+    #[test]
+    fn test_csv_parsing_with_newlines_in_quotes() {
+        // Quoted fields may span multiple source lines; the row's line info
+        // must then be a LineRange covering all of them.
+        let csv_data = "Name,Description,Status\n\"John\nDoe\",\"A person with\nmultiple lines\",Active\n\"Jane Smith\",\"Simple\",\"Also\nActive\"";
+        let parsed = TableLikeContent::from_str(csv_data.to_string());
+
+        assert_eq!(parsed.headers.cols(), 3);
+        assert_eq!(parsed.headers[0].display_value().unwrap().as_ref(), "Name");
+        assert_eq!(
+            parsed.headers[1].display_value().unwrap().as_ref(),
+            "Description"
+        );
+        assert_eq!(
+            parsed.headers[2].display_value().unwrap().as_ref(),
+            "Status"
+        );
+
+        assert_eq!(parsed.rows.len(), 2);
+        assert_eq!(
+            parsed.rows[0][0].display_value().unwrap().as_ref(),
+            "John\nDoe"
+        );
+        assert_eq!(
+            parsed.rows[0][1].display_value().unwrap().as_ref(),
+            "A person with\nmultiple lines"
+        );
+        assert_eq!(
+            parsed.rows[0][2].display_value().unwrap().as_ref(),
+            "Active"
+        );
+
+        assert_eq!(
+            parsed.rows[1][0].display_value().unwrap().as_ref(),
+            "Jane Smith"
+        );
+        assert_eq!(
+            parsed.rows[1][1].display_value().unwrap().as_ref(),
+            "Simple"
+        );
+        assert_eq!(
+            parsed.rows[1][2].display_value().unwrap().as_ref(),
+            "Also\nActive"
+        );
+
+        // Check line numbers
+        assert_eq!(parsed.line_numbers.len(), 2);
+        match &parsed.line_numbers[0] {
+            LineNumber::LineRange(start, end) => {
+                assert_eq!(start, &2);
+                assert_eq!(end, &4);
+            }
+            _ => panic!("Expected LineRange for multiline row"),
+        }
+        match &parsed.line_numbers[1] {
+            LineNumber::LineRange(start, end) => {
+                assert_eq!(start, &5);
+                assert_eq!(end, &6);
+            }
+            _ => panic!("Expected LineRange for second multiline row"),
+        }
+    }
+
+    #[test]
+    fn test_empty_csv() {
+        let parsed = TableLikeContent::from_str("".to_string());
+        assert_eq!(parsed.headers.cols(), 0);
+        assert!(parsed.rows.is_empty());
+    }
+
+    #[test]
+    fn test_csv_parsing_quote_offset_handling() {
+        // Byte ranges of quoted cells include the quotes themselves.
+        let csv_data = r#"first,"se,cond",third"#;
+        let (parsed_cells, _) = parse_csv_with_positions(csv_data);
+
+        assert_eq!(parsed_cells.len(), 1); // One row
+        assert_eq!(parsed_cells[0].len(), 3); // Three cells
+
+        // first: 0..5 (no quotes)
+        let (content1, range1) = &parsed_cells[0][0];
+        assert_eq!(content1.as_ref(), "first");
+        assert_eq!(*range1, 0..5);
+
+        // "se,cond": 6..15 (includes quotes in range, content without quotes)
+        let (content2, range2) = &parsed_cells[0][1];
+        assert_eq!(content2.as_ref(), "se,cond");
+        assert_eq!(*range2, 6..15);
+
+        // third: 16..21 (no quotes)
+        let (content3, range3) = &parsed_cells[0][2];
+        assert_eq!(content3.as_ref(), "third");
+        assert_eq!(*range3, 16..21);
+    }
+
+    #[test]
+    fn test_csv_parsing_complex_quotes() {
+        let csv_data = r#"id,"name with spaces","description, with commas",status
+1,"John Doe","A person with ""quotes"" and, commas",active
+2,"Jane Smith","Simple description",inactive"#;
+        let (parsed_cells, _) = parse_csv_with_positions(csv_data);
+
+        assert_eq!(parsed_cells.len(), 3); // header + 2 rows
+
+        // Check header row
+        let header_row = &parsed_cells[0];
+        assert_eq!(header_row.len(), 4);
+
+        // id: 0..2
+        assert_eq!(header_row[0].0.as_ref(), "id");
+        assert_eq!(header_row[0].1, 0..2);
+
+        // "name with spaces": 3..21 (includes quotes)
+        assert_eq!(header_row[1].0.as_ref(), "name with spaces");
+        assert_eq!(header_row[1].1, 3..21);
+
+        // "description, with commas": 22..48 (includes quotes)
+        assert_eq!(header_row[2].0.as_ref(), "description, with commas");
+        assert_eq!(header_row[2].1, 22..48);
+
+        // status: 49..55
+        assert_eq!(header_row[3].0.as_ref(), "status");
+        assert_eq!(header_row[3].1, 49..55);
+
+        // Check first data row
+        let first_row = &parsed_cells[1];
+        assert_eq!(first_row.len(), 4);
+
+        // 1: 56..57
+        assert_eq!(first_row[0].0.as_ref(), "1");
+        assert_eq!(first_row[0].1, 56..57);
+
+        // "John Doe": 58..68 (includes quotes)
+        assert_eq!(first_row[1].0.as_ref(), "John Doe");
+        assert_eq!(first_row[1].1, 58..68);
+
+        // Content should be stripped of quotes but include escaped quotes
+        assert_eq!(
+            first_row[2].0.as_ref(),
+            r#"A person with "quotes" and, commas"#
+        );
+        // The range should include the outer quotes: 69..107
+        assert_eq!(first_row[2].1, 69..107);
+
+        // active: 108..114
+        assert_eq!(first_row[3].0.as_ref(), "active");
+        assert_eq!(first_row[3].1, 108..114);
+    }
+}
+
+impl TableLikeContent {
+    /// Test-only helper: parses `text` as CSV by round-tripping it through an
+    /// in-memory `text::Buffer`, exercising the same code path as production.
+    #[cfg(test)]
+    pub fn from_str(text: String) -> Self {
+        use text::{Buffer, BufferId, ReplicaId};
+
+        let buffer_id = BufferId::new(1).unwrap();
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, text);
+        let snapshot = buffer.snapshot();
+        // `from_buffer` takes the snapshot by reference; `snapshot` is owned.
+        from_buffer(&snapshot)
+    }
+}

crates/csv_preview/src/renderer/preview_view.rs 🔗

@@ -0,0 +1,50 @@
+use std::time::Instant;
+
+use ui::{div, prelude::*};
+
+use crate::{CsvPreviewView, settings::FontType};
+
+impl Render for CsvPreviewView {
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        let theme = cx.theme();
+
+        // Reset per-frame render metrics; the row renderers repopulate this
+        // list as rows are laid out.
+        self.performance_metrics.rendered_indices.clear();
+        let render_prep_start = Instant::now();
+        let table_with_settings = v_flex()
+            .size_full()
+            .p_4()
+            .bg(theme.colors().editor_background)
+            .track_focus(&self.focus_handle)
+            .child({
+                if self.engine.contents.number_of_cols == 0 {
+                    // Empty state: nothing parsed yet (blank buffer or no CSV).
+                    div()
+                        .flex()
+                        .items_center()
+                        .justify_center()
+                        .h_32()
+                        .text_ui(cx)
+                        .map(|div| match self.settings.font_type {
+                            FontType::Ui => div.font_ui(cx),
+                            FontType::Monospace => div.font_buffer(cx),
+                        })
+                        .text_color(cx.theme().colors().text_muted)
+                        .child("No CSV content to display")
+                        .into_any_element()
+                } else {
+                    self.create_table(&self.column_widths.widths, cx)
+                }
+            });
+
+        let render_prep_duration = render_prep_start.elapsed();
+        // Use the `Instant` imported at the top of this file (consistent with
+        // `render_prep_start` above) rather than the fully qualified path.
+        self.performance_metrics
+            .timings
+            .insert("render_prep", (render_prep_duration, Instant::now()));
+
+        div()
+            .relative()
+            .w_full()
+            .h_full()
+            .child(table_with_settings)
+    }
+}

crates/csv_preview/src/renderer/render_table.rs 🔗

@@ -0,0 +1,193 @@
+use crate::types::TableCell;
+use gpui::{AnyElement, Entity};
+use std::ops::Range;
+use ui::Table;
+use ui::TableColumnWidths;
+use ui::TableResizeBehavior;
+use ui::UncheckedTableRow;
+use ui::{DefiniteLength, div, prelude::*};
+
+use crate::{
+    CsvPreviewView,
+    settings::RowRenderMechanism,
+    types::{AnyColumn, DisplayCellId, DisplayRow},
+};
+
+impl CsvPreviewView {
+    /// Creates a new table.
+    /// Column number is derived from the `TableColumnWidths` entity.
+    pub(crate) fn create_table(
+        &self,
+        current_widths: &Entity<TableColumnWidths>,
+        cx: &mut Context<Self>,
+    ) -> AnyElement {
+        let cols = current_widths.read(cx).cols();
+        // NOTE(review): assumes `cols >= 1` (the line-number column always
+        // exists; see `ColumnWidths::replace(cx, cols + 1)` in the parser).
+        // `cols - 1` would underflow otherwise — confirm this invariant holds.
+        let remaining_col_number = cols - 1;
+        let fraction = if remaining_col_number > 0 {
+            1. / remaining_col_number as f32
+        } else {
+            1. // Only the line-number column exists; the value is a placeholder
+            // since it is overwritten by the absolute width below.
+        };
+        let mut widths = vec![DefiniteLength::Fraction(fraction); cols];
+        let line_number_width = self.calculate_row_identifier_column_width();
+        widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into()));
+
+        // Every data column is user-resizable; the line-number column is not.
+        let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols];
+        resize_behaviors[0] = TableResizeBehavior::None;
+
+        self.create_table_inner(
+            self.engine.contents.rows.len(),
+            widths,
+            resize_behaviors,
+            current_widths,
+            cx,
+        )
+    }
+
+    /// Builds the actual `Table` element: header row (row-identifier toggle
+    /// plus one sortable header per CSV column) and a body rendered via
+    /// either a variable-height or uniform list, per the user's settings.
+    fn create_table_inner(
+        &self,
+        row_count: usize,
+        widths: UncheckedTableRow<DefiniteLength>,
+        resize_behaviors: UncheckedTableRow<TableResizeBehavior>,
+        current_widths: &Entity<TableColumnWidths>,
+        cx: &mut Context<Self>,
+    ) -> AnyElement {
+        let cols = widths.len();
+        // Create headers array with interactive elements
+        let mut headers = Vec::with_capacity(cols);
+
+        headers.push(self.create_row_identifier_header(cx));
+
+        // Add the actual CSV headers with sort buttons
+        for i in 0..(cols - 1) {
+            let header_text = self
+                .engine
+                .contents
+                .headers
+                .get(AnyColumn(i))
+                .and_then(|h| h.display_value().cloned())
+                // Fall back to a generated name for columns beyond the header row.
+                .unwrap_or_else(|| format!("Col {}", i + 1).into());
+
+            headers.push(self.create_header_element_with_sort_button(
+                header_text,
+                cx,
+                AnyColumn::from(i),
+            ));
+        }
+
+        Table::new(cols)
+            .interactable(&self.table_interaction_state)
+            .striped()
+            .column_widths(widths)
+            .resizable_columns(resize_behaviors, current_widths, cx)
+            .header(headers)
+            .disable_base_style()
+            .map(|table| {
+                let row_identifier_text_color = cx.theme().colors().editor_line_number;
+                match self.settings.rendering_with {
+                    RowRenderMechanism::VariableList => {
+                        // Variable-height list: one row rendered per callback.
+                        table.variable_row_height_list(row_count, self.list_state.clone(), {
+                            cx.processor(move |this, display_row: usize, _window, cx| {
+                                this.performance_metrics.rendered_indices.push(display_row);
+
+                                let display_row = DisplayRow(display_row);
+                                Self::render_single_table_row(
+                                    this,
+                                    cols,
+                                    display_row,
+                                    row_identifier_text_color,
+                                    cx,
+                                )
+                                .unwrap_or_else(|| panic!("Expected to render a table row"))
+                            })
+                        })
+                    }
+                    RowRenderMechanism::UniformList => {
+                        // Uniform list: a whole visible range rendered per callback.
+                        table.uniform_list("csv-table", row_count, {
+                            cx.processor(move |this, range: Range<usize>, _window, cx| {
+                                // Record all display indices in the range for performance metrics
+                                this.performance_metrics
+                                    .rendered_indices
+                                    .extend(range.clone());
+
+                                range
+                                    .filter_map(|display_index| {
+                                        Self::render_single_table_row(
+                                            this,
+                                            cols,
+                                            DisplayRow(display_index),
+                                            row_identifier_text_color,
+                                            cx,
+                                        )
+                                    })
+                                    .collect()
+                            })
+                        })
+                    }
+                }
+            })
+            .into_any_element()
+    }
+
+    /// Render a single table row
+    ///
+    /// Used both by UniformList and VariableRowHeightList
+    ///
+    /// Returns `None` when `display_row` maps to no data row (e.g. filtered
+    /// out or out of range).
+    fn render_single_table_row(
+        this: &CsvPreviewView,
+        cols: usize,
+        display_row: DisplayRow,
+        row_identifier_text_color: gpui::Hsla,
+        cx: &Context<CsvPreviewView>,
+    ) -> Option<UncheckedTableRow<AnyElement>> {
+        // Get the actual row index from our sorted indices
+        let data_row = this.engine.d2d_mapping().get_data_row(display_row)?;
+        let row = this.engine.contents.get_row(data_row)?;
+
+        let mut elements = Vec::with_capacity(cols);
+        elements.push(this.create_row_identifier_cell(display_row, data_row, cx)?);
+
+        // Remaining columns: actual CSV data
+        for col in (0..this.engine.contents.number_of_cols).map(AnyColumn) {
+            let table_cell = row.expect_get(col);
+
+            // TODO: Introduce `<null>` cell type
+            let cell_content = table_cell.display_value().cloned().unwrap_or_default();
+
+            let display_cell_id = DisplayCellId::new(display_row, col);
+
+            let cell = div().size_full().whitespace_nowrap().text_ellipsis().child(
+                CsvPreviewView::create_selectable_cell(
+                    display_cell_id,
+                    cell_content,
+                    this.settings.vertical_alignment,
+                    this.settings.font_type,
+                    cx,
+                ),
+            );
+
+            elements.push(
+                div()
+                    .size_full()
+                    // Debug overlay: show each cell's buffer anchor positions.
+                    .when(this.settings.show_debug_info, |parent| {
+                        parent.child(div().text_color(row_identifier_text_color).child(
+                            match table_cell {
+                                TableCell::Real { position: pos, .. } => {
+                                    let slv = pos.start.timestamp().value;
+                                    let so = pos.start.offset;
+                                    let elv = pos.end.timestamp().value;
+                                    let eo = pos.end.offset;
+                                    format!("Pos {so}(L{slv})-{eo}(L{elv})")
+                                }
+                                TableCell::Virtual => "Virtual cell".into(),
+                            },
+                        ))
+                    })
+                    .text_ui(cx)
+                    .child(cell)
+                    .into_any_element(),
+            );
+        }
+
+        Some(elements)
+    }
+}

crates/csv_preview/src/renderer/row_identifiers.rs 🔗

@@ -0,0 +1,189 @@
+use ui::{
+    ActiveTheme as _, AnyElement, Button, ButtonCommon as _, ButtonSize, ButtonStyle,
+    Clickable as _, Context, ElementId, FluentBuilder as _, IntoElement as _, ParentElement as _,
+    SharedString, Styled as _, StyledTypography as _, Tooltip, div,
+};
+
+use crate::{
+    CsvPreviewView,
+    settings::{FontType, RowIdentifiers},
+    types::{DataRow, DisplayRow, LineNumber},
+};
+
+/// How a multi-line row's line range is rendered in the identifier column.
+pub enum RowIdentDisplayMode {
+    /// Stacked, one bound per line, e.g.
+    /// ```text
+    /// 1
+    /// ...
+    /// 5
+    /// ```
+    Vertical,
+    /// Inline range, e.g.
+    /// ```text
+    /// 1-5
+    /// ```
+    Horizontal,
+}
+
+impl LineNumber {
+    /// Formats this line number (or line range) for the row-identifier column.
+    ///
+    /// Single lines render as the bare number regardless of `mode`. Ranges
+    /// render stacked (`Vertical`, with `...` elided for adjacent lines) or
+    /// inline (`Horizontal`, as `start-end`).
+    pub fn display_string(&self, mode: RowIdentDisplayMode) -> String {
+        let (start, end) = match *self {
+            LineNumber::Line(line) => return line.to_string(),
+            LineNumber::LineRange(start, end) => (start, end),
+        };
+        match mode {
+            RowIdentDisplayMode::Vertical if start + 1 == end => format!("{start}\n{end}"),
+            RowIdentDisplayMode::Vertical => format!("{start}\n...\n{end}"),
+            RowIdentDisplayMode::Horizontal => format!("{start}-{end}"),
+        }
+    }
+}
+
+impl CsvPreviewView {
+    /// Calculate the optimal width for the row identifier column (line numbers or row numbers).
+    ///
+    /// This ensures the column is wide enough to display the largest identifier comfortably,
+    /// but not wastefully wide for small files.
+    pub(crate) fn calculate_row_identifier_column_width(&self) -> f32 {
+        match self.settings.numbering_type {
+            RowIdentifiers::SrcLines => self.calculate_line_number_width(),
+            RowIdentifiers::RowNum => self.calculate_row_number_width(),
+        }
+    }
+
+    /// Calculate width needed for line numbers (can be multi-line)
+    fn calculate_line_number_width(&self) -> f32 {
+        // Find the maximum line number that could be displayed
+        let max_line_number = self
+            .engine
+            .contents
+            .line_numbers
+            .iter()
+            .map(|ln| match ln {
+                LineNumber::Line(n) => *n,
+                LineNumber::LineRange(_, end) => *end,
+            })
+            .max()
+            .unwrap_or_default();
+
+        // NOTE(review): horizontal ranges ("123-456", shown when multiline
+        // cells are disabled) may need roughly double this width — revisit.
+        Self::identifier_width_for_digits(Self::digit_count(max_line_number as usize))
+    }
+
+    /// Calculate width needed for sequential row numbers
+    fn calculate_row_number_width(&self) -> f32 {
+        Self::identifier_width_for_digits(Self::digit_count(self.engine.contents.rows.len()))
+    }
+
+    /// Number of decimal digits needed to print `n` (at least 1 — `0` is one
+    /// digit). Integer arithmetic avoids the precision loss of the previous
+    /// `(n as f32).log10()` approach, which can miscount for large values.
+    fn digit_count(mut n: usize) -> usize {
+        let mut digits = 1;
+        while n >= 10 {
+            digits += 1;
+            n /= 10;
+        }
+        digits
+    }
+
+    /// Shared width formula for the identifier column: per-digit width plus
+    /// padding, clamped to a comfortable minimum.
+    fn identifier_width_for_digits(digit_count: usize) -> f32 {
+        let char_width_px = 9.0; // TODO: get real width of the characters
+        let padding = 20.0;
+        let min_width = 60.0;
+        ((digit_count as f32) * char_width_px + padding).max(min_width)
+    }
+
+    /// Header cell for the first column: a button that toggles between
+    /// source-line numbering and sequential row numbering.
+    pub(crate) fn create_row_identifier_header(
+        &self,
+        cx: &mut Context<'_, CsvPreviewView>,
+    ) -> AnyElement {
+        // First column: row identifier (clickable to toggle between Lines and Rows)
+        let row_identifier_text = match self.settings.numbering_type {
+            RowIdentifiers::SrcLines => "Lines",
+            RowIdentifiers::RowNum => "Rows",
+        };
+
+        let view = cx.entity();
+        div()
+            .map(|div| match self.settings.font_type {
+                FontType::Ui => div.font_ui(cx),
+                FontType::Monospace => div.font_buffer(cx),
+            })
+            .child(
+                Button::new(
+                    ElementId::Name("row-identifier-toggle".into()),
+                    row_identifier_text,
+                )
+                .style(ButtonStyle::Subtle)
+                .size(ButtonSize::Compact)
+                .tooltip(Tooltip::text(
+                    "Toggle between: file line numbers or sequential row numbers",
+                ))
+                .on_click(move |_event, _window, cx| {
+                    view.update(cx, |this, cx| {
+                        this.settings.numbering_type = match this.settings.numbering_type {
+                            RowIdentifiers::SrcLines => RowIdentifiers::RowNum,
+                            RowIdentifiers::RowNum => RowIdentifiers::SrcLines,
+                        };
+                        cx.notify();
+                    });
+                }),
+            )
+            .into_any_element()
+    }
+
+    /// Identifier cell for one body row: either the row's source line(s) or
+    /// its 1-based display position, depending on settings. Returns `None`
+    /// when the row has no recorded line-number info.
+    pub(crate) fn create_row_identifier_cell(
+        &self,
+        display_row: DisplayRow,
+        data_row: DataRow,
+        cx: &Context<'_, CsvPreviewView>,
+    ) -> Option<AnyElement> {
+        let row_identifier: SharedString = match self.settings.numbering_type {
+            RowIdentifiers::SrcLines => self
+                .engine
+                .contents
+                .line_numbers
+                .get(*data_row)?
+                .display_string(if self.settings.multiline_cells_enabled {
+                    RowIdentDisplayMode::Vertical
+                } else {
+                    RowIdentDisplayMode::Horizontal
+                })
+                .into(),
+            RowIdentifiers::RowNum => (*display_row + 1).to_string().into(),
+        };
+
+        Some(
+            div()
+                .flex()
+                .px_1()
+                .border_b_1()
+                .border_color(cx.theme().colors().border_variant)
+                .h_full()
+                .text_ui(cx)
+                // Row identifiers are always centered
+                .items_center()
+                .justify_end()
+                .map(|div| match self.settings.font_type {
+                    FontType::Ui => div.font_ui(cx),
+                    FontType::Monospace => div.font_buffer(cx),
+                })
+                .child(row_identifier)
+                .into_any_element(),
+        )
+    }
+}

crates/csv_preview/src/renderer/table_cell.rs 🔗

@@ -0,0 +1,72 @@
+//! Table Cell Rendering
+
+use gpui::{AnyElement, ElementId};
+use ui::{SharedString, Tooltip, div, prelude::*};
+
+use crate::{
+    CsvPreviewView,
+    settings::{FontType, VerticalAlignment},
+    types::DisplayCellId,
+};
+
+impl CsvPreviewView {
+    /// Create selectable table cell with mouse event handlers.
+    pub fn create_selectable_cell(
+        display_cell_id: DisplayCellId,
+        cell_content: SharedString,
+        vertical_alignment: VerticalAlignment,
+        font_type: FontType,
+        cx: &Context<CsvPreviewView>,
+    ) -> AnyElement {
+        create_table_cell(
+            display_cell_id,
+            cell_content,
+            vertical_alignment,
+            font_type,
+            cx,
+        )
+        // Mouse events handlers will be here
+        .into_any_element()
+    }
+}
+
+/// Create styled table cell div element.
+fn create_table_cell(
+    display_cell_id: DisplayCellId,
+    cell_content: SharedString,
+    vertical_alignment: VerticalAlignment,
+    font_type: FontType,
+    cx: &Context<'_, CsvPreviewView>,
+) -> gpui::Stateful<Div> {
+    div()
+        .id(ElementId::NamedInteger(
+            format!(
+                "csv-display-cell-{}-{}",
+                *display_cell_id.row, *display_cell_id.col
+            )
+            .into(),
+            0,
+        ))
+        .cursor_pointer()
+        .flex()
+        .h_full()
+        .px_1()
+        .bg(cx.theme().colors().editor_background)
+        .border_b_1()
+        .border_r_1()
+        .border_color(cx.theme().colors().border_variant)
+        .map(|div| match vertical_alignment {
+            VerticalAlignment::Top => div.items_start(),
+            VerticalAlignment::Center => div.items_center(),
+        })
+        .map(|div| match vertical_alignment {
+            VerticalAlignment::Top => div.content_start(),
+            VerticalAlignment::Center => div.content_center(),
+        })
+        .map(|div| match font_type {
+            FontType::Ui => div.font_ui(cx),
+            FontType::Monospace => div.font_buffer(cx),
+        })
+        .tooltip(Tooltip::text(cell_content.clone()))
+        .child(div().child(cell_content))
+}

crates/csv_preview/src/renderer/table_header.rs 🔗

@@ -0,0 +1,94 @@
+use gpui::ElementId;
+use ui::{Tooltip, prelude::*};
+
+use crate::{
+    CsvPreviewView,
+    settings::FontType,
+    table_data_engine::sorting_by_column::{AppliedSorting, SortDirection},
+    types::AnyColumn,
+};
+
+impl CsvPreviewView {
+    /// Create a sortable data-column header, with the header text on the left and a sort button on the right
+    pub(crate) fn create_header_element_with_sort_button(
+        &self,
+        header_text: SharedString,
+        cx: &mut Context<'_, CsvPreviewView>,
+        col_idx: AnyColumn,
+    ) -> AnyElement {
+        // CSV data columns: text + filter/sort buttons
+        h_flex()
+            .justify_between()
+            .items_center()
+            .w_full()
+            .map(|div| match self.settings.font_type {
+                FontType::Ui => div.font_ui(cx),
+                FontType::Monospace => div.font_buffer(cx),
+            })
+            .child(div().child(header_text))
+            .child(h_flex().gap_1().child(self.create_sort_button(cx, col_idx)))
+            .into_any_element()
+    }
+
+    fn create_sort_button(
+        &self,
+        cx: &mut Context<'_, CsvPreviewView>,
+        col_idx: AnyColumn,
+    ) -> Button {
+        let sort_btn = Button::new(
+            ElementId::NamedInteger("sort-button".into(), col_idx.get() as u64),
+            match self.engine.applied_sorting {
+                Some(ordering) if ordering.col_idx == col_idx => match ordering.direction {
+                    SortDirection::Asc => "↓",
+                    SortDirection::Desc => "↑",
+                },
+                _ => "↕", // Unsorted/available for sorting
+            },
+        )
+        .size(ButtonSize::Compact)
+        .style(
+            if self
+                .engine
+                .applied_sorting
+                .is_some_and(|o| o.col_idx == col_idx)
+            {
+                ButtonStyle::Filled
+            } else {
+                ButtonStyle::Subtle
+            },
+        )
+        .tooltip(Tooltip::text(match self.engine.applied_sorting {
+            Some(ordering) if ordering.col_idx == col_idx => match ordering.direction {
+                SortDirection::Asc => "Sorted A-Z. Click to sort Z-A",
+                SortDirection::Desc => "Sorted Z-A. Click to disable sorting",
+            },
+            _ => "Not sorted. Click to sort A-Z",
+        }))
+        .on_click(cx.listener(move |this, _event, _window, cx| {
+            let new_sorting = match this.engine.applied_sorting {
+                Some(ordering) if ordering.col_idx == col_idx => {
+                    // Same column clicked - cycle through states
+                    match ordering.direction {
+                        SortDirection::Asc => Some(AppliedSorting {
+                            col_idx,
+                            direction: SortDirection::Desc,
+                        }),
+                        SortDirection::Desc => None, // Clear sorting
+                    }
+                }
+                _ => {
+                    // Different column or no sorting - start with ascending
+                    Some(AppliedSorting {
+                        col_idx,
+                        direction: SortDirection::Asc,
+                    })
+                }
+            };
+
+            this.engine.applied_sorting = new_sorting;
+            this.apply_sort();
+            cx.notify();
+        }));
+        sort_btn
+    }
+}

crates/csv_preview/src/settings.rs 🔗

@@ -0,0 +1,46 @@
+#[derive(Default, Clone, Copy)]
+pub enum RowRenderMechanism {
+    /// Default behaviour
+    #[default]
+    VariableList,
+    /// More performance oriented, but all rows are same height
+    #[allow(dead_code)] // Will be used when settings ui is added
+    UniformList,
+}
+
+#[derive(Default, Clone, Copy)]
+pub enum VerticalAlignment {
+    /// Align text to the top of cells
+    #[default]
+    Top,
+    /// Center text vertically in cells
+    Center,
+}
+
+#[derive(Default, Clone, Copy)]
+pub enum FontType {
+    /// Use the default UI font
+    #[default]
+    Ui,
+    /// Use monospace font (same as buffer/editor font)
+    Monospace,
+}
+
+#[derive(Default, Clone, Copy)]
+pub enum RowIdentifiers {
+    /// Show original line numbers from CSV file
+    #[default]
+    SrcLines,
+    /// Show sequential row numbers starting from 1
+    RowNum,
+}
+
+#[derive(Clone, Default)]
+pub(crate) struct CsvPreviewSettings {
+    pub(crate) rendering_with: RowRenderMechanism,
+    pub(crate) vertical_alignment: VerticalAlignment,
+    pub(crate) font_type: FontType,
+    pub(crate) numbering_type: RowIdentifiers,
+    pub(crate) show_debug_info: bool,
+    pub(crate) multiline_cells_enabled: bool,
+}

crates/csv_preview/src/table_data_engine.rs 🔗

@@ -0,0 +1,90 @@
+//! This module defines core operations and config of tabular data view (CSV table)
+//! It operates in 2 coordinate systems:
+//! - `DataCellId` - indices of src data cells
+//! - `DisplayCellId` - indices of data after applied transformations like sorting/filtering, which is used to render cell on the screen
+//!
+//! It's designed to contain core logic of operations without relying on `CsvPreviewView`, context or window handles.
+
+use std::{collections::HashMap, sync::Arc};
+
+use ui::table_row::TableRow;
+
+use crate::{
+    table_data_engine::sorting_by_column::{AppliedSorting, sort_data_rows},
+    types::{DataRow, DisplayRow, TableCell, TableLikeContent},
+};
+
+pub mod sorting_by_column;
+
+#[derive(Default)]
+pub(crate) struct TableDataEngine {
+    pub applied_sorting: Option<AppliedSorting>,
+    d2d_mapping: DisplayToDataMapping,
+    pub contents: TableLikeContent,
+}
+
+impl TableDataEngine {
+    pub(crate) fn d2d_mapping(&self) -> &DisplayToDataMapping {
+        &self.d2d_mapping
+    }
+
+    pub(crate) fn apply_sort(&mut self) {
+        self.d2d_mapping
+            .apply_sorting(self.applied_sorting, &self.contents.rows);
+        self.d2d_mapping.merge_mappings();
+    }
+
+    /// Applies sorting and filtering to the data and produces display to data mapping
+    pub(crate) fn calculate_d2d_mapping(&mut self) {
+        self.d2d_mapping
+            .apply_sorting(self.applied_sorting, &self.contents.rows);
+        self.d2d_mapping.merge_mappings();
+    }
+}
+
+/// Relation of Display (rendered) rows to Data (src) rows with applied transformations
+/// Transformations applied:
+/// - sorting by column
+#[derive(Debug, Default)]
+pub struct DisplayToDataMapping {
+    /// All rows sorted, regardless of applied filtering. Applied every time sorting changes
+    pub sorted_rows: Vec<DataRow>,
+    /// Filtered and sorted rows. Cheaply recomputed from `sorted_rows` whenever sorting or filtering changes
+    pub mapping: Arc<HashMap<DisplayRow, DataRow>>,
+}
+
+impl DisplayToDataMapping {
+    /// Get the data row for a given display row
+    pub fn get_data_row(&self, display_row: DisplayRow) -> Option<DataRow> {
+        self.mapping.get(&display_row).copied()
+    }
+
+    /// Get the number of filtered rows
+    pub fn visible_row_count(&self) -> usize {
+        self.mapping.len()
+    }
+
+    /// Computes sorting
+    fn apply_sorting(&mut self, sorting: Option<AppliedSorting>, rows: &[TableRow<TableCell>]) {
+        let data_rows: Vec<DataRow> = (0..rows.len()).map(DataRow).collect();
+
+        let sorted_rows = if let Some(sorting) = sorting {
+            sort_data_rows(&rows, data_rows, sorting)
+        } else {
+            data_rows
+        };
+
+        self.sorted_rows = sorted_rows;
+    }
+
+    /// Take pre-computed sorting and filtering results, and apply them to the mapping
+    fn merge_mappings(&mut self) {
+        self.mapping = Arc::new(
+            self.sorted_rows
+                .iter()
+                .enumerate()
+                .map(|(display, data)| (DisplayRow(display), *data))
+                .collect(),
+        );
+    }
+}

crates/csv_preview/src/table_data_engine/sorting_by_column.rs 🔗

@@ -0,0 +1,49 @@
+use ui::table_row::TableRow;
+
+use crate::types::{AnyColumn, DataRow, TableCell};
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum SortDirection {
+    Asc,
+    Desc,
+}
+
+/// Config of the currently active sorting
+#[derive(Debug, Clone, Copy)]
+pub struct AppliedSorting {
+    /// 0-based column index
+    pub col_idx: AnyColumn,
+    /// Direction of sorting (asc/desc)
+    pub direction: SortDirection,
+}
+
+pub fn sort_data_rows(
+    content_rows: &[TableRow<TableCell>],
+    mut data_row_ids: Vec<DataRow>,
+    sorting: AppliedSorting,
+) -> Vec<DataRow> {
+    data_row_ids.sort_by(|&a, &b| {
+        let row_a = &content_rows[*a];
+        let row_b = &content_rows[*b];
+
+        // TODO: Decide how to handle nulls (on top or on bottom)
+        let val_a = row_a
+            .get(sorting.col_idx)
+            .and_then(|tc| tc.display_value())
+            .map(|tc| tc.as_str())
+            .unwrap_or("");
+        let val_b = row_b
+            .get(sorting.col_idx)
+            .and_then(|tc| tc.display_value())
+            .map(|tc| tc.as_str())
+            .unwrap_or("");
+
+        let cmp = val_a.cmp(val_b);
+        match sorting.direction {
+            SortDirection::Asc => cmp,
+            SortDirection::Desc => cmp.reverse(),
+        }
+    });
+
+    data_row_ids
+}

crates/csv_preview/src/types.rs 🔗

@@ -0,0 +1,17 @@
+use std::fmt::Debug;
+
+pub use coordinates::*;
+mod coordinates;
+pub use table_cell::*;
+mod table_cell;
+pub use table_like_content::*;
+mod table_like_content;
+
+/// Line number information for CSV rows
+#[derive(Debug, Clone, Copy)]
+pub enum LineNumber {
+    /// Single line row
+    Line(usize),
+    /// Multi-line row spanning from start to end line. Incluisive
+    LineRange(usize, usize),
+}

crates/csv_preview/src/types/coordinates.rs 🔗

@@ -0,0 +1,127 @@
+//! Type definitions for CSV table coordinates and cell identifiers.
+//!
+//! Provides newtypes for self-documenting coordinate systems:
+//! - Display coordinates: Visual positions in rendered table
+//! - Data coordinates: Original CSV data positions
+
+use std::ops::Deref;
+
+///// Rows /////
+/// Visual row position in rendered table.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct DisplayRow(pub usize);
+
+impl DisplayRow {
+    /// Create a new display row
+    pub fn new(row: usize) -> Self {
+        Self(row)
+    }
+
+    /// Get the inner row value
+    pub fn get(self) -> usize {
+        self.0
+    }
+}
+
+impl Deref for DisplayRow {
+    type Target = usize;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+/// Original CSV row position.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct DataRow(pub usize);
+
+impl DataRow {
+    /// Create a new data row
+    pub fn new(row: usize) -> Self {
+        Self(row)
+    }
+}
+
+impl Deref for DataRow {
+    type Target = usize;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl From<usize> for DisplayRow {
+    fn from(row: usize) -> Self {
+        DisplayRow::new(row)
+    }
+}
+
+impl From<usize> for DataRow {
+    fn from(row: usize) -> Self {
+        DataRow::new(row)
+    }
+}
+
+///// Columns /////
+/// Data column position in CSV table. 0-based
+///
+/// Currently represents both display and data coordinate systems since
+/// column reordering is not yet implemented. When column reordering is added,
+/// this will need to be split into `DisplayColumn` and `DataColumn` types.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct AnyColumn(pub usize);
+
+impl AnyColumn {
+    /// Create a new column ID
+    pub fn new(col: usize) -> Self {
+        Self(col)
+    }
+
+    /// Get the inner column value
+    pub fn get(self) -> usize {
+        self.0
+    }
+}
+
+impl Deref for AnyColumn {
+    type Target = usize;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl From<usize> for AnyColumn {
+    fn from(col: usize) -> Self {
+        AnyColumn::new(col)
+    }
+}
+
+impl From<AnyColumn> for usize {
+    fn from(value: AnyColumn) -> Self {
+        *value
+    }
+}
+
+///// Cells /////
+/// Visual cell position in rendered table.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DisplayCellId {
+    pub row: DisplayRow,
+    pub col: AnyColumn,
+}
+
+impl DisplayCellId {
+    /// Create a new display cell ID
+    pub fn new(row: impl Into<DisplayRow>, col: impl Into<AnyColumn>) -> Self {
+        Self {
+            row: row.into(),
+            col: col.into(),
+        }
+    }
+
+    /// Returns (row, column)
+    pub fn to_raw(&self) -> (usize, usize) {
+        (self.row.0, self.col.0)
+    }
+}

crates/csv_preview/src/types/table_cell.rs 🔗

@@ -0,0 +1,54 @@
+use text::Anchor;
+use ui::SharedString;
+
+/// Position of a cell within the source CSV buffer
+#[derive(Clone, Debug)]
+pub struct CellContentSpan {
+    /// Start anchor of the cell content in the source buffer
+    pub start: Anchor,
+    /// End anchor of the cell content in the source buffer
+    pub end: Anchor,
+}
+
+/// A table cell with its content and position in the source buffer
+#[derive(Clone, Debug)]
+pub enum TableCell {
+    /// Cell existing in the CSV
+    Real {
+        /// Position of this cell in the source buffer
+        position: CellContentSpan,
+        /// Cached display value (for performance)
+        cached_value: SharedString,
+    },
+    /// Virtual cell, created to pad malformed row
+    Virtual,
+}
+
+impl TableCell {
+    /// Create a TableCell with buffer position tracking
+    pub fn from_buffer_position(
+        content: SharedString,
+        start_offset: usize,
+        end_offset: usize,
+        buffer_snapshot: &text::BufferSnapshot,
+    ) -> Self {
+        let start_anchor = buffer_snapshot.anchor_before(start_offset);
+        let end_anchor = buffer_snapshot.anchor_after(end_offset);
+
+        Self::Real {
+            position: CellContentSpan {
+                start: start_anchor,
+                end: end_anchor,
+            },
+            cached_value: content,
+        }
+    }
+
+    /// Get the display value for this cell
+    pub fn display_value(&self) -> Option<&SharedString> {
+        match self {
+            TableCell::Real { cached_value, .. } => Some(cached_value),
+            TableCell::Virtual => None,
+        }
+    }
+}

crates/csv_preview/src/types/table_like_content.rs 🔗

@@ -0,0 +1,32 @@
+use ui::table_row::TableRow;
+
+use crate::types::{DataRow, LineNumber, TableCell};
+
+/// Generic container struct of table-like data (CSV, TSV, etc)
+#[derive(Clone)]
+pub struct TableLikeContent {
+    /// Number of data columns.
+    /// Defines table width used to validate `TableRow` on creation
+    pub number_of_cols: usize,
+    pub headers: TableRow<TableCell>,
+    pub rows: Vec<TableRow<TableCell>>,
+    /// Follows the same indices as `rows`
+    pub line_numbers: Vec<LineNumber>,
+}
+
+impl Default for TableLikeContent {
+    fn default() -> Self {
+        Self {
+            number_of_cols: 0,
+            headers: TableRow::<TableCell>::from_vec(vec![], 0),
+            rows: vec![],
+            line_numbers: vec![],
+        }
+    }
+}
+
+impl TableLikeContent {
+    pub(crate) fn get_row(&self, data_row: DataRow) -> Option<&TableRow<TableCell>> {
+        self.rows.get(*data_row)
+    }
+}

crates/debugger_ui/Cargo.toml 🔗

@@ -64,7 +64,6 @@ settings.workspace = true
 sysinfo.workspace = true
 task.workspace = true
 tasks_ui.workspace = true
-telemetry.workspace = true
 terminal_view.workspace = true
 text.workspace = true
 theme.workspace = true

crates/debugger_ui/src/debugger_panel.rs 🔗

@@ -35,6 +35,7 @@ use tree_sitter::{Query, StreamingIterator as _};
 use ui::{
     ContextMenu, Divider, PopoverMenu, PopoverMenuHandle, SplitButton, Tab, Tooltip, prelude::*,
 };
+use util::redact::redact_command;
 use util::rel_path::RelPath;
 use util::{ResultExt, debug_panic, maybe};
 use workspace::SplitDirection;
@@ -43,7 +44,7 @@ use workspace::{
     Item, Pane, Workspace,
     dock::{DockPosition, Panel, PanelEvent},
 };
-use zed_actions::ToggleFocus;
+use zed_actions::debug_panel::ToggleFocus;
 
 pub struct DebuggerHistoryFeatureFlag;
 
@@ -275,12 +276,13 @@ impl DebugPanel {
 
             async move |_, cx| {
                 if let Err(error) = task.await {
-                    log::error!("{error:#}");
+                    let redacted_error = redact_command(&format!("{error:#}"));
+                    log::error!("{redacted_error}");
                     session
                         .update(cx, |session, cx| {
                             session
                                 .console_output(cx)
-                                .unbounded_send(format!("error: {:#}", error))
+                                .unbounded_send(format!("error: {:#}", redacted_error))
                                 .ok();
                             session.shutdown(cx)
                         })

crates/debugger_ui/src/debugger_ui.rs 🔗

@@ -8,12 +8,12 @@ use project::debugger::{self, breakpoint_store::SourceBreakpoint, session::Threa
 use schemars::JsonSchema;
 use serde::Deserialize;
 use session::DebugSession;
-use stack_trace_view::StackTraceView;
+
 use tasks_ui::{Spawn, TaskOverrides};
 use ui::{FluentBuilder, InteractiveElement};
 use util::maybe;
-use workspace::{ItemHandle, ShutdownDebugAdapters, Workspace};
-use zed_actions::{Toggle, ToggleFocus};
+use workspace::{ShutdownDebugAdapters, Workspace};
+use zed_actions::debug_panel::{Toggle, ToggleFocus};
 
 pub mod attach_modal;
 pub mod debugger_panel;
@@ -21,7 +21,6 @@ mod dropdown_menus;
 mod new_process_modal;
 mod persistence;
 pub(crate) mod session;
-mod stack_trace_view;
 
 #[cfg(any(test, feature = "test-support"))]
 pub mod tests;
@@ -70,8 +69,6 @@ actions!(
         FocusLoadedSources,
         /// Focuses on the terminal panel.
         FocusTerminal,
-        /// Shows the stack trace for the current thread.
-        ShowStackTrace,
         /// Toggles the thread picker dropdown.
         ToggleThreadPicker,
         /// Toggles the session picker dropdown.
@@ -207,39 +204,6 @@ pub fn init(cx: &mut App) {
                                 .ok();
                         }
                     })
-                    .on_action(cx.listener(
-                        |workspace, _: &ShowStackTrace, window, cx| {
-                            let Some(debug_panel) = workspace.panel::<DebugPanel>(cx) else {
-                                return;
-                            };
-
-                            if let Some(existing) = workspace.item_of_type::<StackTraceView>(cx) {
-                                let is_active = workspace
-                                    .active_item(cx)
-                                    .is_some_and(|item| item.item_id() == existing.item_id());
-                                workspace.activate_item(&existing, true, !is_active, window, cx);
-                            } else {
-                                let Some(active_session) = debug_panel.read(cx).active_session()
-                                else {
-                                    return;
-                                };
-
-                                let project = workspace.project();
-
-                                let stack_trace_view = active_session.update(cx, |session, cx| {
-                                    session.stack_trace_view(project, window, cx).clone()
-                                });
-
-                                workspace.add_item_to_active_pane(
-                                    Box::new(stack_trace_view),
-                                    None,
-                                    true,
-                                    window,
-                                    cx,
-                                );
-                            }
-                        },
-                    ))
                 })
                 .when(supports_detach, |div| {
                     let active_item = active_item.clone();

crates/debugger_ui/src/session.rs 🔗

@@ -1,14 +1,13 @@
 pub mod running;
 
-use crate::{StackTraceView, persistence::SerializedLayout, session::running::DebugTerminal};
+use crate::{persistence::SerializedLayout, session::running::DebugTerminal};
 use dap::client::SessionId;
 use gpui::{App, Axis, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
 use project::debugger::session::Session;
-use project::worktree_store::WorktreeStore;
+
 use project::{Project, debugger::session::SessionQuirks};
 use rpc::proto;
 use running::RunningState;
-use std::cell::OnceCell;
 use ui::prelude::*;
 use workspace::{
     CollaboratorId, FollowableItem, ViewId, Workspace,
@@ -19,9 +18,6 @@ pub struct DebugSession {
     remote_id: Option<workspace::ViewId>,
     pub(crate) running_state: Entity<RunningState>,
     pub(crate) quirks: SessionQuirks,
-    stack_trace_view: OnceCell<Entity<StackTraceView>>,
-    _worktree_store: WeakEntity<WorktreeStore>,
-    workspace: WeakEntity<Workspace>,
 }
 
 impl DebugSession {
@@ -49,13 +45,10 @@ impl DebugSession {
         });
         let quirks = session.read(cx).quirks();
 
-        cx.new(|cx| Self {
+        cx.new(|_| Self {
             remote_id: None,
             running_state,
             quirks,
-            stack_trace_view: OnceCell::new(),
-            _worktree_store: project.read(cx).worktree_store().downgrade(),
-            workspace,
         })
     }
 
@@ -63,30 +56,6 @@ impl DebugSession {
         self.running_state.read(cx).session_id()
     }
 
-    pub(crate) fn stack_trace_view(
-        &mut self,
-        project: &Entity<Project>,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> &Entity<StackTraceView> {
-        let workspace = self.workspace.clone();
-        let running_state = self.running_state.clone();
-
-        self.stack_trace_view.get_or_init(|| {
-            let stackframe_list = running_state.read(cx).stack_frame_list().clone();
-
-            cx.new(|cx| {
-                StackTraceView::new(
-                    workspace.clone(),
-                    project.clone(),
-                    stackframe_list,
-                    window,
-                    cx,
-                )
-            })
-        })
-    }
-
     pub fn session(&self, cx: &App) -> Entity<Session> {
         self.running_state.read(cx).session().clone()
     }

crates/debugger_ui/src/session/running.rs 🔗

@@ -356,11 +356,11 @@ pub(crate) fn new_debugger_pane(
                     debug_assert!(_previous_subscription.is_none());
                     running
                         .panes
-                        .split(&this_pane, &new_pane, split_direction, cx)?;
-                    anyhow::Ok(new_pane)
+                        .split(&this_pane, &new_pane, split_direction, cx);
+                    new_pane
                 });
 
-                match new_pane.and_then(|r| r) {
+                match new_pane {
                     Ok(new_pane) => {
                         move_item(
                             &source,

crates/debugger_ui/src/session/running/memory_view.rs 🔗

@@ -133,7 +133,7 @@ impl ViewState {
     fn set_offset(&mut self, point: Point<Pixels>) {
         if point.y >= -Pixels::ZERO {
             self.schedule_scroll_up();
-        } else if point.y <= -self.scroll_handle.max_offset().height {
+        } else if point.y <= -self.scroll_handle.max_offset().y {
             self.schedule_scroll_down();
         }
         self.scroll_handle.set_offset(point);
@@ -141,7 +141,7 @@ impl ViewState {
 }
 
 impl ScrollableHandle for ViewStateHandle {
-    fn max_offset(&self) -> gpui::Size<Pixels> {
+    fn max_offset(&self) -> gpui::Point<Pixels> {
         self.0.borrow().scroll_handle.max_offset()
     }
 

crates/debugger_ui/src/session/running/stack_frame_list.rs 🔗

@@ -15,13 +15,13 @@ use util::{
     paths::{PathStyle, is_absolute},
 };
 
-use crate::{StackTraceView, ToggleUserFrames};
+use crate::ToggleUserFrames;
 use language::PointUtf16;
 use project::debugger::breakpoint_store::ActiveStackFrame;
 use project::debugger::session::{Session, SessionEvent, StackFrame, ThreadStatus};
 use project::{ProjectItem, ProjectPath};
 use ui::{Tooltip, WithScrollbar, prelude::*};
-use workspace::{ItemHandle, Workspace, WorkspaceId};
+use workspace::{Workspace, WorkspaceId};
 
 use super::RunningState;
 
@@ -154,6 +154,7 @@ impl StackFrameList {
         &self.entries
     }
 
+    #[cfg(test)]
     pub(crate) fn flatten_entries(
         &self,
         show_collapsed: bool,
@@ -437,14 +438,7 @@ impl StackFrameList {
                             .project_path(cx)
                             .context("Could not select a stack frame for unnamed buffer")?;
 
-                        let open_preview = !workspace
-                            .item_of_type::<StackTraceView>(cx)
-                            .map(|viewer| {
-                                workspace
-                                    .active_item(cx)
-                                    .is_some_and(|item| item.item_id() == viewer.item_id())
-                            })
-                            .unwrap_or_default();
+                        let open_preview = true;
 
                         let active_debug_line_pane = workspace
                             .project()

crates/debugger_ui/src/stack_trace_view.rs 🔗

@@ -1,458 +0,0 @@
-use std::{
-    any::{Any, TypeId},
-    sync::Arc,
-};
-
-use collections::HashMap;
-use dap::StackFrameId;
-use editor::{
-    Anchor, Bias, DebugStackFrameLine, Editor, EditorEvent, ExcerptId, ExcerptRange, HighlightKey,
-    MultiBuffer, RowHighlightOptions, SelectionEffects, ToPoint, scroll::Autoscroll,
-};
-use gpui::{
-    App, AppContext, Entity, EventEmitter, Focusable, IntoElement, Render, SharedString,
-    Subscription, Task, WeakEntity, Window,
-};
-use language::{BufferSnapshot, Capability, Point, Selection, SelectionGoal, TreeSitterOptions};
-use project::{Project, ProjectPath};
-use ui::{ActiveTheme as _, Context, ParentElement as _, Styled as _, div};
-use util::ResultExt as _;
-use workspace::{
-    Item, ItemHandle as _, ItemNavHistory, ToolbarItemLocation, Workspace,
-    item::{BreadcrumbText, ItemEvent, SaveOptions},
-    searchable::SearchableItemHandle,
-};
-
-use crate::session::running::stack_frame_list::{StackFrameList, StackFrameListEvent};
-use anyhow::Result;
-
-pub(crate) struct StackTraceView {
-    editor: Entity<Editor>,
-    multibuffer: Entity<MultiBuffer>,
-    workspace: WeakEntity<Workspace>,
-    project: Entity<Project>,
-    stack_frame_list: Entity<StackFrameList>,
-    selected_stack_frame_id: Option<StackFrameId>,
-    highlights: Vec<(StackFrameId, Anchor)>,
-    excerpt_for_frames: collections::HashMap<ExcerptId, StackFrameId>,
-    refresh_task: Option<Task<Result<()>>>,
-    _subscription: Option<Subscription>,
-}
-
-impl StackTraceView {
-    pub(crate) fn new(
-        workspace: WeakEntity<Workspace>,
-        project: Entity<Project>,
-        stack_frame_list: Entity<StackFrameList>,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> Self {
-        telemetry::event!("Stack Trace View Deployed");
-
-        let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
-        let editor = cx.new(|cx| {
-            let mut editor =
-                Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
-            editor.set_vertical_scroll_margin(5, cx);
-            editor
-        });
-
-        cx.subscribe_in(&editor, window, |this, editor, event, window, cx| {
-            if let EditorEvent::SelectionsChanged { local: true } = event {
-                let excerpt_id = editor.update(cx, |editor, cx| {
-                    let position: Point = editor
-                        .selections
-                        .newest(&editor.display_snapshot(cx))
-                        .head();
-
-                    editor
-                        .snapshot(window, cx)
-                        .buffer_snapshot()
-                        .excerpt_containing(position..position)
-                        .map(|excerpt| excerpt.id())
-                });
-
-                if let Some(stack_frame_id) = excerpt_id
-                    .and_then(|id| this.excerpt_for_frames.get(&id))
-                    .filter(|id| Some(**id) != this.selected_stack_frame_id)
-                {
-                    this.stack_frame_list.update(cx, |list, cx| {
-                        list.go_to_stack_frame(*stack_frame_id, window, cx).detach();
-                    });
-                }
-            }
-        })
-        .detach();
-
-        cx.subscribe_in(
-            &stack_frame_list,
-            window,
-            |this, stack_frame_list, event, window, cx| match event {
-                StackFrameListEvent::BuiltEntries => {
-                    this.selected_stack_frame_id =
-                        stack_frame_list.read(cx).opened_stack_frame_id();
-                    this.update_excerpts(window, cx);
-                }
-                StackFrameListEvent::SelectedStackFrameChanged(selected_frame_id) => {
-                    this.selected_stack_frame_id = Some(*selected_frame_id);
-                    this.update_highlights(window, cx);
-
-                    if let Some(frame_anchor) = this
-                        .highlights
-                        .iter()
-                        .find(|(frame_id, _)| frame_id == selected_frame_id)
-                        .map(|highlight| highlight.1)
-                    {
-                        this.editor.update(cx, |editor, cx| {
-                            if frame_anchor.excerpt_id
-                                != editor.selections.newest_anchor().head().excerpt_id
-                            {
-                                let effects = SelectionEffects::scroll(
-                                    Autoscroll::center().for_anchor(frame_anchor),
-                                );
-
-                                editor.change_selections(effects, window, cx, |selections| {
-                                    let selection_id = selections.new_selection_id();
-
-                                    let selection = Selection {
-                                        id: selection_id,
-                                        start: frame_anchor,
-                                        end: frame_anchor,
-                                        goal: SelectionGoal::None,
-                                        reversed: false,
-                                    };
-
-                                    selections.select_anchors(vec![selection]);
-                                })
-                            }
-                        });
-                    }
-                }
-            },
-        )
-        .detach();
-
-        let mut this = Self {
-            editor,
-            multibuffer,
-            workspace,
-            project,
-            excerpt_for_frames: HashMap::default(),
-            highlights: Vec::default(),
-            stack_frame_list,
-            selected_stack_frame_id: None,
-            refresh_task: None,
-            _subscription: None,
-        };
-
-        this.update_excerpts(window, cx);
-        this
-    }
-
-    fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        self.refresh_task.take();
-        self.editor.update(cx, |editor, cx| {
-            editor.clear_highlights(HighlightKey::DebugStackFrameLine, cx)
-        });
-
-        let stack_frames = self
-            .stack_frame_list
-            .read_with(cx, |list, _| list.flatten_entries(false, false));
-
-        let frames_to_open: Vec<_> = stack_frames
-            .into_iter()
-            .filter_map(|frame| {
-                Some((
-                    frame.id,
-                    frame.line as u32 - 1,
-                    StackFrameList::abs_path_from_stack_frame(&frame)?,
-                ))
-            })
-            .collect();
-
-        self.multibuffer
-            .update(cx, |multi_buffer, cx| multi_buffer.clear(cx));
-
-        let task = cx.spawn_in(window, async move |this, cx| {
-            let mut to_highlights = Vec::default();
-
-            for (stack_frame_id, line, abs_path) in frames_to_open {
-                let (worktree, relative_path) = this
-                    .update(cx, |this, cx| {
-                        this.workspace.update(cx, |workspace, cx| {
-                            workspace.project().update(cx, |this, cx| {
-                                this.find_or_create_worktree(&abs_path, false, cx)
-                            })
-                        })
-                    })??
-                    .await?;
-
-                let project_path = ProjectPath {
-                    worktree_id: worktree.read_with(cx, |tree, _| tree.id()),
-                    path: relative_path,
-                };
-
-                if let Some(buffer) = this
-                    .read_with(cx, |this, _| this.project.clone())?
-                    .update(cx, |project, cx| project.open_buffer(project_path, cx))
-                    .await
-                    .log_err()
-                {
-                    this.update(cx, |this, cx| {
-                        this.multibuffer.update(cx, |multi_buffer, cx| {
-                            let line_point = Point::new(line, 0);
-                            let start_context = Self::heuristic_syntactic_expand(
-                                &buffer.read(cx).snapshot(),
-                                line_point,
-                            );
-
-                            // Users will want to see what happened before an active debug line in most cases
-                            let range = ExcerptRange {
-                                context: start_context..Point::new(line.saturating_add(1), 0),
-                                primary: line_point..line_point,
-                            };
-                            multi_buffer.push_excerpts(buffer.clone(), vec![range], cx);
-
-                            let line_anchor =
-                                multi_buffer.buffer_point_to_anchor(&buffer, line_point, cx);
-
-                            if let Some(line_anchor) = line_anchor {
-                                this.excerpt_for_frames
-                                    .insert(line_anchor.excerpt_id, stack_frame_id);
-                                to_highlights.push((stack_frame_id, line_anchor));
-                            }
-                        });
-                    })
-                    .ok();
-                }
-            }
-
-            this.update_in(cx, |this, window, cx| {
-                this.highlights = to_highlights;
-                this.update_highlights(window, cx);
-            })
-            .ok();
-
-            anyhow::Ok(())
-        });
-
-        self.refresh_task = Some(task);
-    }
-
-    fn update_highlights(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        self.editor.update(cx, |editor, _| {
-            editor.clear_row_highlights::<DebugStackFrameLine>()
-        });
-
-        let stack_frames = self
-            .stack_frame_list
-            .read_with(cx, |session, _| session.flatten_entries(false, false));
-
-        let active_idx = self
-            .selected_stack_frame_id
-            .and_then(|id| {
-                stack_frames
-                    .iter()
-                    .enumerate()
-                    .find_map(|(idx, frame)| if frame.id == id { Some(idx) } else { None })
-            })
-            .unwrap_or(0);
-
-        self.editor.update(cx, |editor, cx| {
-            let snapshot = editor.snapshot(window, cx).display_snapshot;
-            let first_color = cx.theme().colors().editor_debugger_active_line_background;
-
-            let color = first_color.opacity(0.5);
-
-            let mut is_first = true;
-
-            for (_, highlight) in self.highlights.iter().skip(active_idx) {
-                let position = highlight.to_point(&snapshot.buffer_snapshot());
-                let color = if is_first {
-                    is_first = false;
-                    first_color
-                } else {
-                    color
-                };
-
-                let start = snapshot
-                    .buffer_snapshot()
-                    .clip_point(Point::new(position.row, 0), Bias::Left);
-                let end = start + Point::new(1, 0);
-                let start = snapshot.buffer_snapshot().anchor_before(start);
-                let end = snapshot.buffer_snapshot().anchor_before(end);
-                editor.highlight_rows::<DebugStackFrameLine>(
-                    start..end,
-                    color,
-                    RowHighlightOptions::default(),
-                    cx,
-                );
-            }
-        })
-    }
-
-    fn heuristic_syntactic_expand(snapshot: &BufferSnapshot, selected_point: Point) -> Point {
-        let mut text_objects = snapshot.text_object_ranges(
-            selected_point..selected_point,
-            TreeSitterOptions::max_start_depth(4),
-        );
-
-        let mut start_position = text_objects
-            .find(|(_, obj)| matches!(obj, language::TextObject::AroundFunction))
-            .map(|(range, _)| snapshot.offset_to_point(range.start))
-            .map(|point| Point::new(point.row.max(selected_point.row.saturating_sub(8)), 0))
-            .unwrap_or(selected_point);
-
-        if start_position.row == selected_point.row {
-            start_position.row = start_position.row.saturating_sub(1);
-        }
-
-        start_position
-    }
-}
-
-impl Render for StackTraceView {
-    fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
-        div().size_full().child(self.editor.clone())
-    }
-}
-
-impl EventEmitter<EditorEvent> for StackTraceView {}
-impl Focusable for StackTraceView {
-    fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
-        self.editor.focus_handle(cx)
-    }
-}
-
-impl Item for StackTraceView {
-    type Event = EditorEvent;
-
-    fn to_item_events(event: &EditorEvent, f: &mut dyn FnMut(ItemEvent)) {
-        Editor::to_item_events(event, f)
-    }
-
-    fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        self.editor
-            .update(cx, |editor, cx| editor.deactivated(window, cx));
-    }
-
-    fn navigate(
-        &mut self,
-        data: Arc<dyn Any + Send>,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> bool {
-        self.editor
-            .update(cx, |editor, cx| editor.navigate(data, window, cx))
-    }
-
-    fn tab_tooltip_text(&self, _: &App) -> Option<SharedString> {
-        Some("Stack Frame Viewer".into())
-    }
-
-    fn tab_content_text(&self, _detail: usize, _: &App) -> SharedString {
-        "Stack Frames".into()
-    }
-
-    fn for_each_project_item(
-        &self,
-        cx: &App,
-        f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem),
-    ) {
-        self.editor.for_each_project_item(cx, f)
-    }
-
-    fn set_nav_history(
-        &mut self,
-        nav_history: ItemNavHistory,
-        _: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        self.editor.update(cx, |editor, _| {
-            editor.set_nav_history(Some(nav_history));
-        });
-    }
-
-    fn is_dirty(&self, cx: &App) -> bool {
-        self.multibuffer.read(cx).is_dirty(cx)
-    }
-
-    fn has_deleted_file(&self, cx: &App) -> bool {
-        self.multibuffer.read(cx).has_deleted_file(cx)
-    }
-
-    fn has_conflict(&self, cx: &App) -> bool {
-        self.multibuffer.read(cx).has_conflict(cx)
-    }
-
-    fn can_save(&self, _: &App) -> bool {
-        true
-    }
-
-    fn save(
-        &mut self,
-        options: SaveOptions,
-        project: Entity<Project>,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> Task<Result<()>> {
-        self.editor.save(options, project, window, cx)
-    }
-
-    fn save_as(
-        &mut self,
-        _: Entity<Project>,
-        _: ProjectPath,
-        _window: &mut Window,
-        _: &mut Context<Self>,
-    ) -> Task<Result<()>> {
-        unreachable!()
-    }
-
-    fn reload(
-        &mut self,
-        project: Entity<Project>,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> Task<Result<()>> {
-        self.editor.reload(project, window, cx)
-    }
-
-    fn act_as_type<'a>(
-        &'a self,
-        type_id: TypeId,
-        self_handle: &'a Entity<Self>,
-        _: &'a App,
-    ) -> Option<gpui::AnyEntity> {
-        if type_id == TypeId::of::<Self>() {
-            Some(self_handle.clone().into())
-        } else if type_id == TypeId::of::<Editor>() {
-            Some(self.editor.clone().into())
-        } else {
-            None
-        }
-    }
-
-    fn as_searchable(&self, _: &Entity<Self>, _: &App) -> Option<Box<dyn SearchableItemHandle>> {
-        Some(Box::new(self.editor.clone()))
-    }
-
-    fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation {
-        ToolbarItemLocation::PrimaryLeft
-    }
-
-    fn breadcrumbs(&self, cx: &App) -> Option<Vec<BreadcrumbText>> {
-        self.editor.breadcrumbs(cx)
-    }
-
-    fn added_to_workspace(
-        &mut self,
-        workspace: &mut Workspace,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) {
-        self.editor.update(cx, |editor, cx| {
-            editor.added_to_workspace(workspace, window, cx)
-        });
-    }
-}

crates/dev_container/src/lib.rs 🔗

@@ -300,14 +300,20 @@ impl PickerDelegate for TemplatePickerDelegate {
     ) {
         let fun = &mut self.on_confirm;
 
+        if self.matching_indices.is_empty() {
+            return;
+        }
         self.stateful_modal
             .update(cx, |modal, cx| {
-                fun(
-                    self.candidate_templates[self.matching_indices[self.selected_index]].clone(),
-                    modal,
-                    window,
-                    cx,
-                );
+                let Some(confirmed_entry) = self
+                    .matching_indices
+                    .get(self.selected_index)
+                    .and_then(|ix| self.candidate_templates.get(*ix))
+                else {
+                    log::error!("Selected index not in range of known matches");
+                    return;
+                };
+                fun(confirmed_entry.clone(), modal, window, cx);
             })
             .ok();
     }
@@ -476,7 +482,17 @@ impl PickerDelegate for FeaturePickerDelegate {
                 })
                 .ok();
         } else {
-            let current = &mut self.candidate_features[self.matching_indices[self.selected_index]];
+            if self.matching_indices.is_empty() {
+                return;
+            }
+            let Some(current) = self
+                .matching_indices
+                .get(self.selected_index)
+                .and_then(|ix| self.candidate_features.get_mut(*ix))
+            else {
+                log::error!("Selected index not in range of matches");
+                return;
+            };
             current.toggle_state = match current.toggle_state {
                 ToggleState::Selected => {
                     self.template_entry

crates/diagnostics/src/diagnostics.rs 🔗

@@ -322,16 +322,14 @@ impl ProjectDiagnosticsEditor {
             if !has_no_blocks {
                 continue;
             }
-            let is_dirty = self
-                .multibuffer
-                .read(cx)
-                .buffer(buffer_id)
-                .is_none_or(|buffer| buffer.read(cx).is_dirty());
-            if is_dirty {
+            let Some(buffer) = self.multibuffer.read(cx).buffer(buffer_id) else {
+                continue;
+            };
+            if buffer.read(cx).is_dirty() {
                 continue;
             }
             self.multibuffer.update(cx, |b, cx| {
-                b.remove_excerpts_for_buffer(buffer_id, cx);
+                b.remove_excerpts_for_path(PathKey::for_buffer(&buffer, cx), cx);
             });
         }
     }

crates/docs_preprocessor/src/main.rs 🔗

@@ -578,6 +578,7 @@ fn handle_postprocessing() -> Result<()> {
         .expect("Default title not a string")
         .to_string();
     let amplitude_key = std::env::var("DOCS_AMPLITUDE_API_KEY").unwrap_or_default();
+    let consent_io_instance = std::env::var("DOCS_CONSENT_IO_INSTANCE").unwrap_or_default();
 
     output.insert("html".to_string(), zed_html);
     mdbook::Renderer::render(&mdbook::renderer::HtmlHandlebars::new(), &ctx)?;
@@ -647,6 +648,7 @@ fn handle_postprocessing() -> Result<()> {
         zlog::trace!(logger => "Updating {:?}", pretty_path(&file, &root_dir));
         let contents = contents.replace("#description#", meta_description);
         let contents = contents.replace("#amplitude_key#", &amplitude_key);
+        let contents = contents.replace("#consent_io_instance#", &consent_io_instance);
         let contents = title_regex()
             .replace(&contents, |_: &regex::Captures| {
                 format!("<title>{}</title>", meta_title)

crates/edit_prediction/Cargo.toml 🔗

@@ -21,6 +21,7 @@ arrayvec.workspace = true
 brotli.workspace = true
 buffer_diff.workspace = true
 client.workspace = true
+clock.workspace = true
 cloud_api_types.workspace = true
 cloud_llm_client.workspace = true
 collections.workspace = true

crates/edit_prediction/src/cursor_excerpt.rs 🔗

@@ -13,6 +13,7 @@ pub fn compute_excerpt_ranges(
     let editable_150 = compute_editable_range(snapshot, position, 150);
     let editable_180 = compute_editable_range(snapshot, position, 180);
     let editable_350 = compute_editable_range(snapshot, position, 350);
+    let editable_512 = compute_editable_range(snapshot, position, 512);
 
     let editable_150_context_350 =
         expand_context_syntactically_then_linewise(snapshot, editable_150.clone(), 350);
@@ -20,17 +21,20 @@ pub fn compute_excerpt_ranges(
         expand_context_syntactically_then_linewise(snapshot, editable_180.clone(), 350);
     let editable_350_context_150 =
         expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 150);
+    let editable_350_context_512 =
+        expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 512);
+    let editable_350_context_1024 =
+        expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 1024);
+    let context_4096 = expand_context_syntactically_then_linewise(
+        snapshot,
+        editable_350_context_1024.clone(),
+        4096 - 1024,
+    );
+    let context_8192 =
+        expand_context_syntactically_then_linewise(snapshot, context_4096.clone(), 8192 - 4096);
 
-    let full_start_row = editable_150_context_350
-        .start
-        .row
-        .min(editable_180_context_350.start.row)
-        .min(editable_350_context_150.start.row);
-    let full_end_row = editable_150_context_350
-        .end
-        .row
-        .max(editable_180_context_350.end.row)
-        .max(editable_350_context_150.end.row);
+    let full_start_row = context_8192.start.row;
+    let full_end_row = context_8192.end.row;
 
     let full_context =
         Point::new(full_start_row, 0)..Point::new(full_end_row, snapshot.line_len(full_end_row));
@@ -47,9 +51,14 @@ pub fn compute_excerpt_ranges(
         editable_150: to_offset(&editable_150),
         editable_180: to_offset(&editable_180),
         editable_350: to_offset(&editable_350),
+        editable_512: Some(to_offset(&editable_512)),
         editable_150_context_350: to_offset(&editable_150_context_350),
         editable_180_context_350: to_offset(&editable_180_context_350),
         editable_350_context_150: to_offset(&editable_350_context_150),
+        editable_350_context_512: Some(to_offset(&editable_350_context_512)),
+        editable_350_context_1024: Some(to_offset(&editable_350_context_1024)),
+        context_4096: Some(to_offset(&context_4096)),
+        context_8192: Some(to_offset(&context_8192)),
     };
 
     (full_context, full_context_offset_range, ranges)

crates/edit_prediction/src/edit_prediction.rs 🔗

@@ -1,7 +1,7 @@
 use anyhow::Result;
 use arrayvec::ArrayVec;
 use client::{Client, EditPredictionUsage, UserStore};
-use cloud_api_types::SubmitEditPredictionFeedbackBody;
+use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody};
 use cloud_llm_client::predict_edits_v3::{
     PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse,
 };
@@ -40,7 +40,7 @@ use settings::{
 };
 use std::collections::{VecDeque, hash_map};
 use std::env;
-use text::Edit;
+use text::{AnchorRangeExt, Edit};
 use workspace::Workspace;
 use zeta_prompt::{ZetaFormat, ZetaPromptInput};
 
@@ -69,12 +69,14 @@ pub mod sweep_ai;
 pub mod udiff;
 
 mod capture_example;
+pub mod open_ai_compatible;
 mod zed_edit_prediction_delegate;
 pub mod zeta;
 
 #[cfg(test)]
 mod edit_prediction_tests;
 
+use crate::example_spec::ExampleSpec;
 use crate::license_detection::LicenseDetectionWatcher;
 use crate::mercury::Mercury;
 use crate::onboarding_modal::ZedPredictModal;
@@ -103,15 +105,14 @@ const CHANGE_GROUPING_LINE_SPAN: u32 = 8;
 const LAST_CHANGE_GROUPING_TIME: Duration = Duration::from_secs(1);
 const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice";
 const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15);
+const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled";
+const EDIT_PREDICTION_SETTLED_TTL: Duration = Duration::from_secs(60 * 5);
+const EDIT_PREDICTION_SETTLED_QUIESCENCE: Duration = Duration::from_secs(10);
 
-pub struct Zeta2FeatureFlag;
+pub struct EditPredictionJumpsFeatureFlag;
 
-impl FeatureFlag for Zeta2FeatureFlag {
-    const NAME: &'static str = "zeta2";
-
-    fn enabled_for_staff() -> bool {
-        true
-    }
+impl FeatureFlag for EditPredictionJumpsFeatureFlag {
+    const NAME: &'static str = "edit_prediction_jumps";
 }
 
 #[derive(Clone)]
@@ -125,6 +126,7 @@ impl Global for EditPredictionStoreGlobal {}
 #[derive(Clone)]
 pub struct Zeta2RawConfig {
     pub model_id: Option<String>,
+    pub environment: Option<String>,
     pub format: ZetaFormat,
 }
 
@@ -133,22 +135,32 @@ pub struct EditPredictionStore {
     user_store: Entity<UserStore>,
     llm_token: LlmApiToken,
     _llm_token_subscription: Subscription,
+    _fetch_experiments_task: Task<()>,
     projects: HashMap<EntityId, ProjectState>,
     update_required: bool,
     edit_prediction_model: EditPredictionModel,
     zeta2_raw_config: Option<Zeta2RawConfig>,
+    preferred_experiment: Option<String>,
+    available_experiments: Vec<String>,
     pub sweep_ai: SweepAi,
     pub mercury: Mercury,
     data_collection_choice: DataCollectionChoice,
-    reject_predictions_tx: mpsc::UnboundedSender<EditPredictionRejection>,
+    reject_predictions_tx: mpsc::UnboundedSender<EditPredictionRejectionPayload>,
+    settled_predictions_tx: mpsc::UnboundedSender<Instant>,
     shown_predictions: VecDeque<EditPrediction>,
     rated_predictions: HashSet<EditPredictionId>,
+    #[cfg(test)]
+    settled_event_callback: Option<Box<dyn Fn(EditPredictionId, String)>>,
+}
+
+pub(crate) struct EditPredictionRejectionPayload {
+    rejection: EditPredictionRejection,
+    organization_id: Option<OrganizationId>,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub enum EditPredictionModel {
-    Zeta1,
-    Zeta2,
+    Zeta,
     Fim { format: EditPredictionPromptFormat },
     Sweep,
     Mercury,
@@ -166,6 +178,8 @@ pub struct EditPredictionModelInput {
     trigger: PredictEditsRequestTrigger,
     diagnostic_search_range: Range<Point>,
     debug_tx: Option<mpsc::UnboundedSender<DebugEvent>>,
+    can_collect_data: bool,
+    is_open_source: bool,
     pub user_actions: Vec<UserActionRecord>,
 }
 
@@ -360,6 +374,7 @@ impl ProjectState {
                         prediction_id,
                         EditPredictionRejectReason::Canceled,
                         false,
+                        None,
                         cx,
                     );
                 })
@@ -480,9 +495,19 @@ impl std::ops::Deref for BufferEditPrediction<'_> {
     }
 }
 
+#[derive(Clone)]
+struct PendingSettledPrediction {
+    request_id: EditPredictionId,
+    editable_anchor_range: Range<Anchor>,
+    example: Option<ExampleSpec>,
+    enqueued_at: Instant,
+    last_edit_at: Instant,
+}
+
 struct RegisteredBuffer {
     file: Option<Arc<dyn File>>,
     snapshot: TextBufferSnapshot,
+    pending_predictions: Vec<PendingSettledPrediction>,
     last_position: Option<Anchor>,
     _subscriptions: [gpui::Subscription; 2],
 }
@@ -674,33 +699,61 @@ impl EditPredictionStore {
         })
         .detach();
 
+        let (settled_predictions_tx, settled_predictions_rx) = mpsc::unbounded();
+        cx.spawn(async move |this, cx| {
+            Self::run_settled_predictions_worker(this, settled_predictions_rx, cx).await;
+        })
+        .detach();
+
+        let mut current_user = user_store.read(cx).watch_current_user();
+        let fetch_experiments_task = cx.spawn(async move |this, cx| {
+            while current_user.borrow().is_none() {
+                current_user.next().await;
+            }
+            this.update(cx, |this, cx| {
+                this.refresh_available_experiments(cx);
+            })
+            .log_err();
+        });
+
         let this = Self {
             projects: HashMap::default(),
             client,
             user_store,
             llm_token,
+            _fetch_experiments_task: fetch_experiments_task,
             _llm_token_subscription: cx.subscribe(
                 &refresh_llm_token_listener,
                 |this, _listener, _event, cx| {
                     let client = this.client.clone();
                     let llm_token = this.llm_token.clone();
+                    let organization_id = this
+                        .user_store
+                        .read(cx)
+                        .current_organization()
+                        .map(|organization| organization.id.clone());
                     cx.spawn(async move |_this, _cx| {
-                        llm_token.refresh(&client).await?;
+                        llm_token.refresh(&client, organization_id).await?;
                         anyhow::Ok(())
                     })
                     .detach_and_log_err(cx);
                 },
             ),
             update_required: false,
-            edit_prediction_model: EditPredictionModel::Zeta2,
+            edit_prediction_model: EditPredictionModel::Zeta,
             zeta2_raw_config: Self::zeta2_raw_config_from_env(),
+            preferred_experiment: None,
+            available_experiments: Vec::new(),
             sweep_ai: SweepAi::new(cx),
             mercury: Mercury::new(cx),
 
             data_collection_choice,
             reject_predictions_tx: reject_tx,
+            settled_predictions_tx,
             rated_predictions: Default::default(),
             shown_predictions: Default::default(),
+            #[cfg(test)]
+            settled_event_callback: None,
         };
 
         this
@@ -710,7 +763,12 @@ impl EditPredictionStore {
         let version_str = env::var("ZED_ZETA_FORMAT").ok()?;
         let format = ZetaFormat::parse(&version_str).ok()?;
         let model_id = env::var("ZED_ZETA_MODEL").ok();
-        Some(Zeta2RawConfig { model_id, format })
+        let environment = env::var("ZED_ZETA_ENVIRONMENT").ok();
+        Some(Zeta2RawConfig {
+            model_id,
+            environment,
+            format,
+        })
     }
 
     pub fn set_edit_prediction_model(&mut self, model: EditPredictionModel) {
@@ -725,6 +783,75 @@ impl EditPredictionStore {
         self.zeta2_raw_config.as_ref()
     }
 
+    pub fn preferred_experiment(&self) -> Option<&str> {
+        self.preferred_experiment.as_deref()
+    }
+
+    pub fn set_preferred_experiment(&mut self, experiment: Option<String>) {
+        self.preferred_experiment = experiment;
+    }
+
+    pub fn available_experiments(&self) -> &[String] {
+        &self.available_experiments
+    }
+
+    pub fn active_experiment(&self) -> Option<&str> {
+        self.preferred_experiment.as_deref().or_else(|| {
+            self.shown_predictions
+                .iter()
+                .find_map(|p| p.model_version.as_ref())
+                .and_then(|model_version| model_version.strip_prefix("zeta2:"))
+        })
+    }
+
+    pub fn refresh_available_experiments(&mut self, cx: &mut Context<Self>) {
+        let client = self.client.clone();
+        let llm_token = self.llm_token.clone();
+        let app_version = AppVersion::global(cx);
+        let organization_id = self
+            .user_store
+            .read(cx)
+            .current_organization()
+            .map(|organization| organization.id.clone());
+
+        cx.spawn(async move |this, cx| {
+            let experiments = cx
+                .background_spawn(async move {
+                    let http_client = client.http_client();
+                    let token = llm_token.acquire(&client, organization_id).await?;
+                    let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?;
+                    let request = http_client::Request::builder()
+                        .method(Method::GET)
+                        .uri(url.as_ref())
+                        .header("Authorization", format!("Bearer {}", token))
+                        .header(ZED_VERSION_HEADER_NAME, app_version.to_string())
+                        .body(Default::default())?;
+                    let mut response = http_client.send(request).await?;
+                    if response.status().is_success() {
+                        let mut body = Vec::new();
+                        response.body_mut().read_to_end(&mut body).await?;
+                        let experiments: Vec<String> = serde_json::from_slice(&body)?;
+                        Ok(experiments)
+                    } else {
+                        let mut body = String::new();
+                        response.body_mut().read_to_string(&mut body).await?;
+                        anyhow::bail!(
+                            "Failed to fetch experiments: {:?}\nBody: {}",
+                            response.status(),
+                            body
+                        );
+                    }
+                })
+                .await?;
+            this.update(cx, |this, cx| {
+                this.available_experiments = experiments;
+                cx.notify();
+            })?;
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+    }
+
     pub fn icons(&self, cx: &App) -> edit_prediction_types::EditPredictionIconSet {
         use ui::IconName;
         match self.edit_prediction_model {
@@ -738,7 +865,7 @@ impl EditPredictionStore {
             EditPredictionModel::Mercury => {
                 edit_prediction_types::EditPredictionIconSet::new(IconName::Inception)
             }
-            EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => {
+            EditPredictionModel::Zeta => {
                 edit_prediction_types::EditPredictionIconSet::new(IconName::ZedPredict)
                     .with_disabled(IconName::ZedPredictDisabled)
                     .with_up(IconName::ZedPredictUp)
@@ -867,10 +994,7 @@ impl EditPredictionStore {
     }
 
     pub fn usage(&self, cx: &App) -> Option<EditPredictionUsage> {
-        if matches!(
-            self.edit_prediction_model,
-            EditPredictionModel::Zeta2 | EditPredictionModel::Zeta1
-        ) {
+        if matches!(self.edit_prediction_model, EditPredictionModel::Zeta) {
             self.user_store.read(cx).edit_prediction_usage()
         } else {
             None
@@ -1035,7 +1159,7 @@ impl EditPredictionStore {
                 }
             }
             project::Event::DiagnosticsUpdated { .. } => {
-                if cx.has_flag::<Zeta2FeatureFlag>() {
+                if cx.has_flag::<EditPredictionJumpsFeatureFlag>() {
                     self.refresh_prediction_from_diagnostics(
                         project,
                         DiagnosticSearchScope::Global,
@@ -1089,6 +1213,7 @@ impl EditPredictionStore {
                     snapshot,
                     file,
                     last_position: None,
+                    pending_predictions: Vec::new(),
                     _subscriptions: [
                         cx.subscribe(buffer, {
                             let project = project.downgrade();
@@ -1137,6 +1262,7 @@ impl EditPredictionStore {
         let mut total_inserted = 0usize;
         let mut edit_range: Option<Range<Anchor>> = None;
         let mut last_offset: Option<usize> = None;
+        let now = cx.background_executor().now();
 
         for (edit, anchor_range) in
             new_snapshot.anchored_edits_since::<usize>(&old_snapshot.version)
@@ -1155,6 +1281,12 @@ impl EditPredictionStore {
             return;
         };
 
+        for pending_prediction in &mut registered_buffer.pending_predictions {
+            if edit_range.overlaps(&pending_prediction.editable_anchor_range, &new_snapshot) {
+                pending_prediction.last_edit_at = now;
+            }
+        }
+
         let action_type = match (total_deleted, total_inserted, num_edits) {
             (0, ins, n) if ins == n => UserActionType::InsertChar,
             (0, _, _) => UserActionType::InsertSelection,
@@ -1181,7 +1313,6 @@ impl EditPredictionStore {
 
         let events = &mut project_state.events;
 
-        let now = cx.background_executor().now();
         if let Some(last_event) = project_state.last_event.as_mut() {
             let is_next_snapshot_of_same_buffer = old_snapshot.remote_id()
                 == last_event.new_snapshot.remote_id()
@@ -1312,7 +1443,7 @@ impl EditPredictionStore {
                     cx,
                 );
             }
-            EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => {
+            EditPredictionModel::Zeta => {
                 let is_cloud = !matches!(
                     all_language_settings(None, cx).edit_predictions.provider,
                     EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi
@@ -1326,7 +1457,7 @@ impl EditPredictionStore {
     }
 
     async fn handle_rejected_predictions(
-        rx: UnboundedReceiver<EditPredictionRejection>,
+        rx: UnboundedReceiver<EditPredictionRejectionPayload>,
         client: Arc<Client>,
         llm_token: LlmApiToken,
         app_version: Version,
@@ -1335,7 +1466,11 @@ impl EditPredictionStore {
         let mut rx = std::pin::pin!(rx.peekable());
         let mut batched = Vec::new();
 
-        while let Some(rejection) = rx.next().await {
+        while let Some(EditPredictionRejectionPayload {
+            rejection,
+            organization_id,
+        }) = rx.next().await
+        {
             batched.push(rejection);
 
             if batched.len() < MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST / 2 {
@@ -1373,6 +1508,7 @@ impl EditPredictionStore {
                 },
                 client.clone(),
                 llm_token.clone(),
+                organization_id,
                 app_version.clone(),
                 true,
             )
@@ -1384,6 +1520,120 @@ impl EditPredictionStore {
         }
     }
 
+    async fn run_settled_predictions_worker(
+        this: WeakEntity<Self>,
+        mut rx: UnboundedReceiver<Instant>,
+        cx: &mut AsyncApp,
+    ) {
+        let mut next_wake_time: Option<Instant> = None;
+        loop {
+            let now = cx.background_executor().now();
+            if let Some(wake_time) = next_wake_time.take() {
+                cx.background_executor()
+                    .timer(wake_time.duration_since(now))
+                    .await;
+            } else {
+                let Some(new_enqueue_time) = rx.next().await else {
+                    break;
+                };
+                next_wake_time = Some(new_enqueue_time + EDIT_PREDICTION_SETTLED_QUIESCENCE);
+                while rx.next().now_or_never().flatten().is_some() {}
+                continue;
+            }
+
+            let Some(this) = this.upgrade() else {
+                break;
+            };
+
+            let now = cx.background_executor().now();
+
+            let mut oldest_edited_at = None;
+
+            this.update(cx, |this, _| {
+                for (_, project_state) in this.projects.iter_mut() {
+                    for (_, registered_buffer) in project_state.registered_buffers.iter_mut() {
+                        registered_buffer
+                            .pending_predictions
+                            .retain_mut(|pending_prediction| {
+                                let age =
+                                    now.saturating_duration_since(pending_prediction.enqueued_at);
+                                if age >= EDIT_PREDICTION_SETTLED_TTL {
+                                    return false;
+                                }
+
+                                let quiet_for =
+                                    now.saturating_duration_since(pending_prediction.last_edit_at);
+                                if quiet_for >= EDIT_PREDICTION_SETTLED_QUIESCENCE {
+                                    let settled_editable_region = registered_buffer
+                                        .snapshot
+                                        .text_for_range(
+                                            pending_prediction.editable_anchor_range.clone(),
+                                        )
+                                        .collect::<String>();
+
+                                    #[cfg(test)]
+                                    if let Some(callback) = &this.settled_event_callback {
+                                        callback(
+                                            pending_prediction.request_id.clone(),
+                                            settled_editable_region.clone(),
+                                        );
+                                    }
+
+                                    telemetry::event!(
+                                        EDIT_PREDICTION_SETTLED_EVENT,
+                                        request_id = pending_prediction.request_id.0.clone(),
+                                        settled_editable_region,
+                                        example = pending_prediction.example.take(),
+                                    );
+
+                                    return false;
+                                }
+
+                                if oldest_edited_at
+                                    .is_none_or(|t| pending_prediction.last_edit_at < t)
+                                {
+                                    oldest_edited_at = Some(pending_prediction.last_edit_at);
+                                }
+
+                                true
+                            });
+                    }
+                }
+            });
+
+            next_wake_time = oldest_edited_at.map(|t| t + EDIT_PREDICTION_SETTLED_QUIESCENCE);
+        }
+    }
+
+    pub(crate) fn enqueue_settled_prediction(
+        &mut self,
+        request_id: EditPredictionId,
+        project: &Entity<Project>,
+        edited_buffer: &Entity<Buffer>,
+        edited_buffer_snapshot: &BufferSnapshot,
+        editable_offset_range: Range<usize>,
+        example: Option<ExampleSpec>,
+        cx: &mut Context<Self>,
+    ) {
+        let this = &mut *self;
+        let project_state = this.get_or_init_project(project, cx);
+        if let Some(buffer) = project_state
+            .registered_buffers
+            .get_mut(&edited_buffer.entity_id())
+        {
+            let now = cx.background_executor().now();
+            buffer.pending_predictions.push(PendingSettledPrediction {
+                request_id: request_id,
+                editable_anchor_range: edited_buffer_snapshot
+                    .anchor_range_around(editable_offset_range),
+                example,
+                enqueued_at: now,
+                last_edit_at: now,
+            });
+            this.settled_predictions_tx.unbounded_send(now).ok();
+        }
+    }
+
     fn reject_current_prediction(
         &mut self,
         reason: EditPredictionRejectReason,
@@ -1393,7 +1643,14 @@ impl EditPredictionStore {
         if let Some(project_state) = self.projects.get_mut(&project.entity_id()) {
             project_state.pending_predictions.clear();
             if let Some(prediction) = project_state.current_prediction.take() {
-                self.reject_prediction(prediction.prediction.id, reason, prediction.was_shown, cx);
+                let model_version = prediction.prediction.model_version.clone();
+                self.reject_prediction(
+                    prediction.prediction.id,
+                    reason,
+                    prediction.was_shown,
+                    model_version,
+                    cx,
+                );
             }
         };
     }
@@ -1452,20 +1709,32 @@ impl EditPredictionStore {
         prediction_id: EditPredictionId,
         reason: EditPredictionRejectReason,
         was_shown: bool,
+        model_version: Option<String>,
         cx: &App,
     ) {
         match self.edit_prediction_model {
-            EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => {
+            EditPredictionModel::Zeta => {
                 let is_cloud = !matches!(
                     all_language_settings(None, cx).edit_predictions.provider,
                     EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi
                 );
+
                 if is_cloud {
+                    let organization_id = self
+                        .user_store
+                        .read(cx)
+                        .current_organization()
+                        .map(|organization| organization.id.clone());
+
                     self.reject_predictions_tx
-                        .unbounded_send(EditPredictionRejection {
-                            request_id: prediction_id.to_string(),
-                            reason,
-                            was_shown,
+                        .unbounded_send(EditPredictionRejectionPayload {
+                            rejection: EditPredictionRejection {
+                                request_id: prediction_id.to_string(),
+                                reason,
+                                was_shown,
+                                model_version,
+                            },
+                            organization_id,
                         })
                         .log_err();
                 }
@@ -1547,6 +1816,9 @@ impl EditPredictionStore {
 
         // Prefer predictions from buffer
         if project_state.current_prediction.is_some() {
+            log::debug!(
+                "edit_prediction: diagnostic refresh skipped, current prediction already exists"
+            );
             return;
         }
 
@@ -1684,7 +1956,6 @@ fn is_ep_store_provider(provider: EditPredictionProvider) -> bool {
         | EditPredictionProvider::Experimental(_) => true,
         EditPredictionProvider::None
         | EditPredictionProvider::Copilot
-        | EditPredictionProvider::Supermaven
         | EditPredictionProvider::Codestral => false,
     }
 }
@@ -1725,7 +1996,6 @@ impl EditPredictionStore {
                 EditPredictionProvider::OpenAiCompatibleApi => (false, 2),
                 EditPredictionProvider::None
                 | EditPredictionProvider::Copilot
-                | EditPredictionProvider::Supermaven
                 | EditPredictionProvider::Codestral => {
                     log::error!("queue_prediction_refresh called with non-store provider");
                     return;
@@ -1737,31 +2007,49 @@ impl EditPredictionStore {
         let project_state = self.get_or_init_project(&project, cx);
         let pending_prediction_id = project_state.next_pending_prediction_id;
         project_state.next_pending_prediction_id += 1;
-        let last_request = *select_throttle(project_state, request_trigger);
+        let throttle_at_enqueue = *select_throttle(project_state, request_trigger);
 
         let task = cx.spawn(async move |this, cx| {
-            if let Some(timeout) = last_request.and_then(|(last_entity, last_timestamp)| {
-                if throttle_entity != last_entity {
-                    return None;
-                }
-                (last_timestamp + throttle_timeout).checked_duration_since(Instant::now())
-            }) {
+            let throttle_wait = this
+                .update(cx, |this, cx| {
+                    let project_state = this.get_or_init_project(&project, cx);
+                    let throttle = *select_throttle(project_state, request_trigger);
+
+                    throttle.and_then(|(last_entity, last_timestamp)| {
+                        if throttle_entity != last_entity {
+                            return None;
+                        }
+                        (last_timestamp + throttle_timeout).checked_duration_since(Instant::now())
+                    })
+                })
+                .ok()
+                .flatten();
+
+            if let Some(timeout) = throttle_wait {
                 cx.background_executor().timer(timeout).await;
             }
 
             // If this task was cancelled before the throttle timeout expired,
-            // do not perform a request.
+            // do not perform a request. Also skip if another task already
+            // proceeded since we were enqueued (duplicate).
             let mut is_cancelled = true;
             this.update(cx, |this, cx| {
                 let project_state = this.get_or_init_project(&project, cx);
                 let was_cancelled = project_state
                     .cancelled_predictions
                     .remove(&pending_prediction_id);
-                if !was_cancelled {
-                    let new_refresh = (throttle_entity, Instant::now());
-                    *select_throttle(project_state, request_trigger) = Some(new_refresh);
-                    is_cancelled = false;
+                if was_cancelled {
+                    return;
                 }
+
+                // Another request has already been sent since this was enqueued
+                if *select_throttle(project_state, request_trigger) != throttle_at_enqueue {
+                    return;
+                }
+
+                let new_refresh = (throttle_entity, Instant::now());
+                *select_throttle(project_state, request_trigger) = Some(new_refresh);
+                is_cancelled = false;
             })
             .ok();
             if is_cancelled {
@@ -1811,6 +2099,7 @@ impl EditPredictionStore {
                                         new_prediction.prediction.id,
                                         EditPredictionRejectReason::CurrentPreferred,
                                         false,
+                                        new_prediction.prediction.model_version,
                                         cx,
                                     );
                                     None
@@ -1820,7 +2109,13 @@ impl EditPredictionStore {
                             }
                         }
                         Err(reject_reason) => {
-                            this.reject_prediction(prediction_result.id, reject_reason, false, cx);
+                            this.reject_prediction(
+                                prediction_result.id,
+                                reject_reason,
+                                false,
+                                None,
+                                cx,
+                            );
                             None
                         }
                     }
@@ -1882,7 +2177,7 @@ impl EditPredictionStore {
             active_buffer.clone(),
             position,
             trigger,
-            cx.has_flag::<Zeta2FeatureFlag>(),
+            cx.has_flag::<EditPredictionJumpsFeatureFlag>(),
             cx,
         )
     }
@@ -1901,7 +2196,7 @@ impl EditPredictionStore {
         let stored_events = project_state.events(cx);
         let has_events = !stored_events.is_empty();
         let events: Vec<Arc<zeta_prompt::Event>> =
-            stored_events.into_iter().map(|e| e.event).collect();
+            stored_events.iter().map(|e| e.event.clone()).collect();
         let debug_tx = project_state.debug_tx.clone();
 
         let snapshot = active_buffer.read(cx).snapshot();
@@ -1935,33 +2230,41 @@ impl EditPredictionStore {
 
         let related_files = self.context_for_project(&project, cx);
 
+        let is_open_source = snapshot
+            .file()
+            .map_or(false, |file| self.is_file_open_source(&project, file, cx))
+            && events.iter().all(|event| event.in_open_source_repo())
+            && related_files.iter().all(|file| file.in_open_source_repo);
+
+        let can_collect_data = !cfg!(test)
+            && is_open_source
+            && self.is_data_collection_enabled(cx)
+            && matches!(self.edit_prediction_model, EditPredictionModel::Zeta);
+
+        let recent_paths = project_state.recent_paths.clone();
+
         let inputs = EditPredictionModelInput {
             project: project.clone(),
             buffer: active_buffer,
-            snapshot: snapshot,
+            snapshot,
             position,
             events,
             related_files,
-            recent_paths: project_state.recent_paths.clone(),
+            recent_paths,
             trigger,
             diagnostic_search_range: diagnostic_search_range,
             debug_tx,
             user_actions,
+            can_collect_data,
+            is_open_source,
         };
 
+        let capture_data = (can_collect_data && rand::random_ratio(1, 1000)).then(|| stored_events);
+
         let task = match self.edit_prediction_model {
-            EditPredictionModel::Zeta1 => zeta::request_prediction_with_zeta(
-                self,
-                inputs,
-                Some(zeta_prompt::EditPredictionModelKind::Zeta1),
-                cx,
-            ),
-            EditPredictionModel::Zeta2 => zeta::request_prediction_with_zeta(
-                self,
-                inputs,
-                Some(zeta_prompt::EditPredictionModelKind::Zeta2),
-                cx,
-            ),
+            EditPredictionModel::Zeta => {
+                zeta::request_prediction_with_zeta(self, inputs, capture_data, cx)
+            }
             EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx),
             EditPredictionModel::Sweep => self.sweep_ai.request_prediction_with_sweep(inputs, cx),
             EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx),
@@ -1970,7 +2273,13 @@ impl EditPredictionStore {
         cx.spawn(async move |this, cx| {
             let prediction = task.await?;
 
-            if prediction.is_none() && allow_jump && has_events {
+            // Only fall back to a diagnostics-based prediction when the
+            // model had nothing to suggest for the buffer
+            if prediction.is_none()
+                && allow_jump
+                && has_events
+                && !matches!(trigger, PredictEditsRequestTrigger::Diagnostics)
+            {
                 this.update(cx, |this, cx| {
                     this.refresh_prediction_from_diagnostics(
                         project,
@@ -2100,6 +2409,7 @@ impl EditPredictionStore {
         client: Arc<Client>,
         custom_url: Option<Arc<Url>>,
         llm_token: LlmApiToken,
+        organization_id: Option<OrganizationId>,
         app_version: Version,
     ) -> Result<(RawCompletionResponse, Option<EditPredictionUsage>)> {
         let url = if let Some(custom_url) = custom_url {
@@ -2119,6 +2429,7 @@ impl EditPredictionStore {
             },
             client,
             llm_token,
+            organization_id,
             app_version,
             true,
         )
@@ -2129,6 +2440,7 @@ impl EditPredictionStore {
         input: ZetaPromptInput,
         client: Arc<Client>,
         llm_token: LlmApiToken,
+        organization_id: Option<OrganizationId>,
         app_version: Version,
         trigger: PredictEditsRequestTrigger,
     ) -> Result<(PredictEditsV3Response, Option<EditPredictionUsage>)> {
@@ -2151,6 +2463,7 @@ impl EditPredictionStore {
             },
             client,
             llm_token,
+            organization_id,
             app_version,
             true,
         )
@@ -2204,6 +2517,7 @@ impl EditPredictionStore {
         build: impl Fn(http_client::http::request::Builder) -> Result<http_client::Request<AsyncBody>>,
         client: Arc<Client>,
         llm_token: LlmApiToken,
+        organization_id: Option<OrganizationId>,
         app_version: Version,
         require_auth: bool,
     ) -> Result<(Res, Option<EditPredictionUsage>)>
@@ -2213,9 +2527,12 @@ impl EditPredictionStore {
         let http_client = client.http_client();
 
         let mut token = if require_auth {
-            Some(llm_token.acquire(&client).await?)
+            Some(llm_token.acquire(&client, organization_id.clone()).await?)
         } else {
-            llm_token.acquire(&client).await.ok()
+            llm_token
+                .acquire(&client, organization_id.clone())
+                .await
+                .ok()
         };
         let mut did_retry = false;
 
@@ -2257,7 +2574,7 @@ impl EditPredictionStore {
                 return Ok((serde_json::from_slice(&body)?, usage));
             } else if !did_retry && token.is_some() && response.needs_llm_token_refresh() {
                 did_retry = true;
-                token = Some(llm_token.refresh(&client).await?);
+                token = Some(llm_token.refresh(&client, organization_id.clone()).await?);
             } else {
                 let mut body = String::new();
                 response.body_mut().read_to_string(&mut body).await?;

crates/edit_prediction/src/edit_prediction_tests.rs 🔗

@@ -29,7 +29,10 @@ use util::path;
 use uuid::Uuid;
 use zeta_prompt::ZetaPromptInput;
 
-use crate::{BufferEditPrediction, EditPredictionId, EditPredictionStore, REJECT_REQUEST_DEBOUNCE};
+use crate::{
+    BufferEditPrediction, EDIT_PREDICTION_SETTLED_QUIESCENCE, EditPredictionId,
+    EditPredictionStore, REJECT_REQUEST_DEBOUNCE,
+};
 
 #[gpui::test]
 async fn test_current_state(cx: &mut TestAppContext) {
@@ -897,7 +900,8 @@ async fn test_empty_prediction(cx: &mut TestAppContext) {
         &[EditPredictionRejection {
             request_id: id,
             reason: EditPredictionRejectReason::Empty,
-            was_shown: false
+            was_shown: false,
+            model_version: None,
         }]
     );
 }
@@ -957,7 +961,8 @@ async fn test_interpolated_empty(cx: &mut TestAppContext) {
         &[EditPredictionRejection {
             request_id: id,
             reason: EditPredictionRejectReason::InterpolatedEmpty,
-            was_shown: false
+            was_shown: false,
+            model_version: None,
         }]
     );
 }
@@ -1049,7 +1054,8 @@ async fn test_replace_current(cx: &mut TestAppContext) {
         &[EditPredictionRejection {
             request_id: first_id,
             reason: EditPredictionRejectReason::Replaced,
-            was_shown: false
+            was_shown: false,
+            model_version: None,
         }]
     );
 }
@@ -1143,7 +1149,8 @@ async fn test_current_preferred(cx: &mut TestAppContext) {
         &[EditPredictionRejection {
             request_id: second_id,
             reason: EditPredictionRejectReason::CurrentPreferred,
-            was_shown: false
+            was_shown: false,
+            model_version: None,
         }]
     );
 }
@@ -1234,7 +1241,8 @@ async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) {
         &[EditPredictionRejection {
             request_id: first_id,
             reason: EditPredictionRejectReason::Canceled,
-            was_shown: false
+            was_shown: false,
+            model_version: None,
         }]
     );
 }
@@ -1364,12 +1372,14 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
             EditPredictionRejection {
                 request_id: cancelled_id,
                 reason: EditPredictionRejectReason::Canceled,
-                was_shown: false
+                was_shown: false,
+                model_version: None,
             },
             EditPredictionRejection {
                 request_id: first_id,
                 reason: EditPredictionRejectReason::Replaced,
-                was_shown: false
+                was_shown: false,
+                model_version: None,
             }
         ]
     );
@@ -1476,6 +1486,52 @@ async fn test_jump_and_edit_throttles_are_independent(cx: &mut TestAppContext) {
     cx.run_until_parked();
 }
 
+#[gpui::test]
+async fn test_same_frame_duplicate_requests_deduplicated(cx: &mut TestAppContext) {
+    let (ep_store, mut requests) = init_test_with_fake_client(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "foo.md":  "Hello!\nHow\nBye\n"
+        }),
+    )
+    .await;
+    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
+
+    let buffer = project
+        .update(cx, |project, cx| {
+            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
+            project.open_buffer(path, cx)
+        })
+        .await
+        .unwrap();
+    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+    let position = snapshot.anchor_before(language::Point::new(1, 3));
+
+    // Enqueue two refresh calls in the same synchronous frame (no yielding).
+    // Both `cx.spawn` tasks are created before either executes, so they both
+    // capture the same `throttle_at_enqueue`. Only the first task should
+    // pass the deduplication gate; the second should be skipped.
+    ep_store.update(cx, |ep_store, cx| {
+        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
+        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
+    });
+
+    // Let both spawned tasks run to completion (including any throttle waits).
+    cx.run_until_parked();
+
+    // Exactly one prediction request should have been sent.
+    let (request, respond_tx) = requests.predict.next().await.unwrap();
+    respond_tx
+        .send(model_response(&request, SIMPLE_DIFF))
+        .unwrap();
+    cx.run_until_parked();
+
+    // No second request should be pending.
+    assert_no_predict_request_ready(&mut requests.predict);
+}
+
 #[gpui::test]
 async fn test_rejections_flushing(cx: &mut TestAppContext) {
     let (ep_store, mut requests) = init_test_with_fake_client(cx);
@@ -1485,12 +1541,14 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
             EditPredictionId("test-1".into()),
             EditPredictionRejectReason::Discarded,
             false,
+            None,
             cx,
         );
         ep_store.reject_prediction(
             EditPredictionId("test-2".into()),
             EditPredictionRejectReason::Canceled,
             true,
+            None,
             cx,
         );
     });
@@ -1508,7 +1566,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
         EditPredictionRejection {
             request_id: "test-1".to_string(),
             reason: EditPredictionRejectReason::Discarded,
-            was_shown: false
+            was_shown: false,
+            model_version: None,
         }
     );
     assert_eq!(
@@ -1516,7 +1575,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
         EditPredictionRejection {
             request_id: "test-2".to_string(),
             reason: EditPredictionRejectReason::Canceled,
-            was_shown: true
+            was_shown: true,
+            model_version: None,
         }
     );
 
@@ -1527,6 +1587,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
                 EditPredictionId(format!("batch-{}", i).into()),
                 EditPredictionRejectReason::Discarded,
                 false,
+                None,
                 cx,
             );
         }
@@ -1558,6 +1619,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
             EditPredictionId("retry-1".into()),
             EditPredictionRejectReason::Discarded,
             false,
+            None,
             cx,
         );
     });
@@ -1577,6 +1639,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
             EditPredictionId("retry-2".into()),
             EditPredictionRejectReason::Discarded,
             false,
+            None,
             cx,
         );
     });
@@ -1687,20 +1750,25 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
 
 // Generate a model response that would apply the given diff to the active file.
 fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response {
-    let excerpt =
-        request.input.cursor_excerpt[request.input.editable_range_in_excerpt.clone()].to_string();
+    let editable_range =
+        zeta_prompt::excerpt_range_for_format(Default::default(), &request.input.excerpt_ranges).1;
+    let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string();
     let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap();
 
     PredictEditsV3Response {
         request_id: Uuid::new_v4().to_string(),
+        editable_range,
         output: new_excerpt,
+        model_version: None,
     }
 }
 
 fn empty_response() -> PredictEditsV3Response {
     PredictEditsV3Response {
         request_id: Uuid::new_v4().to_string(),
+        editable_range: 0..0,
         output: String::new(),
+        model_version: None,
     }
 }
 
@@ -1820,16 +1888,17 @@ async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
             related_files: Default::default(),
             cursor_path: Path::new("").into(),
             cursor_excerpt: "".into(),
-            editable_range_in_excerpt: 0..0,
             cursor_offset_in_excerpt: 0,
             excerpt_start_row: None,
-            excerpt_ranges: None,
-            preferred_model: None,
+            excerpt_ranges: Default::default(),
+            experiment: None,
             in_open_source_repo: false,
             can_collect_data: false,
+            repo_url: None,
         },
         buffer_snapshotted_at: Instant::now(),
         response_received_at: Instant::now(),
+        model_version: None,
     };
 
     cx.update(|cx| {
@@ -2018,13 +2087,16 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte
         ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
     });
 
-    let (_request, respond_tx) = requests.predict.next().await.unwrap();
+    let (request, respond_tx) = requests.predict.next().await.unwrap();
 
     // Model returns output WITH a trailing newline, even though the buffer doesn't have one.
     // Zeta2 should normalize both sides before diffing, so no spurious newline is inserted.
+    let excerpt_length = request.input.cursor_excerpt.len();
     let response = PredictEditsV3Response {
         request_id: Uuid::new_v4().to_string(),
         output: "hello world\n".to_string(),
+        editable_range: 0..excerpt_length,
+        model_version: None,
     };
     respond_tx.send(response).unwrap();
 
@@ -2099,9 +2171,12 @@ async fn make_test_ep_store(
         let mut next_request_id = 0;
         move |req| {
             let completion_response = completion_response.clone();
+            let method = req.method().clone();
+            let uri = req.uri().path().to_string();
+            let mut body = req.into_body();
             async move {
-                match (req.method(), req.uri().path()) {
-                    (&Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder()
+                match (method, uri.as_str()) {
+                    (Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder()
                         .status(200)
                         .body(
                             serde_json::to_string(&CreateLlmTokenResponse {
@@ -2111,14 +2186,22 @@ async fn make_test_ep_store(
                             .into(),
                         )
                         .unwrap()),
-                    (&Method::POST, "/predict_edits/v3") => {
+                    (Method::POST, "/predict_edits/v3") => {
+                        let mut buf = Vec::new();
+                        body.read_to_end(&mut buf).await.ok();
+                        let decompressed = zstd::decode_all(&buf[..]).unwrap();
+                        let req: PredictEditsV3Request =
+                            serde_json::from_slice(&decompressed).unwrap();
+
                         next_request_id += 1;
                         Ok(http_client::Response::builder()
                             .status(200)
                             .body(
                                 serde_json::to_string(&PredictEditsV3Response {
                                     request_id: format!("request-{next_request_id}"),
+                                    editable_range: 0..req.input.cursor_excerpt.len(),
                                     output: completion_response.lock().clone(),
+                                    model_version: None,
                                 })
                                 .unwrap()
                                 .into(),
@@ -2127,7 +2210,7 @@ async fn make_test_ep_store(
                     }
                     _ => Ok(http_client::Response::builder()
                         .status(404)
-                        .body("Not Found".into())
+                        .body("Not Found".to_string().into())
                         .unwrap()),
                 }
             }
@@ -2142,7 +2225,7 @@ async fn make_test_ep_store(
 
     let ep_store = cx.new(|cx| {
         let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx);
-        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1);
+        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);
 
         let worktrees = project.read(cx).worktrees(cx).collect::<Vec<_>>();
         for worktree in worktrees {
@@ -2241,7 +2324,7 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut
     cx.background_executor.run_until_parked();
 
     let completion_task = ep_store.update(cx, |ep_store, cx| {
-        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1);
+        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);
         ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx)
     });
 
@@ -2536,6 +2619,181 @@ async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppCont
     );
 }
 
+#[gpui::test]
+async fn test_edit_prediction_settled(cx: &mut TestAppContext) {
+    let (ep_store, _requests) = init_test_with_fake_client(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    // Buffer with two clearly separated regions:
+    //   Region A = lines 0-9   (offsets 0..50)
+    //   Region B = lines 20-29 (offsets 105..155)
+    // A big gap in between so edits in one region never overlap the other.
+    let mut content = String::new();
+    for i in 0..30 {
+        content.push_str(&format!("line {i:02}\n"));
+    }
+
+    fs.insert_tree(
+        "/root",
+        json!({
+            "foo.md": content.clone()
+        }),
+    )
+    .await;
+    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
+
+    let buffer = project
+        .update(cx, |project, cx| {
+            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
+            project.open_buffer(path, cx)
+        })
+        .await
+        .unwrap();
+
+    type SettledEventRecord = (EditPredictionId, String);
+    let settled_events: Arc<Mutex<Vec<SettledEventRecord>>> = Arc::new(Mutex::new(Vec::new()));
+
+    ep_store.update(cx, |ep_store, cx| {
+        ep_store.register_buffer(&buffer, &project, cx);
+
+        let settled_events = settled_events.clone();
+        ep_store.settled_event_callback = Some(Box::new(move |id, text| {
+            settled_events.lock().push((id, text));
+        }));
+    });
+
+    // --- Phase 1: edit in region A and enqueue prediction A ---
+
+    buffer.update(cx, |buffer, cx| {
+        // Edit at the start of line 0.
+        buffer.edit(vec![(0..0, "ADDED ")], None, cx);
+    });
+    cx.run_until_parked();
+
+    let snapshot_a = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+
+    // Region A: first 10 lines of the buffer.
+    let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0));
+
+    ep_store.update(cx, |ep_store, cx| {
+        ep_store.enqueue_settled_prediction(
+            EditPredictionId("prediction-a".into()),
+            &project,
+            &buffer,
+            &snapshot_a,
+            editable_region_a.clone(),
+            None,
+            cx,
+        );
+    });
+
+    // --- Phase 2: repeatedly edit in region A to keep it unsettled ---
+
+    // Let the worker process the channel message before we start advancing.
+    cx.run_until_parked();
+
+    let mut region_a_edit_offset = 5;
+    for _ in 0..3 {
+        // Edit inside region A (not at the boundary) so `last_edit_at` is
+        // updated before the worker's next wake.
+        buffer.update(cx, |buffer, cx| {
+            buffer.edit(
+                vec![(region_a_edit_offset..region_a_edit_offset, "x")],
+                None,
+                cx,
+            );
+        });
+        region_a_edit_offset += 1;
+        cx.run_until_parked();
+
+        cx.executor()
+            .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 2);
+        cx.run_until_parked();
+        assert!(
+            settled_events.lock().is_empty(),
+            "no settled events should fire while region A is still being edited"
+        );
+    }
+
+    // Still nothing settled.
+    assert!(settled_events.lock().is_empty());
+
+    // --- Phase 3: edit in distinct region B, enqueue prediction B ---
+    // Advance a small amount so B's quiescence window starts later than A's,
+    // but not so much that A settles (A's last edit was at the start of
+    // iteration 3, and it needs a full Q to settle).
+    cx.executor()
+        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
+    cx.run_until_parked();
+    assert!(settled_events.lock().is_empty());
+
+    let snapshot_b = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+    let line_20_offset = snapshot_b.point_to_offset(Point::new(20, 0));
+
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit(vec![(line_20_offset..line_20_offset, "NEW ")], None, cx);
+    });
+    cx.run_until_parked();
+
+    let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+    let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0));
+
+    ep_store.update(cx, |ep_store, cx| {
+        ep_store.enqueue_settled_prediction(
+            EditPredictionId("prediction-b".into()),
+            &project,
+            &buffer,
+            &snapshot_b2,
+            editable_region_b.clone(),
+            None,
+            cx,
+        );
+    });
+
+    cx.run_until_parked();
+    assert!(
+        settled_events.lock().is_empty(),
+        "neither prediction should have settled yet"
+    );
+
+    // --- Phase 4: let enough time pass for region A to settle ---
+    // A's last edit was at T_a (during the last loop iteration). The worker is
+    // sleeping until T_a + Q. We advance just enough to reach that wake time
+    // (Q/4 since we already advanced Q/4 in phase 3 on top of the loop's
+    // 3*Q/2). At that point A has been quiet for Q and settles, but B was
+    // enqueued only Q/4 ago and stays pending.
+    cx.executor()
+        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
+    cx.run_until_parked();
+
+    {
+        let events = settled_events.lock().clone();
+        assert_eq!(
+            events.len(),
+            1,
+            "prediction and capture_sample for A should have settled, got: {events:?}"
+        );
+        assert_eq!(events[0].0, EditPredictionId("prediction-a".into()));
+    }
+
+    // --- Phase 5: let more time pass for region B to settle ---
+    // B's last edit was Q/4 before A settled. The worker rescheduled to
+    // B's last_edit_at + Q, which is 3Q/4 from now.
+    cx.executor()
+        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE * 3 / 4);
+    cx.run_until_parked();
+
+    {
+        let events = settled_events.lock().clone();
+        assert_eq!(
+            events.len(),
+            2,
+            "both prediction and capture_sample settled events should be emitted for each request, got: {events:?}"
+        );
+        assert_eq!(events[1].0, EditPredictionId("prediction-b".into()));
+    }
+}
+
 #[ctor::ctor]
 fn init_logger() {
     zlog::init_test();

crates/edit_prediction/src/fim.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{
-    EditPredictionId, EditPredictionModelInput, cursor_excerpt, prediction::EditPredictionResult,
-    zeta,
+    EditPredictionId, EditPredictionModelInput, cursor_excerpt,
+    open_ai_compatible::{self, load_open_ai_compatible_api_key_if_needed},
+    prediction::EditPredictionResult,
 };
 use anyhow::{Context as _, Result, anyhow};
 use gpui::{App, AppContext as _, Entity, Task};
@@ -58,6 +59,8 @@ pub fn request_prediction(
         return Task::ready(Err(anyhow!("Unsupported edit prediction provider for FIM")));
     };
 
+    let api_key = load_open_ai_compatible_api_key_if_needed(provider, cx);
+
     let result = cx.background_spawn(async move {
         let (excerpt_range, _) = cursor_excerpt::editable_and_context_ranges_for_cursor_position(
             cursor_point,
@@ -72,18 +75,17 @@ pub fn request_prediction(
             events,
             related_files: Vec::new(),
             cursor_offset_in_excerpt: cursor_offset - excerpt_offset_range.start,
-            editable_range_in_excerpt: cursor_offset - excerpt_offset_range.start
-                ..cursor_offset - excerpt_offset_range.start,
             cursor_path: full_path.clone(),
             excerpt_start_row: Some(excerpt_range.start.row),
             cursor_excerpt: snapshot
                 .text_for_range(excerpt_range)
                 .collect::<String>()
                 .into(),
-            excerpt_ranges: None,
-            preferred_model: None,
+            excerpt_ranges: Default::default(),
+            experiment: None,
             in_open_source_repo: false,
             can_collect_data: false,
+            repo_url: None,
         };
 
         let prefix = inputs.cursor_excerpt[..inputs.cursor_offset_in_excerpt].to_string();
@@ -92,12 +94,14 @@ pub fn request_prediction(
         let stop_tokens = get_fim_stop_tokens();
 
         let max_tokens = settings.max_output_tokens;
-        let (response_text, request_id) = zeta::send_custom_server_request(
+
+        let (response_text, request_id) = open_ai_compatible::send_custom_server_request(
             provider,
             &settings,
             prompt,
             max_tokens,
             stop_tokens,
+            api_key,
             &http_client,
         )
         .await?;
@@ -141,6 +145,7 @@ pub fn request_prediction(
                 output.buffer_snapshotted_at,
                 output.response_received_at,
                 output.inputs,
+                None,
                 cx,
             )
             .await,

crates/edit_prediction/src/mercury.rs 🔗

@@ -16,7 +16,7 @@ use release_channel::AppVersion;
 use serde::Serialize;
 use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant};
 
-use zeta_prompt::ZetaPromptInput;
+use zeta_prompt::{ExcerptRanges, ZetaPromptInput};
 
 const MERCURY_API_URL: &str = "https://api.inceptionlabs.ai/v1/edit/completions";
 const MAX_REWRITE_TOKENS: usize = 150;
@@ -83,6 +83,12 @@ impl Mercury {
 
             let editable_offset_range = editable_range.to_offset(&snapshot);
 
+            let editable_range_in_excerpt = (editable_offset_range.start
+                - context_offset_range.start)
+                ..(editable_offset_range.end - context_offset_range.start);
+            let context_range_in_excerpt =
+                0..(context_offset_range.end - context_offset_range.start);
+
             let inputs = zeta_prompt::ZetaPromptInput {
                 events,
                 related_files,
@@ -93,14 +99,20 @@ impl Mercury {
                     .text_for_range(context_range)
                     .collect::<String>()
                     .into(),
-                editable_range_in_excerpt: (editable_offset_range.start
-                    - context_offset_range.start)
-                    ..(editable_offset_range.end - context_offset_range.start),
+                experiment: None,
                 excerpt_start_row: Some(context_start_row),
-                excerpt_ranges: None,
-                preferred_model: None,
+                excerpt_ranges: ExcerptRanges {
+                    editable_150: editable_range_in_excerpt.clone(),
+                    editable_180: editable_range_in_excerpt.clone(),
+                    editable_350: editable_range_in_excerpt.clone(),
+                    editable_150_context_350: context_range_in_excerpt.clone(),
+                    editable_180_context_350: context_range_in_excerpt.clone(),
+                    editable_350_context_150: context_range_in_excerpt.clone(),
+                    ..Default::default()
+                },
                 in_open_source_repo: false,
                 can_collect_data: false,
+                repo_url: None,
             };
 
             let prompt = build_prompt(&inputs);
@@ -218,6 +230,7 @@ impl Mercury {
                     buffer_snapshotted_at,
                     response_received_at,
                     inputs,
+                    None,
                     cx,
                 )
                 .await,
@@ -272,19 +285,18 @@ fn build_prompt(inputs: &ZetaPromptInput) -> String {
             prompt.push_str(inputs.cursor_path.as_os_str().to_string_lossy().as_ref());
             prompt.push('\n');
 
-            prompt.push_str(&inputs.cursor_excerpt[0..inputs.editable_range_in_excerpt.start]);
+            let editable_range = &inputs.excerpt_ranges.editable_350;
+            prompt.push_str(&inputs.cursor_excerpt[0..editable_range.start]);
             push_delimited(prompt, CODE_TO_EDIT_START..CODE_TO_EDIT_END, |prompt| {
                 prompt.push_str(
-                    &inputs.cursor_excerpt
-                        [inputs.editable_range_in_excerpt.start..inputs.cursor_offset_in_excerpt],
+                    &inputs.cursor_excerpt[editable_range.start..inputs.cursor_offset_in_excerpt],
                 );
                 prompt.push_str(CURSOR_TAG);
                 prompt.push_str(
-                    &inputs.cursor_excerpt
-                        [inputs.cursor_offset_in_excerpt..inputs.editable_range_in_excerpt.end],
+                    &inputs.cursor_excerpt[inputs.cursor_offset_in_excerpt..editable_range.end],
                 );
             });
-            prompt.push_str(&inputs.cursor_excerpt[inputs.editable_range_in_excerpt.end..]);
+            prompt.push_str(&inputs.cursor_excerpt[editable_range.end..]);
         },
     );
 

crates/edit_prediction/src/open_ai_compatible.rs 🔗

@@ -0,0 +1,133 @@
+use anyhow::{Context as _, Result};
+use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse};
+use futures::AsyncReadExt as _;
+use gpui::{App, AppContext as _, Entity, Global, SharedString, Task, http_client};
+use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings};
+use language_model::{ApiKeyState, EnvVar, env_var};
+use std::sync::Arc;
+
+pub fn open_ai_compatible_api_url(cx: &App) -> SharedString {
+    all_language_settings(None, cx)
+        .edit_predictions
+        .open_ai_compatible_api
+        .as_ref()
+        .map(|settings| settings.api_url.clone())
+        .unwrap_or_default()
+        .into()
+}
+
+pub const OPEN_AI_COMPATIBLE_CREDENTIALS_USERNAME: &str = "openai-compatible-api-token";
+pub static OPEN_AI_COMPATIBLE_TOKEN_ENV_VAR: std::sync::LazyLock<EnvVar> =
+    env_var!("ZED_OPEN_AI_COMPATIBLE_EDIT_PREDICTION_API_KEY");
+
+struct GlobalOpenAiCompatibleApiKey(Entity<ApiKeyState>);
+
+impl Global for GlobalOpenAiCompatibleApiKey {}
+
+pub fn open_ai_compatible_api_token(cx: &mut App) -> Entity<ApiKeyState> {
+    if let Some(global) = cx.try_global::<GlobalOpenAiCompatibleApiKey>() {
+        return global.0.clone();
+    }
+
+    let entity = cx.new(|cx| {
+        ApiKeyState::new(
+            open_ai_compatible_api_url(cx),
+            OPEN_AI_COMPATIBLE_TOKEN_ENV_VAR.clone(),
+        )
+    });
+    cx.set_global(GlobalOpenAiCompatibleApiKey(entity.clone()));
+    entity
+}
+
+pub fn load_open_ai_compatible_api_token(
+    cx: &mut App,
+) -> Task<Result<(), language_model::AuthenticateError>> {
+    let api_url = open_ai_compatible_api_url(cx);
+    open_ai_compatible_api_token(cx).update(cx, |key_state, cx| {
+        key_state.load_if_needed(api_url, |s| s, cx)
+    })
+}
+
+pub fn load_open_ai_compatible_api_key_if_needed(
+    provider: settings::EditPredictionProvider,
+    cx: &mut App,
+) -> Option<Arc<str>> {
+    if provider != settings::EditPredictionProvider::OpenAiCompatibleApi {
+        return None;
+    }
+    _ = load_open_ai_compatible_api_token(cx);
+    let url = open_ai_compatible_api_url(cx);
+    return open_ai_compatible_api_token(cx).read(cx).key(&url);
+}
+
+pub(crate) async fn send_custom_server_request(
+    provider: settings::EditPredictionProvider,
+    settings: &OpenAiCompatibleEditPredictionSettings,
+    prompt: String,
+    max_tokens: u32,
+    stop_tokens: Vec<String>,
+    api_key: Option<Arc<str>>,
+    http_client: &Arc<dyn http_client::HttpClient>,
+) -> Result<(String, String)> {
+    match provider {
+        settings::EditPredictionProvider::Ollama => {
+            let response = crate::ollama::make_request(
+                settings.clone(),
+                prompt,
+                stop_tokens,
+                http_client.clone(),
+            )
+            .await?;
+            Ok((response.response, response.created_at))
+        }
+        _ => {
+            let request = RawCompletionRequest {
+                model: settings.model.clone(),
+                prompt,
+                max_tokens: Some(max_tokens),
+                temperature: None,
+                stop: stop_tokens
+                    .into_iter()
+                    .map(std::borrow::Cow::Owned)
+                    .collect(),
+                environment: None,
+            };
+
+            let request_body = serde_json::to_string(&request)?;
+            let mut http_request_builder = http_client::Request::builder()
+                .method(http_client::Method::POST)
+                .uri(settings.api_url.as_ref())
+                .header("Content-Type", "application/json");
+
+            if let Some(api_key) = api_key {
+                http_request_builder =
+                    http_request_builder.header("Authorization", format!("Bearer {}", api_key));
+            }
+
+            let http_request =
+                http_request_builder.body(http_client::AsyncBody::from(request_body))?;
+
+            let mut response = http_client.send(http_request).await?;
+            let status = response.status();
+
+            if !status.is_success() {
+                let mut body = String::new();
+                response.body_mut().read_to_string(&mut body).await?;
+                anyhow::bail!("custom server error: {} - {}", status, body);
+            }
+
+            let mut body = String::new();
+            response.body_mut().read_to_string(&mut body).await?;
+
+            let parsed: RawCompletionResponse =
+                serde_json::from_str(&body).context("Failed to parse completion response")?;
+            let text = parsed
+                .choices
+                .into_iter()
+                .next()
+                .map(|choice| choice.text)
+                .unwrap_or_default();
+            Ok((text, parsed.id))
+        }
+    }
+}

crates/edit_prediction/src/prediction.rs 🔗

@@ -41,6 +41,7 @@ impl EditPredictionResult {
         buffer_snapshotted_at: Instant,
         response_received_at: Instant,
         inputs: ZetaPromptInput,
+        model_version: Option<String>,
         cx: &mut AsyncApp,
     ) -> Self {
         if edits.is_empty() {
@@ -79,6 +80,7 @@ impl EditPredictionResult {
                 buffer: edited_buffer.clone(),
                 buffer_snapshotted_at,
                 response_received_at,
+                model_version,
             }),
         }
     }
@@ -95,6 +97,7 @@ pub struct EditPrediction {
     pub buffer_snapshotted_at: Instant,
     pub response_received_at: Instant,
     pub inputs: zeta_prompt::ZetaPromptInput,
+    pub model_version: Option<String>,
 }
 
 impl EditPrediction {
@@ -150,18 +153,19 @@ mod tests {
             snapshot: cx.read(|cx| buffer.read(cx).snapshot()),
             buffer: buffer.clone(),
             edit_preview,
+            model_version: None,
             inputs: ZetaPromptInput {
                 events: vec![],
                 related_files: vec![],
                 cursor_path: Path::new("path.txt").into(),
                 cursor_offset_in_excerpt: 0,
                 cursor_excerpt: "".into(),
-                editable_range_in_excerpt: 0..0,
                 excerpt_start_row: None,
-                excerpt_ranges: None,
-                preferred_model: None,
+                excerpt_ranges: Default::default(),
+                experiment: None,
                 in_open_source_repo: false,
                 can_collect_data: false,
+                repo_url: None,
             },
             buffer_snapshotted_at: Instant::now(),
             response_received_at: Instant::now(),

crates/edit_prediction/src/sweep_ai.rs 🔗

@@ -215,14 +215,21 @@ impl SweepAi {
                 related_files: inputs.related_files.clone(),
                 cursor_path: full_path.clone(),
                 cursor_excerpt: request_body.file_contents.clone().into(),
-                // we actually don't know
-                editable_range_in_excerpt: 0..inputs.snapshot.len(),
                 cursor_offset_in_excerpt: request_body.cursor_position,
                 excerpt_start_row: Some(0),
-                excerpt_ranges: None,
-                preferred_model: None,
+                excerpt_ranges: zeta_prompt::ExcerptRanges {
+                    editable_150: 0..inputs.snapshot.len(),
+                    editable_180: 0..inputs.snapshot.len(),
+                    editable_350: 0..inputs.snapshot.len(),
+                    editable_150_context_350: 0..inputs.snapshot.len(),
+                    editable_180_context_350: 0..inputs.snapshot.len(),
+                    editable_350_context_150: 0..inputs.snapshot.len(),
+                    ..Default::default()
+                },
+                experiment: None,
                 in_open_source_repo: false,
                 can_collect_data: false,
+                repo_url: None,
             };
 
             send_started_event(
@@ -303,6 +310,7 @@ impl SweepAi {
                     buffer_snapshotted_at,
                     response_received_at,
                     inputs,
+                    None,
                     cx,
                 )
                 .await,

crates/edit_prediction/src/udiff.rs 🔗

@@ -266,6 +266,66 @@ pub fn strip_diff_metadata(diff: &str) -> String {
     result
 }
 
+/// Find all byte offsets where `hunk.context` occurs as a substring of `text`.
+///
+/// If no exact matches are found and the context ends with `'\n'` but `text`
+/// does not, retries without the trailing newline, accepting only a match at
+/// the very end of `text`. When this fallback fires, the hunk's context is
+/// trimmed and its edit ranges are clamped so that downstream code doesn't
+/// index past the end of the matched region. This handles diffs that are
+/// missing a `\ No newline at end of file` marker: the parser always appends
+/// `'\n'` via `writeln!`, so the context can have a trailing newline that
+/// doesn't exist in the source text.
+fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec<usize> {
+    let candidates: Vec<usize> = text
+        .match_indices(&hunk.context)
+        .map(|(offset, _)| offset)
+        .collect();
+
+    if !candidates.is_empty() {
+        return candidates;
+    }
+
+    if hunk.context.ends_with('\n') && !hunk.context.is_empty() {
+        let old_len = hunk.context.len();
+        hunk.context.pop();
+        let new_len = hunk.context.len();
+
+        if !hunk.context.is_empty() {
+            let candidates: Vec<usize> = text
+                .match_indices(&hunk.context)
+                .filter(|(offset, _)| offset + new_len == text.len())
+                .map(|(offset, _)| offset)
+                .collect();
+
+            if !candidates.is_empty() {
+                for edit in &mut hunk.edits {
+                    let touched_phantom = edit.range.end > new_len;
+                    edit.range.start = edit.range.start.min(new_len);
+                    edit.range.end = edit.range.end.min(new_len);
+                    if touched_phantom {
+                        // The replacement text was also written with a
+                        // trailing '\n' that corresponds to the phantom
+                        // newline we just removed from the context.
+                        if edit.text.ends_with('\n') {
+                            edit.text.pop();
+                        }
+                    }
+                }
+                return candidates;
+            }
+
+            // Restore if fallback didn't help either.
+            hunk.context.push('\n');
+            debug_assert_eq!(hunk.context.len(), old_len);
+        } else {
+            hunk.context.push('\n');
+        }
+    }
+
+    Vec::new()
+}
+
 /// Given multiple candidate offsets where context matches, use line numbers to disambiguate.
 /// Returns the offset that matches the expected line, or None if no match or no line number available.
 fn disambiguate_by_line_number(
@@ -305,15 +365,11 @@ pub fn apply_diff_to_string_with_hunk_offset(
     while let Some(event) = diff.next().context("Failed to parse diff")? {
         match event {
             DiffEvent::Hunk {
-                hunk,
+                mut hunk,
                 path: _,
                 status: _,
             } => {
-                // Find all matches of the context in the text
-                let candidates: Vec<usize> = text
-                    .match_indices(&hunk.context)
-                    .map(|(offset, _)| offset)
-                    .collect();
+                let candidates = find_context_candidates(&text, &mut hunk);
 
                 let hunk_offset =
                     disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| {
@@ -348,7 +404,7 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result<Vec<(Range<usize>
     while let Some(event) = diff.next()? {
         match event {
             DiffEvent::Hunk {
-                hunk,
+                mut hunk,
                 path: _,
                 status: _,
             } => {
@@ -356,11 +412,7 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result<Vec<(Range<usize>
                     return Ok(Vec::new());
                 }
 
-                // Find all matches of the context in the content
-                let candidates: Vec<usize> = content
-                    .match_indices(&hunk.context)
-                    .map(|(offset, _)| offset)
-                    .collect();
+                let candidates = find_context_candidates(content, &mut hunk);
 
                 let Some(context_offset) =
                     disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| {
@@ -611,7 +663,7 @@ impl<'a> DiffParser<'a> {
 }
 
 fn resolve_hunk_edits_in_buffer(
-    hunk: Hunk,
+    mut hunk: Hunk,
     buffer: &TextBufferSnapshot,
     ranges: &[Range<Anchor>],
     status: FileStatus,
@@ -623,7 +675,7 @@ fn resolve_hunk_edits_in_buffer(
         for range in ranges {
             let range = range.to_offset(buffer);
             let text = buffer.text_for_range(range.clone()).collect::<String>();
-            for (ix, _) in text.match_indices(&hunk.context) {
+            for ix in find_context_candidates(&text, &mut hunk) {
                 candidates.push(range.start + ix);
             }
         }
@@ -1513,4 +1565,185 @@ mod tests {
             "#}
         );
     }
+
+    #[test]
+    fn test_apply_diff_to_string_no_trailing_newline() {
+        // Text without trailing newline; diff generated without
+        // `\ No newline at end of file` marker.
+        let text = "line1\nline2\nline3";
+        let diff = indoc! {"
+            --- a/file.txt
+            +++ b/file.txt
+            @@ -1,3 +1,3 @@
+             line1
+            -line2
+            +replaced
+             line3
+        "};
+
+        let result = apply_diff_to_string(diff, text).unwrap();
+        assert_eq!(result, "line1\nreplaced\nline3");
+    }
+
+    #[test]
+    fn test_apply_diff_to_string_trailing_newline_present() {
+        // When text has a trailing newline, exact matching still works and
+        // the fallback is never needed.
+        let text = "line1\nline2\nline3\n";
+        let diff = indoc! {"
+            --- a/file.txt
+            +++ b/file.txt
+            @@ -1,3 +1,3 @@
+             line1
+            -line2
+            +replaced
+             line3
+        "};
+
+        let result = apply_diff_to_string(diff, text).unwrap();
+        assert_eq!(result, "line1\nreplaced\nline3\n");
+    }
+
+    #[test]
+    fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() {
+        // Deletion of the last line when text has no trailing newline.
+        // The edit range must be clamped so it doesn't index past the
+        // end of the text.
+        let text = "line1\nline2\nline3";
+        let diff = indoc! {"
+            --- a/file.txt
+            +++ b/file.txt
+            @@ -1,3 +1,2 @@
+             line1
+             line2
+            -line3
+        "};
+
+        let result = apply_diff_to_string(diff, text).unwrap();
+        assert_eq!(result, "line1\nline2\n");
+    }
+
+    #[test]
+    fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() {
+        // Replace the last line when text has no trailing newline.
+        let text = "aaa\nbbb\nccc";
+        let diff = indoc! {"
+            --- a/file.txt
+            +++ b/file.txt
+            @@ -1,3 +1,3 @@
+             aaa
+             bbb
+            -ccc
+            +ddd
+        "};
+
+        let result = apply_diff_to_string(diff, text).unwrap();
+        assert_eq!(result, "aaa\nbbb\nddd");
+    }
+
+    #[test]
+    fn test_apply_diff_to_string_multibyte_no_trailing_newline() {
+        // Multi-byte UTF-8 characters near the end; ensures char boundary
+        // safety when the fallback clamps edit ranges.
+        let text = "hello\n세계";
+        let diff = indoc! {"
+            --- a/file.txt
+            +++ b/file.txt
+            @@ -1,2 +1,2 @@
+             hello
+            -세계
+            +world
+        "};
+
+        let result = apply_diff_to_string(diff, text).unwrap();
+        assert_eq!(result, "hello\nworld");
+    }
+
+    #[test]
+    fn test_find_context_candidates_no_false_positive_mid_text() {
+        // The stripped fallback must only match at the end of text, not in
+        // the middle where a real newline exists.
+        let text = "aaa\nbbb\nccc\n";
+        let mut hunk = Hunk {
+            context: "bbb\n".into(),
+            edits: vec![],
+            start_line: None,
+        };
+
+        let candidates = find_context_candidates(text, &mut hunk);
+        // Exact match at offset 4 — the fallback is not used.
+        assert_eq!(candidates, vec![4]);
+    }
+
+    #[test]
+    fn test_find_context_candidates_fallback_at_end() {
+        let text = "aaa\nbbb";
+        let mut hunk = Hunk {
+            context: "bbb\n".into(),
+            edits: vec![],
+            start_line: None,
+        };
+
+        let candidates = find_context_candidates(text, &mut hunk);
+        assert_eq!(candidates, vec![4]);
+        // Context should be stripped.
+        assert_eq!(hunk.context, "bbb");
+    }
+
+    #[test]
+    fn test_find_context_candidates_no_fallback_mid_text() {
+        // "bbb" appears mid-text followed by a newline, so the exact
+        // match succeeds. Verify the stripped fallback doesn't produce a
+        // second, spurious candidate.
+        let text = "aaa\nbbb\nccc";
+        let mut hunk = Hunk {
+            context: "bbb\nccc\n".into(),
+            edits: vec![],
+            start_line: None,
+        };
+
+        let candidates = find_context_candidates(text, &mut hunk);
+        // No exact match (text ends without newline after "ccc"), but the
+        // stripped context "bbb\nccc" matches at offset 4, which is the end.
+        assert_eq!(candidates, vec![4]);
+        assert_eq!(hunk.context, "bbb\nccc");
+    }
+
+    #[test]
+    fn test_find_context_candidates_clamps_edit_ranges() {
+        let text = "aaa\nbbb";
+        let mut hunk = Hunk {
+            context: "aaa\nbbb\n".into(),
+            edits: vec![Edit {
+                range: 4..8, // "bbb\n" — end points at the trailing \n
+                text: "ccc\n".into(),
+            }],
+            start_line: None,
+        };
+
+        let candidates = find_context_candidates(text, &mut hunk);
+        assert_eq!(candidates, vec![0]);
+        // Edit range end should be clamped to 7 (new context length).
+        assert_eq!(hunk.edits[0].range, 4..7);
+    }
+
+    #[test]
+    fn test_edits_for_diff_no_trailing_newline() {
+        let content = "foo\nbar\nbaz";
+        let diff = indoc! {"
+            --- a/file.txt
+            +++ b/file.txt
+            @@ -1,3 +1,3 @@
+             foo
+            -bar
+            +qux
+             baz
+        "};
+
+        let result = edits_for_diff(content, diff).unwrap();
+        assert_eq!(result.len(), 1);
+        let (range, text) = &result[0];
+        assert_eq!(&content[range.clone()], "bar");
+        assert_eq!(text, "qux");
+    }
 }

crates/edit_prediction/src/zeta.rs 🔗

@@ -2,28 +2,30 @@ use crate::cursor_excerpt::compute_excerpt_ranges;
 use crate::prediction::EditPredictionResult;
 use crate::{
     CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId,
-    EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, ollama,
+    EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, StoredEvent,
 };
-use anyhow::{Context as _, Result};
-use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse};
+use anyhow::Result;
+use cloud_llm_client::predict_edits_v3::RawCompletionRequest;
 use cloud_llm_client::{AcceptEditPredictionBody, EditPredictionRejectReason};
 use edit_prediction_types::PredictedCursorPosition;
-use futures::AsyncReadExt as _;
-use gpui::{App, AppContext as _, Task, http_client, prelude::*};
-use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings};
+use gpui::{App, AppContext as _, Task, prelude::*};
+use language::language_settings::all_language_settings;
 use language::{BufferSnapshot, ToOffset as _, ToPoint, text_diff};
 use release_channel::AppVersion;
+use settings::EditPredictionPromptFormat;
 use text::{Anchor, Bias};
 
-use std::env;
-use std::ops::Range;
-use std::{path::Path, sync::Arc, time::Instant};
+use std::{env, ops::Range, path::Path, sync::Arc, time::Instant};
 use zeta_prompt::{
-    CURSOR_MARKER, EditPredictionModelKind, ZetaFormat, clean_zeta2_model_output,
-    format_zeta_prompt, get_prefill, prompt_input_contains_special_tokens,
+    CURSOR_MARKER, ZetaFormat, clean_zeta2_model_output, format_zeta_prompt, get_prefill,
+    output_with_context_for_format, prompt_input_contains_special_tokens,
     zeta1::{self, EDITABLE_REGION_END_MARKER},
 };
 
+use crate::open_ai_compatible::{
+    load_open_ai_compatible_api_key_if_needed, send_custom_server_request,
+};
+
 pub fn request_prediction_with_zeta(
     store: &mut EditPredictionStore,
     EditPredictionModelInput {
@@ -35,9 +37,11 @@ pub fn request_prediction_with_zeta(
         debug_tx,
         trigger,
         project,
+        can_collect_data,
+        is_open_source,
         ..
     }: EditPredictionModelInput,
-    preferred_model: Option<EditPredictionModelKind>,
+    capture_data: Option<Vec<StoredEvent>>,
     cx: &mut Context<EditPredictionStore>,
 ) -> Task<Result<Option<EditPredictionResult>>> {
     let settings = &all_language_settings(None, cx).edit_predictions;
@@ -53,24 +57,35 @@ pub fn request_prediction_with_zeta(
     let http_client = cx.http_client();
     let buffer_snapshotted_at = Instant::now();
     let raw_config = store.zeta2_raw_config().cloned();
+    let preferred_experiment = store.preferred_experiment().map(|s| s.to_owned());
+    let open_ai_compatible_api_key = load_open_ai_compatible_api_key_if_needed(provider, cx);
 
     let excerpt_path: Arc<Path> = snapshot
         .file()
         .map(|file| -> Arc<Path> { file.full_path(cx).into() })
         .unwrap_or_else(|| Arc::from(Path::new("untitled")));
 
+    let repo_url = if can_collect_data {
+        let buffer_id = buffer.read(cx).remote_id();
+        project
+            .read(cx)
+            .git_store()
+            .read(cx)
+            .repository_and_path_for_buffer_id(buffer_id, cx)
+            .and_then(|(repo, _)| repo.read(cx).default_remote_url())
+    } else {
+        None
+    };
+
     let client = store.client.clone();
     let llm_token = store.llm_token.clone();
+    let organization_id = store
+        .user_store
+        .read(cx)
+        .current_organization()
+        .map(|organization| organization.id.clone());
     let app_version = AppVersion::global(cx);
 
-    let is_open_source = snapshot
-        .file()
-        .map_or(false, |file| store.is_file_open_source(&project, file, cx))
-        && events.iter().all(|event| event.in_open_source_repo())
-        && related_files.iter().all(|file| file.in_open_source_repo);
-
-    let can_collect_data = is_open_source && store.is_data_collection_enabled(cx);
-
     let request_task = cx.background_spawn({
         async move {
             let zeta_version = raw_config
@@ -79,38 +94,25 @@ pub fn request_prediction_with_zeta(
                 .unwrap_or(ZetaFormat::default());
 
             let cursor_offset = position.to_offset(&snapshot);
-            let (editable_offset_range, prompt_input) = zeta2_prompt_input(
+            let editable_range_in_excerpt: Range<usize>;
+            let (full_context_offset_range, prompt_input) = zeta2_prompt_input(
                 &snapshot,
                 related_files,
                 events,
                 excerpt_path,
                 cursor_offset,
-                zeta_version,
-                preferred_model,
+                preferred_experiment,
                 is_open_source,
                 can_collect_data,
+                repo_url,
             );
 
             if prompt_input_contains_special_tokens(&prompt_input, zeta_version) {
                 return Ok((None, None));
             }
 
-            let is_zeta1 = preferred_model == Some(EditPredictionModelKind::Zeta1);
-            let excerpt_ranges = prompt_input
-                .excerpt_ranges
-                .as_ref()
-                .ok_or_else(|| anyhow::anyhow!("excerpt_ranges missing from prompt input"))?;
-
             if let Some(debug_tx) = &debug_tx {
-                let prompt = if is_zeta1 {
-                    zeta1::format_zeta1_from_input(
-                        &prompt_input,
-                        excerpt_ranges.editable_350.clone(),
-                        excerpt_ranges.editable_350_context_150.clone(),
-                    )
-                } else {
-                    format_zeta_prompt(&prompt_input, zeta_version)
-                };
+                let prompt = format_zeta_prompt(&prompt_input, zeta_version);
                 debug_tx
                     .unbounded_send(DebugEvent::EditPredictionStarted(
                         EditPredictionStartedDebugEvent {
@@ -124,81 +126,105 @@ pub fn request_prediction_with_zeta(
 
             log::trace!("Sending edit prediction request");
 
-            let (request_id, output_text, usage) =
+            let (request_id, output_text, model_version, usage) =
                 if let Some(custom_settings) = &custom_server_settings {
                     let max_tokens = custom_settings.max_output_tokens * 4;
 
-                    if is_zeta1 {
-                        let ranges = excerpt_ranges;
-                        let prompt = zeta1::format_zeta1_from_input(
-                            &prompt_input,
-                            ranges.editable_350.clone(),
-                            ranges.editable_350_context_150.clone(),
-                        );
-                        let stop_tokens = vec![
-                            EDITABLE_REGION_END_MARKER.to_string(),
-                            format!("{EDITABLE_REGION_END_MARKER}\n"),
-                            format!("{EDITABLE_REGION_END_MARKER}\n\n"),
-                            format!("{EDITABLE_REGION_END_MARKER}\n\n\n"),
-                        ];
-
-                        let (response_text, request_id) = send_custom_server_request(
-                            provider,
-                            custom_settings,
-                            prompt,
-                            max_tokens,
-                            stop_tokens,
-                            &http_client,
-                        )
-                        .await?;
-
-                        let request_id = EditPredictionId(request_id.into());
-                        let output_text = zeta1::clean_zeta1_model_output(&response_text);
-
-                        (request_id, output_text, None)
-                    } else {
-                        let prompt = format_zeta_prompt(&prompt_input, zeta_version);
-                        let prefill = get_prefill(&prompt_input, zeta_version);
-                        let prompt = format!("{prompt}{prefill}");
-
-                        let (response_text, request_id) = send_custom_server_request(
-                            provider,
-                            custom_settings,
-                            prompt,
-                            max_tokens,
-                            vec![],
-                            &http_client,
-                        )
-                        .await?;
-
-                        let request_id = EditPredictionId(request_id.into());
-                        let output_text = if response_text.is_empty() {
-                            None
-                        } else {
-                            let output = format!("{prefill}{response_text}");
-                            Some(clean_zeta2_model_output(&output, zeta_version).to_string())
-                        };
-
-                        (request_id, output_text, None)
+                    match custom_settings.prompt_format {
+                        EditPredictionPromptFormat::Zeta => {
+                            let ranges = &prompt_input.excerpt_ranges;
+                            let prompt = zeta1::format_zeta1_from_input(
+                                &prompt_input,
+                                ranges.editable_350.clone(),
+                                ranges.editable_350_context_150.clone(),
+                            );
+                            editable_range_in_excerpt = ranges.editable_350.clone();
+                            let stop_tokens = vec![
+                                EDITABLE_REGION_END_MARKER.to_string(),
+                                format!("{EDITABLE_REGION_END_MARKER}\n"),
+                                format!("{EDITABLE_REGION_END_MARKER}\n\n"),
+                                format!("{EDITABLE_REGION_END_MARKER}\n\n\n"),
+                            ];
+
+                            let (response_text, request_id) = send_custom_server_request(
+                                provider,
+                                custom_settings,
+                                prompt,
+                                max_tokens,
+                                stop_tokens,
+                                open_ai_compatible_api_key.clone(),
+                                &http_client,
+                            )
+                            .await?;
+
+                            let request_id = EditPredictionId(request_id.into());
+                            let output_text = zeta1::clean_zeta1_model_output(&response_text);
+
+                            (request_id, output_text, None, None)
+                        }
+                        EditPredictionPromptFormat::Zeta2 => {
+                            let prompt = format_zeta_prompt(&prompt_input, zeta_version);
+                            let prefill = get_prefill(&prompt_input, zeta_version);
+                            let prompt = format!("{prompt}{prefill}");
+
+                            editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format(
+                                zeta_version,
+                                &prompt_input.excerpt_ranges,
+                            )
+                            .0;
+
+                            let (response_text, request_id) = send_custom_server_request(
+                                provider,
+                                custom_settings,
+                                prompt,
+                                max_tokens,
+                                vec![],
+                                open_ai_compatible_api_key.clone(),
+                                &http_client,
+                            )
+                            .await?;
+
+                            let request_id = EditPredictionId(request_id.into());
+                            let output_text = if response_text.is_empty() {
+                                None
+                            } else {
+                                let output = format!("{prefill}{response_text}");
+                                Some(clean_zeta2_model_output(&output, zeta_version).to_string())
+                            };
+
+                            (request_id, output_text, None, None)
+                        }
+                        _ => anyhow::bail!("unsupported prompt format"),
                     }
                 } else if let Some(config) = &raw_config {
                     let prompt = format_zeta_prompt(&prompt_input, config.format);
                     let prefill = get_prefill(&prompt_input, config.format);
                     let prompt = format!("{prompt}{prefill}");
+                    let environment = config
+                        .environment
+                        .clone()
+                        .or_else(|| Some(config.format.to_string().to_lowercase()));
                     let request = RawCompletionRequest {
                         model: config.model_id.clone().unwrap_or_default(),
                         prompt,
                         temperature: None,
                         stop: vec![],
                         max_tokens: Some(2048),
-                        environment: Some(config.format.to_string().to_lowercase()),
+                        environment,
                     };
 
+                    editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format(
+                        config.format,
+                        &prompt_input.excerpt_ranges,
+                    )
+                    .1;
+
                     let (mut response, usage) = EditPredictionStore::send_raw_llm_request(
                         request,
                         client,
                         None,
                         llm_token,
+                        organization_id,
                         app_version,
                     )
                     .await?;
@@ -210,13 +236,14 @@ pub fn request_prediction_with_zeta(
                         clean_zeta2_model_output(&output, config.format).to_string()
                     });
 
-                    (request_id, output_text, usage)
+                    (request_id, output_text, None, usage)
                 } else {
                     // Use V3 endpoint - server handles model/version selection and suffix stripping
                     let (response, usage) = EditPredictionStore::send_v3_request(
                         prompt_input.clone(),
                         client,
                         llm_token,
+                        organization_id,
                         app_version,
                         trigger,
                     )
@@ -228,7 +255,10 @@ pub fn request_prediction_with_zeta(
                     } else {
                         Some(response.output)
                     };
-                    (request_id, output_text, usage)
+                    editable_range_in_excerpt = response.editable_range;
+                    let model_version = response.model_version;
+
+                    (request_id, output_text, model_version, usage)
                 };
 
             let received_response_at = Instant::now();
@@ -236,9 +266,28 @@ pub fn request_prediction_with_zeta(
             log::trace!("Got edit prediction response");
 
             let Some(mut output_text) = output_text else {
-                return Ok((Some((request_id, None)), usage));
+                return Ok((Some((request_id, None, model_version)), usage));
             };
 
+            let editable_range_in_buffer = editable_range_in_excerpt.start
+                + full_context_offset_range.start
+                ..editable_range_in_excerpt.end + full_context_offset_range.start;
+
+            let mut old_text = snapshot
+                .text_for_range(editable_range_in_buffer.clone())
+                .collect::<String>();
+
+            // For the hashline format, the model may return <|set|>/<|insert|>
+            // edit commands instead of a full replacement. Apply them against
+            // the original editable region to produce the full replacement text.
+            // This must happen before cursor marker stripping because the cursor
+            // marker is embedded inside edit command content.
+            if let Some(rewritten_output) =
+                output_with_context_for_format(zeta_version, &old_text, &output_text)?
+            {
+                output_text = rewritten_output;
+            }
+
             // Client-side cursor marker processing (applies to both raw and v3 responses)
             let cursor_offset_in_output = output_text.find(CURSOR_MARKER);
             if let Some(offset) = cursor_offset_in_output {
@@ -258,10 +307,6 @@ pub fn request_prediction_with_zeta(
                     .ok();
             }
 
-            let mut old_text = snapshot
-                .text_for_range(editable_offset_range.clone())
-                .collect::<String>();
-
             if !output_text.is_empty() && !output_text.ends_with('\n') {
                 output_text.push('\n');
             }
@@ -272,7 +317,7 @@ pub fn request_prediction_with_zeta(
             let (edits, cursor_position) = compute_edits_and_cursor_position(
                 old_text,
                 &output_text,
-                editable_offset_range.start,
+                editable_range_in_buffer.start,
                 cursor_offset_in_output,
                 &snapshot,
             );
@@ -287,7 +332,9 @@ pub fn request_prediction_with_zeta(
                         edits,
                         cursor_position,
                         received_response_at,
+                        editable_range_in_buffer,
                     )),
+                    model_version,
                 )),
                 usage,
             ))
@@ -295,7 +342,7 @@ pub fn request_prediction_with_zeta(
     });
 
     cx.spawn(async move |this, cx| {
-        let Some((id, prediction)) =
+        let Some((id, prediction, model_version)) =
             EditPredictionStore::handle_api_response(&this, request_task.await, cx)?
         else {
             return Ok(None);
@@ -308,6 +355,7 @@ pub fn request_prediction_with_zeta(
             edits,
             cursor_position,
             received_response_at,
+            editable_range_in_buffer,
         )) = prediction
         else {
             return Ok(Some(EditPredictionResult {
@@ -316,6 +364,47 @@ pub fn request_prediction_with_zeta(
             }));
         };
 
+        if can_collect_data {
+            let weak_this = this.clone();
+            let id = id.clone();
+            let edited_buffer = edited_buffer.clone();
+            let edited_buffer_snapshot = edited_buffer_snapshot.clone();
+            let example_task = capture_data.and_then(|stored_events| {
+                cx.update(|cx| {
+                    crate::capture_example(
+                        project.clone(),
+                        edited_buffer.clone(),
+                        position,
+                        stored_events,
+                        false,
+                        cx,
+                    )
+                })
+            });
+            cx.spawn(async move |cx| {
+                let example_spec = if let Some(task) = example_task {
+                    task.await.ok()
+                } else {
+                    None
+                };
+
+                weak_this
+                    .update(cx, |this, cx| {
+                        this.enqueue_settled_prediction(
+                            id.clone(),
+                            &project,
+                            &edited_buffer,
+                            &edited_buffer_snapshot,
+                            editable_range_in_buffer,
+                            example_spec,
+                            cx,
+                        );
+                    })
+                    .ok();
+            })
+            .detach();
+        }
+
         Ok(Some(
             EditPredictionResult::new(
                 id,
@@ -326,6 +415,7 @@ pub fn request_prediction_with_zeta(
                 buffer_snapshotted_at,
                 received_response_at,
                 inputs,
+                model_version,
                 cx,
             )
             .await,
@@ -339,11 +429,11 @@ pub fn zeta2_prompt_input(
     events: Vec<Arc<zeta_prompt::Event>>,
     excerpt_path: Arc<Path>,
     cursor_offset: usize,
-    zeta_format: ZetaFormat,
-    preferred_model: Option<EditPredictionModelKind>,
+    preferred_experiment: Option<String>,
     is_open_source: bool,
     can_collect_data: bool,
-) -> (std::ops::Range<usize>, zeta_prompt::ZetaPromptInput) {
+    repo_url: Option<String>,
+) -> (Range<usize>, zeta_prompt::ZetaPromptInput) {
     let cursor_point = cursor_offset.to_point(snapshot);
 
     let (full_context, full_context_offset_range, excerpt_ranges) =
@@ -358,13 +448,6 @@ pub fn zeta2_prompt_input(
     let full_context_start_offset = full_context_offset_range.start;
     let full_context_start_row = full_context.start.row;
 
-    let editable_offset_range = match preferred_model {
-        Some(EditPredictionModelKind::Zeta1) => excerpt_ranges.editable_350.clone(),
-        _ => zeta_prompt::excerpt_range_for_format(zeta_format, &excerpt_ranges).0,
-    };
-    let absolute_editable_range = full_context_start_offset + editable_offset_range.start
-        ..full_context_start_offset + editable_offset_range.end;
-
     let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset;
 
     let prompt_input = zeta_prompt::ZetaPromptInput {
@@ -373,77 +456,17 @@ pub fn zeta2_prompt_input(
             .text_for_range(full_context)
             .collect::<String>()
             .into(),
-        editable_range_in_excerpt: editable_offset_range,
         cursor_offset_in_excerpt,
         excerpt_start_row: Some(full_context_start_row),
         events,
         related_files,
-        excerpt_ranges: Some(excerpt_ranges),
-        preferred_model,
+        excerpt_ranges,
+        experiment: preferred_experiment,
         in_open_source_repo: is_open_source,
         can_collect_data,
+        repo_url,
     };
-    (absolute_editable_range, prompt_input)
-}
-
-pub(crate) async fn send_custom_server_request(
-    provider: settings::EditPredictionProvider,
-    settings: &OpenAiCompatibleEditPredictionSettings,
-    prompt: String,
-    max_tokens: u32,
-    stop_tokens: Vec<String>,
-    http_client: &Arc<dyn http_client::HttpClient>,
-) -> Result<(String, String)> {
-    match provider {
-        settings::EditPredictionProvider::Ollama => {
-            let response =
-                ollama::make_request(settings.clone(), prompt, stop_tokens, http_client.clone())
-                    .await?;
-            Ok((response.response, response.created_at))
-        }
-        _ => {
-            let request = RawCompletionRequest {
-                model: settings.model.clone(),
-                prompt,
-                max_tokens: Some(max_tokens),
-                temperature: None,
-                stop: stop_tokens
-                    .into_iter()
-                    .map(std::borrow::Cow::Owned)
-                    .collect(),
-                environment: None,
-            };
-
-            let request_body = serde_json::to_string(&request)?;
-            let http_request = http_client::Request::builder()
-                .method(http_client::Method::POST)
-                .uri(settings.api_url.as_ref())
-                .header("Content-Type", "application/json")
-                .body(http_client::AsyncBody::from(request_body))?;
-
-            let mut response = http_client.send(http_request).await?;
-            let status = response.status();
-
-            if !status.is_success() {
-                let mut body = String::new();
-                response.body_mut().read_to_string(&mut body).await?;
-                anyhow::bail!("custom server error: {} - {}", status, body);
-            }
-
-            let mut body = String::new();
-            response.body_mut().read_to_string(&mut body).await?;
-
-            let parsed: RawCompletionResponse =
-                serde_json::from_str(&body).context("Failed to parse completion response")?;
-            let text = parsed
-                .choices
-                .into_iter()
-                .next()
-                .map(|choice| choice.text)
-                .unwrap_or_default();
-            Ok((text, parsed.id))
-        }
-    }
+    (full_context_offset_range, prompt_input)
 }
 
 pub(crate) fn edit_prediction_accepted(
@@ -457,9 +480,15 @@ pub(crate) fn edit_prediction_accepted(
     }
 
     let request_id = current_prediction.prediction.id.to_string();
+    let model_version = current_prediction.prediction.model_version;
     let require_auth = custom_accept_url.is_none();
     let client = store.client.clone();
     let llm_token = store.llm_token.clone();
+    let organization_id = store
+        .user_store
+        .read(cx)
+        .current_organization()
+        .map(|organization| organization.id.clone());
     let app_version = AppVersion::global(cx);
 
     cx.background_spawn(async move {
@@ -476,6 +505,7 @@ pub(crate) fn edit_prediction_accepted(
                 let req = builder.uri(url.as_ref()).body(
                     serde_json::to_string(&AcceptEditPredictionBody {
                         request_id: request_id.clone(),
+                        model_version: model_version.clone(),
                     })?
                     .into(),
                 );
@@ -483,6 +513,7 @@ pub(crate) fn edit_prediction_accepted(
             },
             client,
             llm_token,
+            organization_id,
             app_version,
             require_auth,
         )

crates/edit_prediction_cli/evals/vscode--add-async-and-await.md 🔗

@@ -0,0 +1,88 @@
++++
+repository_url = "https://github.com/microsoft/vscode"
+revision = "29e6da6efa2287aaa981635a475d425ff4fd5d5c"
++++
+
+## Edit History
+
+```diff
+--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts
++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts
+@@ -304,8 +304,8 @@ CommandsRegistry.registerCommand({
+ 
+ CommandsRegistry.registerCommand({
+ 	id: REVERSE_CONTINUE_ID,
+-	handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
+-		getThreadAndRun(accessor, context, thread => thread.reverseContinue());
++	handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
++		await getThreadAndRun(accessor, context, thread => thread.reverseContinue());
+ 	}
+ });
+--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts
++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts
+@@ -311,11 +311,11 @@ CommandsRegistry.registerCommand({
+ 
+ CommandsRegistry.registerCommand({
+ 	id: STEP_BACK_ID,
+-	handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
++	handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
+ 		const contextKeyService = accessor.get(IContextKeyService);
+ 		if (CONTEXT_DISASSEMBLY_VIEW_FOCUS.getValue(contextKeyService)) {
+-			getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack('instruction'));
++			await getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack('instruction'));
+ 		} else {
+-			getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack());
++			await getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack());
+ 		}
+ 	}
+ });
+--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts
++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts
+@@ -323,8 +323,8 @@ CommandsRegistry.registerCommand({
+ 
+ CommandsRegistry.registerCommand({
+ 	id: TERMINATE_THREAD_ID,
+-	handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
+-		getThreadAndRun(accessor, context, thread => thread.terminate());
++	handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
++		await getThreadAndRun(accessor, context, thread => thread.terminate());
+ 	}
+ });
+```
+
+## Cursor Position
+
+```src/vs/workbench/contrib/debug/browser/debugCommands.ts
+	weight: KeybindingWeight.WorkbenchContrib,
+	primary: isWeb ? (KeyMod.Alt | KeyCode.F10) : KeyCode.F10, // Browsers do not allow F10 to be binded so we have to bind an alternative
+	when: CONTEXT_DEBUG_STATE.isEqualTo('stopped'),
+	handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
+	//       ^[CURSOR_POSITION]
+		const contextKeyService = accessor.get(IContextKeyService);
+		if (CONTEXT_DISASSEMBLY_VIEW_FOCUS.getValue(contextKeyService)) {
+			getThreadAndRun(accessor, context, (thread: IThread) => thread.next('instruction'));
+		} else {
+```
+
+## Expected Patch
+
+```diff
+--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts
++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts
+@@ -467,10 +467,10 @@ KeybindingsRegistry.registerCommandAndKeybindingRule({
+ 	weight: KeybindingWeight.WorkbenchContrib,
+ 	primary: isWeb ? (KeyMod.Alt | KeyCode.F10) : KeyCode.F10, // Browsers do not allow F10 to be binded so we have to bind an alternative
+ 	when: CONTEXT_DEBUG_STATE.isEqualTo('stopped'),
+-	handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
++	handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => {
+ 		const contextKeyService = accessor.get(IContextKeyService);
+ 		if (CONTEXT_DISASSEMBLY_VIEW_FOCUS.getValue(contextKeyService)) {
+-			getThreadAndRun(accessor, context, (thread: IThread) => thread.next('instruction'));
++			await getThreadAndRun(accessor, context, (thread: IThread) => thread.next('instruction'));
+ 		} else {
+-			getThreadAndRun(accessor, context, (thread: IThread) => thread.next());
++			await getThreadAndRun(accessor, context, (thread: IThread) => thread.next());
+ 		}
+ 	}
+ });
+```

crates/edit_prediction_cli/evals/vscode--add-class-decorator.md 🔗

@@ -0,0 +1,74 @@
++++
+repository_url = "https://github.com/microsoft/vscode"
+revision = "6f6e26fcdf0a7ca5084e0da284cd7a5b2d41ae4d"
++++
+
+## Edit History
+
+```diff
+--- a/src/vs/workbench/api/common/extHostTypes.ts
++++ b/src/vs/workbench/api/common/extHostTypes.ts
+@@ -18,6 +18,14 @@ import { FileSystemProviderErrorCode, markAsFileSystemProviderError } from 'vs/
+ import type * as vscode from 'vscode';
+
++function es5ClassCompat(target: Function): any {
++	///@ts-expect-error
++	function _() { return Reflect.construct(target, arguments, this.constructor); }
++	Object.defineProperty(_, 'name', Object.getOwnPropertyDescriptor(target, 'name')!);
++	Object.setPrototypeOf(_, target);
++	Object.setPrototypeOf(_.prototype, target.prototype);
++	return _;
++}
++
++@es5ClassCompat
+ export class Disposable {
+--- a/src/vs/workbench/api/common/extHostTypes.ts
++++ b/src/vs/workbench/api/common/extHostTypes.ts
+@@ -50,6 +58,7 @@ export class Disposable {
+ 	}
+ }
+
++@es5ClassCompat
+ export class Position {
+
+ 	static Min(...positions: Position[]): Position {
+--- a/src/vs/workbench/api/common/extHostTypes.ts
++++ b/src/vs/workbench/api/common/extHostTypes.ts
+@@ -220,6 +229,7 @@ export class Position {
+ 	}
+ }
+
++@es5ClassCompat
+ export class Range {
+
+ 	static isRange(thing: any): thing is vscode.Range {
+```
+
+## Cursor Position
+
+```src/vs/workbench/api/common/extHostTypes.ts
+	Prepend = 3
+}
+
+export class TextEdit {
+// <[CURSOR_POSITION]
+
+	static isTextEdit(thing: any): thing is TextEdit {
+		if (thing instanceof TextEdit) {
+			return true;
+```
+
+## Expected Patch
+
+```diff
+--- a/src/vs/workbench/api/common/extHostTypes.ts
++++ b/src/vs/workbench/api/common/extHostTypes.ts
+@@ -475,6 +485,7 @@ export enum EnvironmentVariableMutatorType {
+ 	Prepend = 3
+ }
+
++@es5ClassCompat
+ export class TextEdit {
+
+ 	static isTextEdit(thing: any): thing is TextEdit {
+```

crates/edit_prediction_cli/evals/vscode--add-interface-method.md 🔗

@@ -0,0 +1,113 @@
++++
+repository_url = "https://github.com/microsoft/vscode"
+revision = "b64eaf598008e2d600a81d846108f72cb37b48e2"
++++
+
+## Edit History
+
+```diff
+--- a/src/vs/platform/window/electron-main/window.ts
++++ b/src/vs/platform/window/electron-main/window.ts
+@@ -1,49 +1,50 @@
+ export interface ICodeWindow extends IDisposable {
+ 
+ 	readonly onWillLoad: Event<ILoadEvent>;
+ 	readonly onDidSignalReady: Event<void>;
++	readonly onDidTriggerSystemContextMenu: Event<{ x: number; y: number }>;
+ 	readonly onDidClose: Event<void>;
+ 	readonly onDidDestroy: Event<void>;
+ 
+ 	readonly whenClosedOrLoaded: Promise<void>;
+--- a/src/vs/platform/windows/electron-main/window.ts
++++ b/src/vs/platform/windows/electron-main/window.ts
+@@ -63,60 +63,63 @@ const enum ReadyState {
+ export class CodeWindow extends Disposable implements ICodeWindow {
+ 
+ 	//#region Events
+ 
+ 	private readonly _onWillLoad = this._register(new Emitter<ILoadEvent>());
+ 	readonly onWillLoad = this._onWillLoad.event;
+ 
+ 	private readonly _onDidSignalReady = this._register(new Emitter<void>());
+ 	readonly onDidSignalReady = this._onDidSignalReady.event;
+ 
++	private readonly _onDidTriggerSystemContextMenu = this._register(new Emitter<{ x: number; y: number }>());
++	readonly onDidTriggerSystemContextMenu = this._onDidTriggerSystemContextMenu.event;
++
+ 	private readonly _onDidClose = this._register(new Emitter<void>());
+ 	readonly onDidClose = this._onDidClose.event;
+ 
+ 	private readonly _onDidDestroy = this._register(new Emitter<void>());
+ 	readonly onDidDestroy = this._onDidDestroy.event;
+ 
+ 	//#endregion
+--- a/src/vs/platform/windows/electron-main/windows.ts
++++ b/src/vs/platform/windows/electron-main/windows.ts
+@@ -1,54 +1,55 @@
+ export interface IWindowsMainService {
+ 
+ 	readonly _serviceBrand: undefined;
+ 
+ 	readonly onDidChangeWindowsCount: Event<IWindowsCountChangedEvent>;
+ 
+ 	readonly onDidOpenWindow: Event<ICodeWindow>;
+ 	readonly onDidSignalReadyWindow: Event<ICodeWindow>;
++	readonly onDidTriggerSystemContextMenu: Event<{ window: ICodeWindow; x: number; y: number }>;
+ 	readonly onDidDestroyWindow: Event<ICodeWindow>;
+--- a/src/vs/platform/windows/electron-main/windowsMainService.ts
++++ b/src/vs/platform/windows/electron-main/windowsMainService.ts
+@@ -160,60 +160,63 @@ interface ISingleFolderWorkspacePathToOpen extends IPathToOpen {
+ export class WindowsMainService extends Disposable implements IWindowsMainService {
+ 
+ 	declare readonly _serviceBrand: undefined;
+ 
+ 	private static readonly WINDOWS: ICodeWindow[] = [];
+ 
+ 	private readonly _onDidOpenWindow = this._register(new Emitter<ICodeWindow>());
+ 	readonly onDidOpenWindow = this._onDidOpenWindow.event;
+ 
+ 	private readonly _onDidSignalReadyWindow = this._register(new Emitter<ICodeWindow>());
+ 	readonly onDidSignalReadyWindow = this._onDidSignalReadyWindow.event;
+ 
+ 	private readonly _onDidDestroyWindow = this._register(new Emitter<ICodeWindow>());
+ 	readonly onDidDestroyWindow = this._onDidDestroyWindow.event;
+ 
+ 	private readonly _onDidChangeWindowsCount = this._register(new Emitter<IWindowsCountChangedEvent>());
+ 	readonly onDidChangeWindowsCount = this._onDidChangeWindowsCount.event;
+ 
++	private readonly _onDidTriggerSystemContextMenu = this._register(new Emitter<{ window: ICodeWindow; x: number; y: number }>());
++	readonly onDidTriggerSystemContextMenu = this._onDidTriggerSystemContextMenu.event;
++
+ 	private readonly windowsStateHandler = this._register(new WindowsStateHandler(this, this.stateMainService, this.lifecycleMainService, this.logService, this.configurationService));
+```
+
+## Cursor Position
+
+```src/vs/platform/windows/test/electron-main/windowsFinder.test.ts
+	function createTestCodeWindow(options: { lastFocusTime: number; openedFolderUri?: URI; openedWorkspace?: IWorkspaceIdentifier }): ICodeWindow {
+		return new class implements ICodeWindow {
+			onWillLoad: Event<ILoadEvent> = Event.None;
+			onDidSignalReady: Event<void> = Event.None;
+			// <[CURSOR_POSITION]
+			onDidClose: Event<void> = Event.None;
+			onDidDestroy: Event<void> = Event.None;
+			whenClosedOrLoaded: Promise<void> = Promise.resolve();
+			id: number = -1;
+```
+
+## Expected Patch
+
+```diff
+--- a/src/vs/platform/windows/test/electron-main/windowsFinder.test.ts
++++ b/src/vs/platform/windows/test/electron-main/windowsFinder.test.ts
+@@ -7,60 +7,61 @@ import * as assert from 'assert';
+ 	function createTestCodeWindow(options: { lastFocusTime: number; openedFolderUri?: URI; openedWorkspace?: IWorkspaceIdentifier }): ICodeWindow {
+ 		return new class implements ICodeWindow {
+ 			onWillLoad: Event<ILoadEvent> = Event.None;
++			onDidTriggerSystemContextMenu: Event<{ x: number; y: number }> = Event.None;
+ 			onDidSignalReady: Event<void> = Event.None;
+ 			onDidClose: Event<void> = Event.None;
+ 			onDidDestroy: Event<void> = Event.None;
+ 			whenClosedOrLoaded: Promise<void> = Promise.resolve();
+ 			id: number = -1;
+```

crates/edit_prediction_cli/evals/vscode--log-object-property.md 🔗

@@ -0,0 +1,56 @@
++++
+repository_url = "https://github.com/microsoft/vscode"
+revision = "e28a92fc1fbe9de11eca2f8ad19899334bff8525"
++++
+
+This prediction requires the model to see the `IDiffComputationResult` type definition.
+
+## Edit History
+
+```diff
+--- a/src/vs/editor/browser/widget/diffEditorWidget.ts
++++ b/src/vs/editor/browser/widget/diffEditorWidget.ts
+@@ -1117,6 +1117,7 @@
+ 				&& currentModifiedModel === this._modifiedEditor.getModel()
+ 			) {
+ 				this._setState(editorBrowser.DiffEditorState.DiffComputed);
++				console.log("did quit:")
+ 				this._diffComputationResult = result;
+ 				this._updateDecorationsRunner.schedule();
+ 				this._onDidUpdateDiff.fire();
+```
+
+## Cursor Position
+
+```src/vs/editor/browser/widget/diffEditorWidget.ts
+			if (currentToken === this._diffComputationToken
+				&& currentOriginalModel === this._originalEditor.getModel()
+				&& currentModifiedModel === this._modifiedEditor.getModel()
+			) {
+				this._setState(editorBrowser.DiffEditorState.DiffComputed);
+				console.log("did quit:")
+				//                    ^[CURSOR_POSITION]
+				this._diffComputationResult = result;
+				this._updateDecorationsRunner.schedule();
+				this._onDidUpdateDiff.fire();
+			}
+```
+
+## Expected Patch
+
+```diff
+--- a/src/vs/editor/browser/widget/diffEditorWidget.ts
++++ b/src/vs/editor/browser/widget/diffEditorWidget.ts
+@@ -1115,10 +1115,10 @@
+ 			if (currentToken === this._diffComputationToken
+ 				&& currentOriginalModel === this._originalEditor.getModel()
+ 				&& currentModifiedModel === this._modifiedEditor.getModel()
+ 			) {
+ 				this._setState(editorBrowser.DiffEditorState.DiffComputed);
+-				console.log("did quit:")
++				console.log("did quit:", result.quitEarly)
+ 				this._diffComputationResult = result;
+ 				this._updateDecorationsRunner.schedule();
+ 				this._onDidUpdateDiff.fire();
+ 			}
+```

crates/edit_prediction_cli/evals/zed--add-eprintln.md 🔗

@@ -1,43 +1,37 @@
 +++
 repository_url = "git@github.com:zed-industries/zed"
-revision = "780a87dd98f26816876d12e2728933b17faca78d"
+revision = "b7090c9fae7390a82021b994994c0f587744d96c"
 +++
 
+This example shows the model's preference for making conservative predictions, and its ability to
+place the cursor within the predicted output.
+
 ## Edit History
 
 ```diff
 --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs
 +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs
-@@ -206,6 +206,7 @@
-         self.select_next_edit(&Default::default(), window, cx);
-         self.confirm(&Default::default(), window, cx);
-
+@@ -144,7 +144,7 @@
+     fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {
 +        epr
-         cx.notify();
-     }
-
+         let next_index = self
+             .ep_store
+             .read(cx)
 ```
 
 ## Cursor Position
 
 ```crates/edit_prediction_ui/src/rate_prediction_modal.rs
-        let current_completion = self
-            .active_prediction
-            .as_ref()
-            .map(|completion| completion.prediction.clone());
-        self.select_completion(current_completion, false, window, cx);
-        self.select_next_edit(&Default::default(), window, cx);
-        self.confirm(&Default::default(), window, cx);
-
+    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {
         epr
         // ^[CURSOR_POSITION]
-        cx.notify();
-    }
-
-    pub fn thumbs_down_active(
-        &mut self,
-        _: &ThumbsDownActivePrediction,
-        window: &mut Window,
+        let next_index = self
+            .ep_store
+            .read(cx)
+            .shown_predictions()
+            .skip(self.selected_index)
+            .enumerate()
+            .skip(1) // Skip straight to the next item
 ```
 
 ## Expected Patch
@@ -45,12 +39,16 @@ revision = "780a87dd98f26816876d12e2728933b17faca78d"
 ```diff
 --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs
 +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs
-@@ -201,16 +201,16 @@
-         self.confirm(&Default::default(), window, cx);
-
+@@ -144,14 +144,14 @@
+     fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {
 -        epr
 +        eprintln!("");
 #                   ^[CURSOR_POSITION]
-         cx.notify();
-     }
+         let next_index = self
+             .ep_store
+             .read(cx)
+             .shown_predictions()
+             .skip(self.selected_index)
+             .enumerate()
+             .skip(1) // Skip straight to the next item
 ```

crates/edit_prediction_cli/evals/zed--change-match-arm.md 🔗

@@ -0,0 +1,68 @@
++++
+repository_url = "git@github.com:zed-industries/zed"
+revision = "be5763632dccb33470ca233c36ccd9e5e790e3b2"
++++
+
+This prediction requires the model to see the `project::Event` enum.
+
+## Edit History
+
+```diff
+--- a/crates/edit_prediction/src/edit_prediction.rs
++++ b/crates/edit_prediction/src/edit_prediction.rs
+@@ -1035,7 +1035,7 @@
+                     project_state.recent_paths.push_front(path);
+                 }
+             }
+-            project::Event::DiagnosticsUpdated { .. } => {
++            project::Event::Disk { .. } => {
+                 if cx.has_flag::<EditPredictionJumpsFeatureFlag>() {
+                     self.refresh_prediction_from_diagnostics(
+                         project,
+```
+
+## Cursor Position
+
+```crates/edit_prediction/src/edit_prediction.rs
+                    {
+                        project_state.recent_paths.remove(ix);
+                    }
+                    project_state.recent_paths.push_front(path);
+                }
+            }
+            project::Event::Disk { .. } => {
+                //              ^[CURSOR_POSITION]
+                if cx.has_flag::<EditPredictionJumpsFeatureFlag>() {
+                    self.refresh_prediction_from_diagnostics(
+                        project,
+```
+
+## Expected Patch
+
+```diff
+--- a/crates/edit_prediction/src/edit_prediction.rs
++++ b/crates/edit_prediction/src/edit_prediction.rs
+@@ -1032,10 +1032,10 @@
+                     project_state.recent_paths.push_front(path);
+                 }
+             }
+-            project::Event::Disk { .. } => {
++            project::Event::DiskBasedDiagnosticsFinished { .. } => {
+                 if cx.has_flag::<EditPredictionJumpsFeatureFlag>() {
+                     self.refresh_prediction_from_diagnostics(
+                         project,
+```
+
+```diff
+--- a/crates/edit_prediction/src/edit_prediction.rs
++++ b/crates/edit_prediction/src/edit_prediction.rs
+@@ -1032,10 +1032,10 @@
+                     project_state.recent_paths.push_front(path);
+                 }
+             }
+-            project::Event::Disk { .. } => {
++            project::Event::DiskBasedDiagnosticsStarted { .. } => {
+                 if cx.has_flag::<EditPredictionJumpsFeatureFlag>() {
+                     self.refresh_prediction_from_diagnostics(
+                         project,
+```

crates/edit_prediction_cli/src/anthropic_client.rs 🔗

@@ -50,6 +50,7 @@ impl PlainLlmClient {
             metadata: None,
             output_config: None,
             stop_sequences: Vec::new(),
+            speed: None,
             temperature: None,
             top_k: None,
             top_p: None,
@@ -89,6 +90,7 @@ impl PlainLlmClient {
             metadata: None,
             output_config: None,
             stop_sequences: Vec::new(),
+            speed: None,
             temperature: None,
             top_k: None,
             top_p: None,
@@ -578,6 +580,7 @@ impl BatchingLlmClient {
                     temperature: None,
                     top_k: None,
                     top_p: None,
+                    speed: None,
                 };
 
                 let custom_id = format!("req_hash_{}", hash);

crates/edit_prediction_cli/src/format_prompt.rs 🔗

@@ -9,10 +9,11 @@ use anyhow::{Context as _, Result, anyhow};
 use edit_prediction::udiff;
 use gpui::AsyncApp;
 use similar::DiffableStr;
+use std::ops::Range;
 use std::sync::Arc;
-use std::{fmt::Write as _, ops::Range};
 use zeta_prompt::{
-    ZetaFormat, excerpt_range_for_format, format_zeta_prompt, resolve_cursor_region,
+    ZetaFormat, encode_patch_as_output_for_format, excerpt_range_for_format, format_zeta_prompt,
+    output_end_marker_for_format, resolve_cursor_region,
 };
 
 pub async fn run_format_prompt(
@@ -36,12 +37,8 @@ pub async fn run_format_prompt(
             step_progress.set_substatus("formatting teacher prompt");
 
             let zeta_format = ZetaFormat::default();
-            let excerpt_ranges = prompt_inputs
-                .excerpt_ranges
-                .as_ref()
-                .context("prompt_inputs must have excerpt_ranges")?;
             let (editable_range, context_range) =
-                excerpt_range_for_format(zeta_format, excerpt_ranges);
+                excerpt_range_for_format(zeta_format, &prompt_inputs.excerpt_ranges);
 
             let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range);
             example.prompt = Some(ExamplePrompt {
@@ -57,18 +54,22 @@ pub async fn run_format_prompt(
 
             let prompt = format_zeta_prompt(prompt_inputs, zeta_format);
             let prefill = zeta_prompt::get_prefill(prompt_inputs, zeta_format);
-            let (expected_patch, expected_cursor_offset) = example
+            let expected_output = example
                 .spec
                 .expected_patches_with_cursor_positions()
                 .into_iter()
                 .next()
-                .context("expected patches is empty")?;
-            let expected_output = zeta2_output_for_patch(
-                prompt_inputs,
-                &expected_patch,
-                expected_cursor_offset,
-                zeta_format,
-            )?;
+                .and_then(|(expected_patch, expected_cursor_offset)| {
+                    zeta2_output_for_patch(
+                        prompt_inputs,
+                        &expected_patch,
+                        expected_cursor_offset,
+                        zeta_format,
+                    )
+                    .ok()
+                })
+                .unwrap_or_default();
+
             let rejected_output = example.spec.rejected_patch.as_ref().and_then(|patch| {
                 zeta2_output_for_patch(prompt_inputs, patch, None, zeta_format).ok()
             });
@@ -101,6 +102,12 @@ pub fn zeta2_output_for_patch(
         old_editable_region.push('\n');
     }
 
+    if let Some(encoded_output) =
+        encode_patch_as_output_for_format(version, &old_editable_region, patch, cursor_offset)?
+    {
+        return Ok(encoded_output);
+    }
+
     let (mut result, first_hunk_offset) =
         udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable_region).with_context(
             || {
@@ -120,16 +127,11 @@ pub fn zeta2_output_for_patch(
         result.insert_str(offset, zeta_prompt::CURSOR_MARKER);
     }
 
-    match version {
-        ZetaFormat::V0120GitMergeMarkers
-        | ZetaFormat::V0131GitMergeMarkersPrefix
-        | ZetaFormat::V0211SeedCoder => {
-            if !result.ends_with('\n') {
-                result.push('\n');
-            }
-            result.push_str(zeta_prompt::v0120_git_merge_markers::END_MARKER);
+    if let Some(end_marker) = output_end_marker_for_format(version) {
+        if !result.ends_with('\n') {
+            result.push('\n');
         }
-        _ => (),
+        result.push_str(end_marker);
     }
 
     Ok(result)
@@ -258,7 +260,6 @@ impl TeacherPrompt {
 
     pub fn format_context(example: &Example) -> String {
         let related_files = example.prompt_inputs.as_ref().map(|pi| &pi.related_files);
-
         let Some(related_files) = related_files else {
             return "(No context)".to_string();
         };
@@ -267,27 +268,10 @@ impl TeacherPrompt {
             return "(No context)".to_string();
         }
 
-        let mut prompt = String::new();
-        for file in related_files {
-            let path_str = file.path.to_string_lossy();
-            writeln!(&mut prompt, "`````{path_str}").ok();
-
-            let mut prev_row = 0;
-            for excerpt in &file.excerpts {
-                if excerpt.row_range.start > prev_row {
-                    prompt.push_str("…\n");
-                }
-                prompt.push_str(&excerpt.text);
-                prompt.push('\n');
-                prev_row = excerpt.row_range.end;
-            }
-            if prev_row < file.max_row {
-                prompt.push_str("…\n");
-            }
-            prompt.push_str("\n`````\n");
-        }
-
-        prompt
+        let prefix = "`````";
+        let suffix = "`````\n\n";
+        let max_tokens = 1024;
+        zeta_prompt::format_related_files_within_budget(related_files, &prefix, &suffix, max_tokens)
     }
 
     fn format_cursor_excerpt(

crates/edit_prediction_cli/src/git.rs 🔗

@@ -91,7 +91,7 @@ pub async fn ensure_repo_cloned(repo_url: &str) -> Result<PathBuf> {
     }
 
     // Always fetch to get latest commits
-    run_git(&repo_path, &["fetch", "origin"]).await?;
+    run_git(&repo_path, &["fetch", "--depth", "1000", "origin"]).await?;
 
     // Check if we have a valid HEAD, if not checkout FETCH_HEAD
     let has_head = run_git(&repo_path, &["rev-parse", "HEAD"]).await.is_ok();

crates/edit_prediction_cli/src/load_project.rs 🔗

@@ -93,21 +93,19 @@ pub async fn run_load_project(
         let cursor_offset_in_excerpt = cursor_offset - full_context_offset_range.start;
         let excerpt_start_row = Some(full_context_point_range.start.row);
 
-        let editable_range_in_excerpt = excerpt_ranges.editable_350.clone();
-
         (
             ZetaPromptInput {
                 cursor_path: example.spec.cursor_path.clone(),
                 cursor_excerpt,
-                editable_range_in_excerpt,
                 cursor_offset_in_excerpt,
                 excerpt_start_row,
                 events,
                 related_files: existing_related_files,
-                excerpt_ranges: Some(excerpt_ranges),
-                preferred_model: None,
+                excerpt_ranges,
                 in_open_source_repo: false,
                 can_collect_data: false,
+                experiment: None,
+                repo_url: None,
             },
             language_name,
         )

crates/edit_prediction_cli/src/main.rs 🔗

@@ -39,6 +39,7 @@ use zeta_prompt::ZetaFormat;
 
 use reqwest_client::ReqwestClient;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use std::env;
 use std::fmt::Display;
 use std::fs::{File, OpenOptions};
 use std::hash::{Hash, Hasher};
@@ -54,6 +55,7 @@ use crate::load_project::run_load_project;
 use crate::paths::{FAILED_EXAMPLES_DIR, RUN_DIR};
 use crate::predict::run_prediction;
 use crate::progress::Progress;
+use crate::pull_examples::{fetch_settled_examples_after, parse_settled_after_input};
 use crate::retrieve_context::run_context_retrieval;
 use crate::score::run_scoring;
 use crate::split_commit::SplitCommitArgs;
@@ -131,6 +133,10 @@ Inputs can be file paths or special specifiers:
       Fetch rejected edit predictions from Snowflake after the given RFC3339 timestamp.
       These are predictions that were shown to users but rejected (useful for DPO training).
 
+  settled-after:{timestamp}
+      Fetch settled stream examples from Snowflake after the given RFC3339 timestamp.
+      These are examples from the edit prediction settled stream.
+
   rated-after:{timestamp}
       Fetch user-rated edit predictions from Snowflake after the given RFC3339 timestamp.
       These are predictions that users explicitly rated as positive or negative via the
@@ -165,6 +171,9 @@ Examples:
   # Read user-rated predictions
   ep read rated-after:2025-01-01T00:00:00Z -o rated.jsonl
 
+  # Read settled stream examples
+  ep read settled-after:2025-01-01T00:00:00Z -o settled.jsonl
+
   # Read only positively rated predictions
   ep read rated-positive-after:2025-01-01T00:00:00Z -o positive.jsonl
 
@@ -294,6 +303,9 @@ struct EvalArgs {
     /// Path to write summary scores as JSON
     #[clap(long)]
     summary_json: Option<PathBuf>,
+    /// Print all individual example lines (default: up to 20)
+    #[clap(long)]
+    verbose: bool,
 }
 
 #[derive(Clone, Copy, Default, Debug, PartialEq, Eq, Hash)]
@@ -346,6 +358,7 @@ enum PredictionProvider {
     Mercury,
     Zeta1,
     Zeta2(ZetaFormat),
+    Baseten(ZetaFormat),
     Teacher(TeacherBackend),
     TeacherNonBatching(TeacherBackend),
     Repair,
@@ -364,6 +377,7 @@ impl std::fmt::Display for PredictionProvider {
             PredictionProvider::Mercury => write!(f, "mercury"),
             PredictionProvider::Zeta1 => write!(f, "zeta1"),
             PredictionProvider::Zeta2(format) => write!(f, "zeta2:{format}"),
+            PredictionProvider::Baseten(format) => write!(f, "baseten:{format}"),
             PredictionProvider::Teacher(backend) => write!(f, "teacher:{backend}"),
             PredictionProvider::TeacherNonBatching(backend) => {
                 write!(f, "teacher-non-batching:{backend}")
@@ -403,6 +417,13 @@ impl std::str::FromStr for PredictionProvider {
                 Ok(PredictionProvider::TeacherNonBatching(backend))
             }
             "repair" => Ok(PredictionProvider::Repair),
+            "baseten" => {
+                let format = arg
+                    .map(ZetaFormat::parse)
+                    .transpose()?
+                    .unwrap_or(ZetaFormat::default());
+                Ok(PredictionProvider::Baseten(format))
+            }
             _ => {
                 anyhow::bail!(
                     "unknown provider `{provider}`. Valid options: sweep, mercury, zeta1, zeta2, zeta2:<version>, teacher, teacher:<backend>, teacher-non-batching, repair\n\
@@ -631,6 +652,7 @@ async fn load_examples(
     let mut captured_after_timestamps = Vec::new();
     let mut rejected_after_timestamps = Vec::new();
     let mut requested_after_timestamps = Vec::new();
+    let mut settled_after_timestamps = Vec::new();
     let mut rated_after_inputs: Vec<(String, Option<telemetry_events::EditPredictionRating>)> =
         Vec::new();
     let mut file_inputs = Vec::new();
@@ -647,6 +669,8 @@ async fn load_examples(
             pull_examples::parse_requested_after_input(input_string.as_ref())
         {
             requested_after_timestamps.push(timestamp.to_string());
+        } else if let Some(timestamp) = parse_settled_after_input(input_string.as_ref()) {
+            settled_after_timestamps.push(timestamp.to_string());
         } else if let Some((timestamp, rating_filter)) =
             pull_examples::parse_rated_after_input(input_string.as_ref())
         {
@@ -714,6 +738,21 @@ async fn load_examples(
             examples.append(&mut requested_examples);
         }
 
+        if !settled_after_timestamps.is_empty() {
+            settled_after_timestamps.sort();
+
+            let mut settled_examples = fetch_settled_examples_after(
+                http_client.clone(),
+                &settled_after_timestamps,
+                max_rows_per_timestamp,
+                remaining_offset,
+                background_executor.clone(),
+                Some(MIN_CAPTURE_VERSION),
+            )
+            .await?;
+            examples.append(&mut settled_examples);
+        }
+
         if !rated_after_inputs.is_empty() {
             rated_after_inputs.sort();
 
@@ -897,8 +936,18 @@ fn main() {
         }
 
         Command::Synthesize(synth_args) => {
-            let Some(output_dir) = args.output else {
-                panic!("output dir is required");
+            let output_dir = if let Some(output_dir) = args.output {
+                output_dir
+            } else {
+                let default_output_dir = env::current_dir()
+                    .unwrap()
+                    .join("crates/edit_prediction_cli/evals-generated");
+                if default_output_dir.parent().unwrap().exists() {
+                    std::fs::create_dir(&default_output_dir).ok();
+                    default_output_dir
+                } else {
+                    panic!("output dir is required");
+                }
             };
             let config = SynthesizeConfig {
                 repo_urls: synth_args.repos.clone(),
@@ -1238,7 +1287,7 @@ fn main() {
                 match &command {
                     Command::Eval(args) => {
                         let examples = finished_examples.lock().unwrap();
-                        score::print_report(&examples);
+                        score::print_report(&examples, args.verbose);
                         if let Some(summary_path) = &args.summary_json {
                             score::write_summary_json(&examples, summary_path)?;
                         }

crates/edit_prediction_cli/src/metrics.rs 🔗

@@ -76,14 +76,21 @@ impl ClassificationMetrics {
 }
 
 enum ChrfWhitespace {
+    /// Preserve whitespace as-is
     #[allow(unused)]
     Unchanged,
+
+    /// Ignore all whitespace differences
+    #[allow(unused)]
     Ignore,
+
+    /// Collapse whitespace into single spaces
+    Collapse,
 }
 
 const CHR_F_CHAR_ORDER: usize = 6;
 const CHR_F_BETA: f64 = 2.0;
-const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Ignore;
+const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Collapse;
 
 /// Computes a delta-chrF score that compares two sets of edits.
 ///
@@ -196,9 +203,34 @@ fn filter_whitespace_chars(text: &str) -> Vec<char> {
     match CHR_F_WHITESPACE {
         ChrfWhitespace::Unchanged => text.chars().collect(),
         ChrfWhitespace::Ignore => text.chars().filter(|c| !c.is_whitespace()).collect(),
+        ChrfWhitespace::Collapse => collapse_whitespace(text.chars()),
     }
 }
 
+/// Collapse whitespace into single spaces.
+/// Newlines and spaces are collapsed separately: a run of non-newline
+/// whitespace becomes one ' ', a run of newlines becomes one '\n', and a
+/// mixed run keeps one of each in encounter order
+/// (e.g. "a   \n\n  b" -> "a \n b").
+fn collapse_whitespace(chars: impl Iterator<Item = char>) -> Vec<char> {
+    let mut result = Vec::new();
+    // Which collapsed whitespace char (' ' or '\n') was emitted last, so
+    // consecutive whitespace of the same kind is dropped.
+    let mut last_whitespace = None;
+    for c in chars {
+        if c.is_whitespace() && c != '\n' {
+            if last_whitespace != Some(' ') {
+                result.push(' ');
+                last_whitespace = Some(' ');
+            }
+        } else if c == '\n' {
+            if last_whitespace != Some('\n') {
+                result.push(c);
+                last_whitespace = Some('\n');
+            }
+        } else {
+            // Any non-whitespace character ends the current whitespace run.
+            result.push(c);
+            last_whitespace = None;
+        }
+    }
+    result
+}
+
 /// Extract only the changed regions between two texts, with context for n-gram boundaries.
 ///
 /// Returns (original_affected_region, modified_affected_region) as Vec<char>.
@@ -269,15 +301,15 @@ fn count_ngrams_from_chars(chars: &[char], n: usize) -> Counts {
 
 #[allow(dead_code)]
 fn chr_f_ngram_counts(text: &str) -> Vec<Counts> {
-    // Ignore whitespace. The original chrF implementation skips all
-    // whitespace. We should consider compressing multiple consecutive
-    // spaces into one -- this may reflect our task more closely.
     let text = match CHR_F_WHITESPACE {
         ChrfWhitespace::Unchanged => text.to_string(),
         ChrfWhitespace::Ignore => text
             .chars()
             .filter(|c| !c.is_whitespace())
             .collect::<String>(),
+        ChrfWhitespace::Collapse => collapse_whitespace(text.chars())
+            .into_iter()
+            .collect::<String>(),
     };
 
     (1..=CHR_F_CHAR_ORDER)
@@ -1175,4 +1207,14 @@ index abc123..def456 100644
         assert!(counts.deleted_tokens >= 2);
         assert!(counts.inserted_tokens >= 2);
     }
+
+    #[test]
+    fn test_whitespace_collapse() {
+        // Spaces collapse to a single ' ' and newlines to a single '\n';
+        // the space run after the newline run still yields its own ' '.
+        let text = "abc   \n\n\n   123";
+        let collapsed = collapse_whitespace(text.chars());
+        assert_eq!(
+            collapsed,
+            vec!['a', 'b', 'c', ' ', '\n', ' ', '1', '2', '3']
+        );
+    }
 }

crates/edit_prediction_cli/src/parse_output.rs 🔗

@@ -6,7 +6,11 @@ use crate::{
 };
 use anyhow::{Context as _, Result};
 use edit_prediction::example_spec::encode_cursor_in_patch;
-use zeta_prompt::{CURSOR_MARKER, ZetaFormat};
+use zeta_prompt::{
+    CURSOR_MARKER, ZetaFormat, clean_extracted_region_for_format,
+    current_region_markers_for_format, output_end_marker_for_format,
+    output_with_context_for_format,
+};
 
 pub fn run_parse_output(example: &mut Example) -> Result<()> {
     example
@@ -51,22 +55,7 @@ pub fn parse_prediction_output(
 }
 
 fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result<String> {
-    let (current_marker, end_marker) = match format {
-        ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"),
-        ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion => {
-            ("<|fim_middle|>current\n", "<|fim_suffix|>")
-        }
-        ZetaFormat::V0120GitMergeMarkers
-        | ZetaFormat::V0131GitMergeMarkersPrefix
-        | ZetaFormat::V0211Prefill => (
-            zeta_prompt::v0120_git_merge_markers::START_MARKER,
-            zeta_prompt::v0120_git_merge_markers::SEPARATOR,
-        ),
-        ZetaFormat::V0211SeedCoder => (
-            zeta_prompt::seed_coder::START_MARKER,
-            zeta_prompt::seed_coder::SEPARATOR,
-        ),
-    };
+    let (current_marker, end_marker) = current_region_markers_for_format(format);
 
     let start = prompt.find(current_marker).with_context(|| {
         format!(
@@ -82,8 +71,7 @@ fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result<Stri
 
     let region = &prompt[start..end];
     let region = region.replace(CURSOR_MARKER, "");
-
-    Ok(region)
+    Ok(clean_extracted_region_for_format(format, &region))
 }
 
 fn parse_zeta2_output(
@@ -100,6 +88,9 @@ fn parse_zeta2_output(
     let old_text = extract_zeta2_current_region(prompt, format)?;
 
     let mut new_text = actual_output.to_string();
+    if let Some(transformed) = output_with_context_for_format(format, &old_text, &new_text)? {
+        new_text = transformed;
+    }
     let cursor_offset = if let Some(offset) = new_text.find(CURSOR_MARKER) {
         new_text.replace_range(offset..offset + CURSOR_MARKER.len(), "");
         Some(offset)
@@ -107,19 +98,9 @@ fn parse_zeta2_output(
         None
     };
 
-    let suffix = match format {
-        ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => {
-            zeta_prompt::v0131_git_merge_markers_prefix::END_MARKER
-        }
-        ZetaFormat::V0120GitMergeMarkers => zeta_prompt::v0120_git_merge_markers::END_MARKER,
-        ZetaFormat::V0112MiddleAtEnd
-        | ZetaFormat::V0113Ordered
-        | ZetaFormat::V0114180EditableRegion => "",
-        ZetaFormat::V0211SeedCoder => zeta_prompt::seed_coder::END_MARKER,
-    };
-    if !suffix.is_empty() {
+    if let Some(marker) = output_end_marker_for_format(format) {
         new_text = new_text
-            .strip_suffix(suffix)
+            .strip_suffix(marker)
             .unwrap_or(&new_text)
             .to_string();
     }

crates/edit_prediction_cli/src/predict.rs 🔗

@@ -6,14 +6,18 @@ use crate::{
     headless::EpAppState,
     load_project::run_load_project,
     openai_client::OpenAiClient,
+    parse_output::parse_prediction_output,
     paths::{LATEST_EXAMPLE_RUN_DIR, RUN_DIR},
-    progress::{ExampleProgress, InfoStyle, Step},
+    progress::{ExampleProgress, InfoStyle, Step, StepProgress},
     retrieve_context::run_context_retrieval,
 };
 use anyhow::Context as _;
+use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse};
 use edit_prediction::{DebugEvent, EditPredictionStore, Zeta2RawConfig};
-use futures::{FutureExt as _, StreamExt as _, future::Shared};
+use futures::{AsyncReadExt as _, FutureExt as _, StreamExt as _, future::Shared};
 use gpui::{AppContext as _, AsyncApp, Task};
+use http_client::{AsyncBody, HttpClient, Method};
+use reqwest_client::ReqwestClient;
 use std::{
     fs,
     sync::{
@@ -79,6 +83,22 @@ pub async fn run_prediction(
         .await;
     }
 
+    if let PredictionProvider::Baseten(format) = provider {
+        run_format_prompt(
+            example,
+            &FormatPromptArgs {
+                provider: PredictionProvider::Zeta2(format),
+            },
+            app_state.clone(),
+            example_progress,
+            cx,
+        )
+        .await?;
+
+        let step_progress = example_progress.start(Step::Predict);
+        return predict_baseten(example, format, &step_progress).await;
+    }
+
     run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?;
     run_context_retrieval(example, app_state.clone(), example_progress, cx.clone()).await?;
 
@@ -110,13 +130,14 @@ pub async fn run_prediction(
 
     ep_store.update(&mut cx, |store, _cx| {
         let model = match provider {
-            PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta1,
-            PredictionProvider::Zeta2(_) => edit_prediction::EditPredictionModel::Zeta2,
+            PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta,
+            PredictionProvider::Zeta2(_) => edit_prediction::EditPredictionModel::Zeta,
             PredictionProvider::Sweep => edit_prediction::EditPredictionModel::Sweep,
             PredictionProvider::Mercury => edit_prediction::EditPredictionModel::Mercury,
             PredictionProvider::Teacher(..)
             | PredictionProvider::TeacherNonBatching(..)
-            | PredictionProvider::Repair => {
+            | PredictionProvider::Repair
+            | PredictionProvider::Baseten(_) => {
                 unreachable!()
             }
         };
@@ -127,7 +148,12 @@ pub async fn run_prediction(
         if let PredictionProvider::Zeta2(format) = provider {
             if format != ZetaFormat::default() {
                 let model_id = std::env::var("ZED_ZETA_MODEL").ok();
-                store.set_zeta2_raw_config(Zeta2RawConfig { model_id, format });
+                let environment = std::env::var("ZED_ZETA_ENVIRONMENT").ok();
+                store.set_zeta2_raw_config(Zeta2RawConfig {
+                    model_id,
+                    environment,
+                    format,
+                });
             }
         }
     });
@@ -364,7 +390,7 @@ async fn predict_anthropic(
             .await?
         else {
             // Request stashed for batched processing
-            return Ok(());
+            continue;
         };
 
         let actual_output = response
@@ -438,7 +464,7 @@ async fn predict_openai(
             .await?
         else {
             // Request stashed for batched processing
-            return Ok(());
+            continue;
         };
 
         let actual_output = response
@@ -480,6 +506,89 @@ async fn predict_openai(
     Ok(())
 }
 
+/// Runs a single completion against a Baseten-hosted model and appends the
+/// parsed result to `example.predictions`.
+///
+/// Requires `ZED_ZETA_MODEL` and `BASETEN_API_KEY` in the environment. The
+/// Baseten deployment environment in the URL is derived from `format`'s
+/// string name, lowercased.
+pub async fn predict_baseten(
+    example: &mut Example,
+    format: ZetaFormat,
+    step_progress: &StepProgress,
+) -> anyhow::Result<()> {
+    let model_id =
+        std::env::var("ZED_ZETA_MODEL").context("ZED_ZETA_MODEL environment variable required")?;
+
+    let api_key =
+        std::env::var("BASETEN_API_KEY").context("BASETEN_API_KEY environment variable not set")?;
+
+    let prompt = example.prompt.as_ref().context("Prompt is required")?;
+    let prompt_text = prompt.input.clone();
+    let prefill = prompt.prefill.clone().unwrap_or_default();
+
+    step_progress.set_substatus("running prediction via baseten");
+
+    // The environment path segment mirrors the format's identifier string.
+    let environment: String = <&'static str>::from(&format).to_lowercase();
+    let url = format!(
+        "https://model-{model_id}.api.baseten.co/environments/{environment}/sync/v1/completions"
+    );
+
+    let request_body = RawCompletionRequest {
+        model: model_id,
+        prompt: prompt_text.clone(),
+        max_tokens: Some(2048),
+        temperature: Some(0.),
+        stop: vec![],
+        environment: None,
+    };
+
+    let body_bytes =
+        serde_json::to_vec(&request_body).context("Failed to serialize request body")?;
+
+    let http_client: Arc<dyn HttpClient> = Arc::new(ReqwestClient::new());
+    let request = http_client::Request::builder()
+        .method(Method::POST)
+        .uri(&url)
+        .header("Content-Type", "application/json")
+        .header("Authorization", format!("Api-Key {api_key}"))
+        .body(AsyncBody::from(body_bytes))?;
+
+    let mut response = http_client.send(request).await?;
+    let status = response.status();
+
+    // Read the body before checking status so error payloads can be
+    // included in the bail message below.
+    let mut body = String::new();
+    response
+        .body_mut()
+        .read_to_string(&mut body)
+        .await
+        .context("Failed to read Baseten response body")?;
+
+    if !status.is_success() {
+        anyhow::bail!("Baseten API returned {status}: {body}");
+    }
+
+    let completion: RawCompletionResponse =
+        serde_json::from_str(&body).context("Failed to parse Baseten response")?;
+
+    // Only the first choice is used; an empty choices list yields "".
+    let actual_output = completion
+        .choices
+        .into_iter()
+        .next()
+        .map(|choice| choice.text)
+        .unwrap_or_default();
+
+    // Re-attach the prompt's prefill so parsing sees the full output text.
+    let actual_output = format!("{prefill}{actual_output}");
+
+    // Parse with the Zeta2 provider: Baseten serves the same prompt format
+    // (the prompt was formatted with PredictionProvider::Zeta2 upstream).
+    let (actual_patch, actual_cursor) =
+        parse_prediction_output(example, &actual_output, PredictionProvider::Zeta2(format))?;
+
+    let prediction = ExamplePrediction {
+        actual_patch: Some(actual_patch),
+        actual_output,
+        actual_cursor,
+        error: None,
+        provider: PredictionProvider::Baseten(format),
+    };
+
+    example.predictions.push(prediction);
+    Ok(())
+}
+
 pub async fn sync_batches(provider: Option<&PredictionProvider>) -> anyhow::Result<()> {
     match provider {
         Some(PredictionProvider::Teacher(backend)) => match backend {

crates/edit_prediction_cli/src/pull_examples.rs 🔗

@@ -5,24 +5,25 @@ use http_client::{AsyncBody, HttpClient, Method, Request};
 use indoc::indoc;
 use serde::Deserialize;
 use serde_json::{Value as JsonValue, json};
+use std::fmt::Write as _;
 use std::io::Read;
 use std::sync::Arc;
 use std::time::Duration;
 use telemetry_events::EditPredictionRating;
 
-use zeta_prompt::ZetaPromptInput;
+use zeta_prompt::{ZetaFormat, ZetaPromptInput, excerpt_range_for_format};
 
 use crate::example::Example;
 use crate::progress::{InfoStyle, Progress, Step};
 const EDIT_PREDICTION_DEPLOYMENT_EVENT: &str = "Edit Prediction Deployment";
 use edit_prediction::example_spec::{ExampleSpec, TelemetrySource};
-use std::fmt::Write as _;
 
 pub(crate) const SNOWFLAKE_SUCCESS_CODE: &str = "090001";
 pub(crate) const SNOWFLAKE_ASYNC_IN_PROGRESS_CODE: &str = "333334";
 const PREDICTIVE_EDIT_REQUESTED_EVENT: &str = "Predictive Edit Requested";
 const PREDICTIVE_EDIT_REJECTED_EVENT: &str = "Predictive Edit Rejected";
 const EDIT_PREDICTION_RATED_EVENT: &str = "Edit Prediction Rated";
+const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled";
 
 /// Minimum Zed version for filtering captured examples.
 /// For example, `MinCaptureVersion { minor: 224, patch: 1 }` means only pull examples
@@ -33,7 +34,8 @@ pub struct MinCaptureVersion {
     pub patch: u32,
 }
 
-const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 120;
+const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 240;
+const SETTLED_STATEMENT_TIMEOUT_SECONDS: u64 = 240;
 pub(crate) const POLL_INTERVAL: Duration = Duration::from_secs(2);
 pub(crate) const MAX_POLL_ATTEMPTS: usize = 120;
 
@@ -52,6 +54,11 @@ pub fn parse_requested_after_input(input: &str) -> Option<&str> {
     input.strip_prefix("requested-after:")
 }
 
+/// Parse an input token of the form `settled-after:{timestamp}`.
+/// Returns `Some(timestamp)` when the prefix matches, `None` otherwise.
+pub fn parse_settled_after_input(input: &str) -> Option<&str> {
+    input.strip_prefix("settled-after:")
+}
+
 /// Parse an input token of the form `rated-after:{timestamp}`, `rated-positive-after:{timestamp}`,
 /// or `rated-negative-after:{timestamp}`.
 /// Returns `(timestamp, Option<EditPredictionRating>)` where `None` means all ratings.
@@ -145,6 +152,103 @@ async fn run_sql_with_polling(
     Ok(response)
 }
 
+/// Snowflake connection settings read from the environment
+/// (`EP_SNOWFLAKE_API_KEY`, `EP_SNOWFLAKE_BASE_URL`, `EP_SNOWFLAKE_ROLE`).
+struct SnowflakeConfig {
+    // API token passed to the SQL request helpers.
+    token: String,
+    // Account base URL, e.g. https://<account>.snowflakecomputing.com
+    base_url: String,
+    // Optional role to run the statement under; omitted when unset.
+    role: Option<String>,
+}
+
+/// Shared driver for the Snowflake "pull examples" queries.
+///
+/// Reads connection settings from the environment, submits `statement` with
+/// `bindings`, polls until the statement completes, then runs
+/// `parse_response` over the first result page and every additional
+/// partition, returning all parsed examples.
+async fn fetch_examples_with_query(
+    http_client: Arc<dyn HttpClient>,
+    step_progress: &crate::progress::StepProgress,
+    background_executor: BackgroundExecutor,
+    statement: &str,
+    bindings: JsonValue,
+    timeout_seconds: u64,
+    required_columns: &[&str],
+    parse_response: for<'a> fn(
+        &'a SnowflakeStatementResponse,
+        &'a std::collections::HashMap<String, usize>,
+    ) -> Result<Box<dyn Iterator<Item = Example> + 'a>>,
+) -> Result<Vec<Example>> {
+    let snowflake = SnowflakeConfig {
+        token: std::env::var("EP_SNOWFLAKE_API_KEY")
+            .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?,
+        base_url: std::env::var("EP_SNOWFLAKE_BASE_URL").context(
+            "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://<account>.snowflakecomputing.com)",
+        )?,
+        role: std::env::var("EP_SNOWFLAKE_ROLE").ok(),
+    };
+    let request = json!({
+        "statement": statement,
+        "timeout": timeout_seconds,
+        "database": "EVENTS",
+        "schema": "PUBLIC",
+        "warehouse": "DBT",
+        "role": snowflake.role.as_deref(),
+        "bindings": bindings
+    });
+
+    let response = run_sql_with_polling(
+        http_client.clone(),
+        &snowflake.base_url,
+        &snowflake.token,
+        &request,
+        step_progress,
+        background_executor,
+    )
+    .await?;
+
+    // Metadata may omit num_rows; fall back to the first page's length.
+    let total_rows = response
+        .result_set_meta_data
+        .as_ref()
+        .and_then(|meta| meta.num_rows)
+        .unwrap_or(response.data.len() as i64);
+    let partition_count = response
+        .result_set_meta_data
+        .as_ref()
+        .map(|meta| meta.partition_info.len())
+        .unwrap_or(1)
+        .max(1);
+
+    step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal);
+    step_progress.set_substatus("parsing");
+
+    let column_indices = get_column_indices(&response.result_set_meta_data, required_columns);
+
+    let mut parsed_examples = Vec::with_capacity(total_rows as usize);
+    parsed_examples.extend(parse_response(&response, &column_indices)?);
+
+    if partition_count > 1 {
+        // Partition 0 arrived with the initial response; fetch the rest
+        // sequentially by statement handle.
+        let statement_handle = response
+            .statement_handle
+            .as_ref()
+            .context("response has multiple partitions but no statementHandle")?;
+
+        for partition in 1..partition_count {
+            step_progress.set_substatus(format!(
+                "fetching partition {}/{}",
+                partition + 1,
+                partition_count
+            ));
+
+            let partition_response = fetch_partition(
+                http_client.clone(),
+                &snowflake.base_url,
+                &snowflake.token,
+                statement_handle,
+                partition,
+            )
+            .await?;
+
+            parsed_examples.extend(parse_response(&partition_response, &column_indices)?);
+        }
+    }
+
+    step_progress.set_substatus("done");
+    Ok(parsed_examples)
+}
+
 pub(crate) async fn fetch_partition(
     http_client: Arc<dyn HttpClient>,
     base_url: &str,
@@ -298,13 +402,6 @@ pub async fn fetch_rejected_examples_after(
 
     let progress = Progress::global();
 
-    let token = std::env::var("EP_SNOWFLAKE_API_KEY")
-        .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?;
-    let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context(
-        "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://<account>.snowflakecomputing.com)",
-    )?;
-    let role = std::env::var("EP_SNOWFLAKE_ROLE").ok();
-
     let mut all_examples = Vec::new();
 
     for after_date in after_timestamps.iter() {
@@ -312,10 +409,11 @@ pub async fn fetch_rejected_examples_after(
         let step_progress = progress.start(Step::PullExamples, &step_progress_name);
         step_progress.set_substatus("querying");
 
-        // Join rejected events with their corresponding request events to get the full context.
-        // We filter for V3 sampling data which contains the structured input we need.
-        // We also filter for predictions that were actually shown to the user (was_shown = true)
-        // to focus on explicit user rejections rather than implicit cancellations.
+        let min_minor_str = min_capture_version.map(|version| version.minor.to_string());
+        let min_patch_str = min_capture_version.map(|version| version.patch.to_string());
+        let min_minor_str_ref = min_minor_str.as_deref();
+        let min_patch_str_ref = min_patch_str.as_deref();
+
         let statement = indoc! {r#"
             SELECT
                 req.event_properties:request_id::string AS request_id,
@@ -348,58 +446,25 @@ pub async fn fetch_rejected_examples_after(
             OFFSET ?
         "#};
 
-        let min_minor_str = min_capture_version.map(|v| v.minor.to_string());
-        let min_patch_str = min_capture_version.map(|v| v.patch.to_string());
-        let min_minor_str_ref = min_minor_str.as_deref();
-        let min_patch_str_ref = min_patch_str.as_deref();
-        let request = json!({
-            "statement": statement,
-            "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS,
-            "database": "EVENTS",
-            "schema": "PUBLIC",
-            "warehouse": "DBT",
-            "role": role,
-            "bindings": {
-                "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT },
-                "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REJECTED_EVENT },
-                "3": { "type": "TEXT", "value": after_date },
-                "4": { "type": "FIXED", "value": min_minor_str_ref },
-                "5": { "type": "FIXED", "value": min_minor_str_ref },
-                "6": { "type": "FIXED", "value": min_minor_str_ref },
-                "7": { "type": "FIXED", "value": min_patch_str_ref },
-                "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() },
-                "9": { "type": "FIXED", "value": offset.to_string() }
-            }
+        let bindings = json!({
+            "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT },
+            "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REJECTED_EVENT },
+            "3": { "type": "TEXT", "value": after_date },
+            "4": { "type": "FIXED", "value": min_minor_str_ref },
+            "5": { "type": "FIXED", "value": min_minor_str_ref },
+            "6": { "type": "FIXED", "value": min_minor_str_ref },
+            "7": { "type": "FIXED", "value": min_patch_str_ref },
+            "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() },
+            "9": { "type": "FIXED", "value": offset.to_string() }
         });
 
-        let response = run_sql_with_polling(
+        let examples = fetch_examples_with_query(
             http_client.clone(),
-            &base_url,
-            &token,
-            &request,
             &step_progress,
             background_executor.clone(),
-        )
-        .await?;
-
-        let total_rows = response
-            .result_set_meta_data
-            .as_ref()
-            .and_then(|m| m.num_rows)
-            .unwrap_or(response.data.len() as i64);
-
-        let num_partitions = response
-            .result_set_meta_data
-            .as_ref()
-            .map(|m| m.partition_info.len())
-            .unwrap_or(1)
-            .max(1);
-
-        step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal);
-        step_progress.set_substatus("parsing");
-
-        let column_indices = get_column_indices(
-            &response.result_set_meta_data,
+            statement,
+            bindings,
+            DEFAULT_STATEMENT_TIMEOUT_SECONDS,
             &[
                 "request_id",
                 "device_id",
@@ -411,40 +476,11 @@ pub async fn fetch_rejected_examples_after(
                 "reason",
                 "zed_version",
             ],
-        );
-
-        all_examples.extend(rejected_examples_from_response(&response, &column_indices)?);
-
-        if num_partitions > 1 {
-            let statement_handle = response
-                .statement_handle
-                .as_ref()
-                .context("response has multiple partitions but no statementHandle")?;
-
-            for partition in 1..num_partitions {
-                step_progress.set_substatus(format!(
-                    "fetching partition {}/{}",
-                    partition + 1,
-                    num_partitions
-                ));
-
-                let partition_response = fetch_partition(
-                    http_client.clone(),
-                    &base_url,
-                    &token,
-                    statement_handle,
-                    partition,
-                )
-                .await?;
-
-                all_examples.extend(rejected_examples_from_response(
-                    &partition_response,
-                    &column_indices,
-                )?);
-            }
-        }
+            rejected_examples_from_response,
+        )
+        .await?;
 
-        step_progress.set_substatus("done");
+        all_examples.extend(examples);
     }
 
     Ok(all_examples)
@@ -464,13 +500,6 @@ pub async fn fetch_requested_examples_after(
 
     let progress = Progress::global();
 
-    let token = std::env::var("EP_SNOWFLAKE_API_KEY")
-        .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?;
-    let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context(
-        "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://<account>.snowflakecomputing.com)",
-    )?;
-    let role = std::env::var("EP_SNOWFLAKE_ROLE").ok();
-
     let mut all_examples = Vec::new();
 
     for after_date in after_timestamps.iter() {
@@ -478,6 +507,11 @@ pub async fn fetch_requested_examples_after(
         let step_progress = progress.start(Step::PullExamples, &step_progress_name);
         step_progress.set_substatus("querying");
 
+        let min_minor_str = min_capture_version.map(|version| version.minor.to_string());
+        let min_patch_str = min_capture_version.map(|version| version.patch.to_string());
+        let min_minor_str_ref = min_minor_str.as_deref();
+        let min_patch_str_ref = min_patch_str.as_deref();
+
         let statement = indoc! {r#"
             SELECT
                 req.event_properties:request_id::string AS request_id,
@@ -502,95 +536,123 @@ pub async fn fetch_requested_examples_after(
             OFFSET ?
         "#};
 
-        let min_minor_str = min_capture_version.map(|v| v.minor.to_string());
-        let min_patch_str = min_capture_version.map(|v| v.patch.to_string());
-        let min_minor_str_ref = min_minor_str.as_deref();
-        let min_patch_str_ref = min_patch_str.as_deref();
-        let request = json!({
-            "statement": statement,
-            "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS,
-            "database": "EVENTS",
-            "schema": "PUBLIC",
-            "warehouse": "DBT",
-            "role": role,
-            "bindings": {
-                "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT },
-                "2": { "type": "TEXT", "value": after_date },
-                "3": { "type": "FIXED", "value": min_minor_str_ref },
-                "4": { "type": "FIXED", "value": min_minor_str_ref },
-                "5": { "type": "FIXED", "value": min_minor_str_ref },
-                "6": { "type": "FIXED", "value": min_patch_str_ref },
-                "7": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() },
-                "8": { "type": "FIXED", "value": offset.to_string() }
-            }
+        let bindings = json!({
+            "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT },
+            "2": { "type": "TEXT", "value": after_date },
+            "3": { "type": "FIXED", "value": min_minor_str_ref },
+            "4": { "type": "FIXED", "value": min_minor_str_ref },
+            "5": { "type": "FIXED", "value": min_minor_str_ref },
+            "6": { "type": "FIXED", "value": min_patch_str_ref },
+            "7": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() },
+            "8": { "type": "FIXED", "value": offset.to_string() }
         });
 
-        let response = run_sql_with_polling(
+        let examples = fetch_examples_with_query(
             http_client.clone(),
-            &base_url,
-            &token,
-            &request,
             &step_progress,
             background_executor.clone(),
+            statement,
+            bindings,
+            DEFAULT_STATEMENT_TIMEOUT_SECONDS,
+            &["request_id", "device_id", "time", "input", "zed_version"],
+            requested_examples_from_response,
         )
         .await?;
 
-        let total_rows = response
-            .result_set_meta_data
-            .as_ref()
-            .and_then(|m| m.num_rows)
-            .unwrap_or(response.data.len() as i64);
+        all_examples.extend(examples);
+    }
 
-        let num_partitions = response
-            .result_set_meta_data
-            .as_ref()
-            .map(|m| m.partition_info.len())
-            .unwrap_or(1)
-            .max(1);
+    Ok(all_examples)
+}
 
-        step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal);
-        step_progress.set_substatus("parsing");
+pub async fn fetch_settled_examples_after(
+    http_client: Arc<dyn HttpClient>,
+    after_timestamps: &[String],
+    max_rows_per_timestamp: usize,
+    offset: usize,
+    background_executor: BackgroundExecutor,
+    min_capture_version: Option<MinCaptureVersion>,
+) -> Result<Vec<Example>> {
+    if after_timestamps.is_empty() {
+        return Ok(Vec::new());
+    }
 
-        let column_indices = get_column_indices(
-            &response.result_set_meta_data,
-            &["request_id", "device_id", "time", "input", "zed_version"],
-        );
+    let progress = Progress::global();
 
-        all_examples.extend(requested_examples_from_response(
-            &response,
-            &column_indices,
-        )?);
+    let mut all_examples = Vec::new();
 
-        if num_partitions > 1 {
-            let statement_handle = response
-                .statement_handle
-                .as_ref()
-                .context("response has multiple partitions but no statementHandle")?;
-
-            for partition in 1..num_partitions {
-                step_progress.set_substatus(format!(
-                    "fetching partition {}/{}",
-                    partition + 1,
-                    num_partitions
-                ));
-
-                let partition_response = fetch_partition(
-                    http_client.clone(),
-                    &base_url,
-                    &token,
-                    statement_handle,
-                    partition,
-                )
-                .await?;
-
-                all_examples.extend(requested_examples_from_response(
-                    &partition_response,
-                    &column_indices,
-                )?);
-            }
-        }
+    // Run one settled-events query per cutoff timestamp.
+    for after_date in after_timestamps.iter() {
+        let step_progress_name = format!("settled>{after_date}");
+        let step_progress = progress.start(Step::PullExamples, &step_progress_name);
+        step_progress.set_substatus("querying");
+
+        // NOTE(review): unlike the other fetchers, the settled query below has
+        // no Zed-version filter; `min_capture_version` is accepted for
+        // signature parity and deliberately ignored — confirm this is intended.
+        let _ = min_capture_version;
+
+        // Join "requested" events (carrying the full V3 input) with their
+        // matching "settled" events to recover the post-settle editable region.
+        let statement = indoc! {r#"
+            WITH requested AS (
+                SELECT
+                    req.event_properties:request_id::string AS request_id,
+                    req.device_id::string AS device_id,
+                    req.time AS req_time,
+                    req.time::string AS time,
+                    req.event_properties:input AS input,
+                    req.event_properties:format::string AS requested_format,
+                    req.event_properties:output::string AS requested_output,
+                    req.event_properties:zed_version::string AS zed_version
+                FROM events req
+                WHERE req.event_type = ?
+                    AND req.event_properties:version = 'V3'
+                    AND req.event_properties:input:can_collect_data = true
+                    AND req.time > TRY_TO_TIMESTAMP_NTZ(?)
+            )
+            SELECT
+                req.request_id AS request_id,
+                req.device_id AS device_id,
+                req.time AS time,
+                req.input AS input,
+                req.requested_output AS requested_output,
+                settled.event_properties:settled_editable_region::string AS settled_editable_region,
+                req.requested_format AS requested_format,
+                req.zed_version AS zed_version
+            FROM requested req
+            INNER JOIN events settled
+                ON req.request_id = settled.event_properties:request_id::string
+            WHERE settled.event_type = ?
+            ORDER BY req.req_time ASC
+            LIMIT ?
+            OFFSET ?
+        "#};
+
+        let bindings = json!({
+            "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT },
+            "2": { "type": "TEXT", "value": after_date },
+            "3": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT },
+            "4": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() },
+            "5": { "type": "FIXED", "value": offset.to_string() }
+        });
+
+        let examples = fetch_examples_with_query(
+            http_client.clone(),
+            &step_progress,
+            background_executor.clone(),
+            statement,
+            bindings,
+            SETTLED_STATEMENT_TIMEOUT_SECONDS,
+            &[
+                "request_id",
+                "device_id",
+                "time",
+                "input",
+                "requested_output",
+                "settled_editable_region",
+                "requested_format",
+                "zed_version",
+            ],
+            settled_examples_from_response,
+        )
+        .await?;
 
-        step_progress.set_substatus("done");
+        all_examples.extend(examples);
     }
 
     Ok(all_examples)
@@ -610,13 +672,6 @@ pub async fn fetch_rated_examples_after(
 
     let progress = Progress::global();
 
-    let token = std::env::var("EP_SNOWFLAKE_API_KEY")
-        .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?;
-    let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context(
-        "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://<account>.snowflakecomputing.com)",
-    )?;
-    let role = std::env::var("EP_SNOWFLAKE_ROLE").ok();
-
     let mut all_examples = Vec::new();
 
     for (after_date, rating_filter) in inputs.iter() {
@@ -629,7 +684,7 @@ pub async fn fetch_rated_examples_after(
         let step_progress = progress.start(Step::PullExamples, &step_progress_name);
         step_progress.set_substatus("querying");
 
-        let rating_value = rating_filter.as_ref().map(|r| match r {
+        let rating_value = rating_filter.as_ref().map(|rating| match rating {
             EditPredictionRating::Positive => "Positive",
             EditPredictionRating::Negative => "Negative",
         });
@@ -660,7 +715,7 @@ pub async fn fetch_rated_examples_after(
                 AND rated.event_properties:inputs IS NOT NULL
                 AND rated.event_properties:inputs:cursor_excerpt IS NOT NULL
                 AND rated.event_properties:output IS NOT NULL
-                AND rated.event_properties:can_collect_data = true
+                AND rated.event_properties:inputs:can_collect_data = true
             ORDER BY rated.time ASC
             LIMIT ?
             OFFSET ?
@@ -677,44 +732,13 @@ pub async fn fetch_rated_examples_after(
             "8": { "type": "FIXED", "value": offset.to_string() }
         });
 
-        let request = json!({
-            "statement": statement,
-            "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS,
-            "database": "EVENTS",
-            "schema": "PUBLIC",
-            "warehouse": "DBT",
-            "role": role,
-            "bindings": bindings
-        });
-
-        let response = run_sql_with_polling(
+        let examples = fetch_examples_with_query(
             http_client.clone(),
-            &base_url,
-            &token,
-            &request,
             &step_progress,
             background_executor.clone(),
-        )
-        .await?;
-
-        let total_rows = response
-            .result_set_meta_data
-            .as_ref()
-            .and_then(|m| m.num_rows)
-            .unwrap_or(response.data.len() as i64);
-
-        let num_partitions = response
-            .result_set_meta_data
-            .as_ref()
-            .map(|m| m.partition_info.len())
-            .unwrap_or(1)
-            .max(1);
-
-        step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal);
-        step_progress.set_substatus("parsing");
-
-        let column_indices = get_column_indices(
-            &response.result_set_meta_data,
+            statement,
+            bindings,
+            DEFAULT_STATEMENT_TIMEOUT_SECONDS,
             &[
                 "request_id",
                 "inputs",
@@ -727,40 +751,11 @@ pub async fn fetch_rated_examples_after(
                 "environment",
                 "zed_version",
             ],
-        );
-
-        all_examples.extend(rated_examples_from_response(&response, &column_indices)?);
-
-        if num_partitions > 1 {
-            let statement_handle = response
-                .statement_handle
-                .as_ref()
-                .context("response has multiple partitions but no statementHandle")?;
-
-            for partition in 1..num_partitions {
-                step_progress.set_substatus(format!(
-                    "fetching partition {}/{}",
-                    partition + 1,
-                    num_partitions
-                ));
-
-                let partition_response = fetch_partition(
-                    http_client.clone(),
-                    &base_url,
-                    &token,
-                    statement_handle,
-                    partition,
-                )
-                .await?;
-
-                all_examples.extend(rated_examples_from_response(
-                    &partition_response,
-                    &column_indices,
-                )?);
-            }
-        }
+            rated_examples_from_response,
+        )
+        .await?;
 
-        step_progress.set_substatus("done");
+        all_examples.extend(examples);
     }
 
     Ok(all_examples)
@@ -769,7 +764,7 @@ pub async fn fetch_rated_examples_after(
 fn rated_examples_from_response<'a>(
     response: &'a SnowflakeStatementResponse,
     column_indices: &'a std::collections::HashMap<String, usize>,
-) -> Result<impl Iterator<Item = Example> + 'a> {
+) -> Result<Box<dyn Iterator<Item = Example> + 'a>> {
     if let Some(code) = &response.code {
         if code != SNOWFLAKE_SUCCESS_CODE {
             anyhow::bail!(
@@ -828,11 +823,11 @@ fn rated_examples_from_response<'a>(
             let environment = get_string("environment");
             let zed_version = get_string("zed_version");
 
-            match (inputs, output.clone(), rating.clone(), device_id.clone(), time.clone()) {
-                (Some(inputs), Some(output), Some(rating), Some(device_id), Some(time)) => {
+            match (inputs, output.clone(), rating.clone(), time.clone()) {
+                (Some(inputs), Some(output), Some(rating), Some(time)) => {
                     Some(build_rated_example(
                         request_id,
-                        device_id,
+                        device_id.unwrap_or_default(),
                         time,
                         inputs,
                         output,
@@ -845,11 +840,10 @@ fn rated_examples_from_response<'a>(
                 }
                 _ => {
                     log::warn!(
-                        "skipping row {row_index}: missing fields - inputs={:?} output={:?} rating={:?} device_id={:?} time={:?}",
+                        "skipping row {row_index}: missing fields - inputs={:?} output={:?} rating={:?} time={:?}",
                         inputs_json.is_some(),
                         output.is_some(),
                         rating.is_some(),
-                        device_id.is_some(),
                         time.is_some(),
                     );
                     None
@@ -857,7 +851,7 @@ fn rated_examples_from_response<'a>(
             }
         });
 
-    Ok(iter)
+    Ok(Box::new(iter))
 }
 
 fn build_rated_example(
@@ -917,7 +911,7 @@ fn build_rated_example(
 fn requested_examples_from_response<'a>(
     response: &'a SnowflakeStatementResponse,
     column_indices: &'a std::collections::HashMap<String, usize>,
-) -> Result<impl Iterator<Item = Example> + 'a> {
+) -> Result<Box<dyn Iterator<Item = Example> + 'a>> {
     if let Some(code) = &response.code {
         if code != SNOWFLAKE_SUCCESS_CODE {
             anyhow::bail!(
@@ -986,13 +980,190 @@ fn requested_examples_from_response<'a>(
             }
         });
 
-    Ok(iter)
+    Ok(Box::new(iter))
+}
+
+fn settled_examples_from_response<'a>(
+    response: &'a SnowflakeStatementResponse,
+    column_indices: &'a std::collections::HashMap<String, usize>,
+) -> Result<Box<dyn Iterator<Item = Example> + 'a>> {
+    if let Some(code) = &response.code {
+        if code != SNOWFLAKE_SUCCESS_CODE {
+            anyhow::bail!(
+                "snowflake sql api returned error code={code} message={}",
+                response.message.as_deref().unwrap_or("<no message>")
+            );
+        }
+    }
+
+    let iter = response
+        .data
+        .iter()
+        .enumerate()
+        .filter_map(move |(row_index, data_row)| {
+            let get_value = |name: &str| -> Option<JsonValue> {
+                let index = column_indices.get(name).copied()?;
+                let value = data_row.get(index)?;
+                if value.is_null() {
+                    None
+                } else {
+                    Some(value.clone())
+                }
+            };
+
+            let get_string = |name: &str| -> Option<String> {
+                match get_value(name)? {
+                    JsonValue::String(s) => Some(s),
+                    other => Some(other.to_string()),
+                }
+            };
+
+            let parse_json_value = |_: &str, raw: Option<&JsonValue>| -> Option<JsonValue> {
+                let value = raw?;
+                match value {
+                    JsonValue::String(s) => serde_json::from_str::<JsonValue>(s).ok(),
+                    other => Some(other.clone()),
+                }
+            };
+
+            let request_id_str = get_string("request_id");
+            let device_id = get_string("device_id");
+            let time = get_string("time");
+            let input_raw = get_value("input");
+            let input_json = parse_json_value("input", input_raw.as_ref());
+            let input: Option<ZetaPromptInput> = input_json
+                .as_ref()
+                .and_then(|parsed| serde_json::from_value(parsed.clone()).ok());
+            let requested_output = get_string("requested_output");
+            let settled_editable_region = get_string("settled_editable_region");
+            let requested_format =
+                get_string("requested_format").and_then(|s| ZetaFormat::parse(&s).ok());
+            let zed_version = get_string("zed_version");
+
+            match (
+                request_id_str.clone(),
+                device_id.clone(),
+                time.clone(),
+                input.clone(),
+                requested_output.clone(),
+                settled_editable_region.clone(),
+                requested_format,
+            ) {
+                (
+                    Some(request_id),
+                    Some(device_id),
+                    Some(time),
+                    Some(input),
+                    Some(requested_output),
+                    Some(settled_editable_region),
+                    Some(requested_format),
+                ) => Some(build_settled_example(
+                    request_id,
+                    device_id,
+                    time,
+                    input,
+                    requested_output,
+                    settled_editable_region,
+                    requested_format,
+                    zed_version,
+                )),
+                _ => {
+                    let mut missing_fields = Vec::new();
+
+                    if request_id_str.is_none() {
+                        missing_fields.push("request_id");
+                    }
+                    if device_id.is_none() {
+                        missing_fields.push("device_id");
+                    }
+                    if time.is_none() {
+                        missing_fields.push("time");
+                    }
+                    if input_raw.is_none() || input_json.is_none() || input.is_none() {
+                        missing_fields.push("input");
+                    }
+                    if requested_output.is_none() {
+                        missing_fields.push("requested_output");
+                    }
+                    if settled_editable_region.is_none() {
+                        missing_fields.push("settled_editable_region");
+                    }
+                    if requested_format.is_none() {
+                        missing_fields.push("requested_format");
+                    }
+
+                    log::warn!(
+                        "skipping settled row {row_index}: [{}]",
+                        missing_fields.join(", "),
+                    );
+                    None
+                }
+            }
+        });
+
+    Ok(Box::new(iter))
+}
+
+fn build_settled_example(
+    request_id: String,
+    device_id: String,
+    time: String,
+    input: ZetaPromptInput,
+    requested_output: String,
+    settled_editable_region: String,
+    requested_format: ZetaFormat,
+    zed_version: Option<String>,
+) -> Example {
+    let requested_editable_range =
+        excerpt_range_for_format(requested_format, &input.excerpt_ranges).0;
+
+    let base_cursor_excerpt = input.cursor_excerpt.to_string();
+
+    let requested_range_is_valid = requested_editable_range.start <= requested_editable_range.end
+        && requested_editable_range.end <= base_cursor_excerpt.len();
+    let mut example = build_example_from_snowflake(
+        request_id.clone(),
+        device_id,
+        time,
+        input,
+        vec!["settled".to_string()],
+        None,
+        zed_version,
+    );
+
+    if !requested_range_is_valid {
+        log::warn!(
+            "skipping malformed requested range for request {}: requested={:?} (base_len={})",
+            request_id,
+            requested_editable_range,
+            base_cursor_excerpt.len(),
+        );
+        return example;
+    }
+
+    let settled_replacement = settled_editable_region.as_str();
+    let rejected_patch = build_output_patch(
+        &example.spec.cursor_path,
+        &base_cursor_excerpt,
+        &requested_editable_range,
+        &requested_output,
+    );
+    let expected_patch = build_output_patch(
+        &example.spec.cursor_path,
+        &base_cursor_excerpt,
+        &requested_editable_range,
+        settled_replacement,
+    );
+
+    example.spec.expected_patches = vec![expected_patch];
+    example.spec.rejected_patch = Some(rejected_patch);
+    example
 }
 
 fn rejected_examples_from_response<'a>(
     response: &'a SnowflakeStatementResponse,
     column_indices: &'a std::collections::HashMap<String, usize>,
-) -> Result<impl Iterator<Item = Example> + 'a> {
+) -> Result<Box<dyn Iterator<Item = Example> + 'a>> {
     if let Some(code) = &response.code {
         if code != SNOWFLAKE_SUCCESS_CODE {
             anyhow::bail!(
@@ -1077,7 +1248,7 @@ fn rejected_examples_from_response<'a>(
             }
         });
 
-    Ok(iter)
+    Ok(Box::new(iter))
 }
 
 fn build_rejected_example(
@@ -1093,7 +1264,7 @@ fn build_rejected_example(
     let rejected_patch = build_output_patch(
         &input.cursor_path,
         input.cursor_excerpt.as_ref(),
-        &input.editable_range_in_excerpt,
+        &input.excerpt_ranges.editable_350,
         &output,
     );
     let mut example = build_example_from_snowflake(

crates/edit_prediction_cli/src/retrieve_context.rs 🔗

@@ -85,46 +85,79 @@ async fn wait_for_language_servers_to_start(
 ) -> anyhow::Result<()> {
     let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 
-    let (language_server_ids, mut starting_language_server_ids) =
-        buffer.update(cx, |buffer, cx| {
-            lsp_store.update(cx, |lsp_store, cx| {
-                let ids = lsp_store.language_servers_for_local_buffer(buffer, cx);
-                let starting_ids = ids
-                    .iter()
-                    .copied()
-                    .filter(|id| !lsp_store.language_server_statuses.contains_key(&id))
-                    .collect::<HashSet<_>>();
-                (ids, starting_ids)
-            })
+    // Determine which servers exist for this buffer, and which are still starting.
+    let mut servers_pending_start = HashSet::default();
+    let mut servers_pending_diagnostics = HashSet::default();
+    buffer.update(cx, |buffer, cx| {
+        lsp_store.update(cx, |lsp_store, cx| {
+            let ids = lsp_store.language_servers_for_local_buffer(buffer, cx);
+            for &id in &ids {
+                match lsp_store.language_server_statuses.get(&id) {
+                    None => {
+                        servers_pending_start.insert(id);
+                        servers_pending_diagnostics.insert(id);
+                    }
+                    Some(status) if status.has_pending_diagnostic_updates => {
+                        servers_pending_diagnostics.insert(id);
+                    }
+                    Some(_) => {}
+                }
+            }
         });
+    });
 
-    step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len()));
+    step_progress.set_substatus(format!(
+        "waiting for {} LSPs",
+        servers_pending_diagnostics.len()
+    ));
 
-    let timeout_duration = if starting_language_server_ids.is_empty() {
+    let timeout_duration = if servers_pending_start.is_empty() {
         Duration::from_secs(30)
     } else {
         Duration::from_secs(60 * 5)
     };
-
     let timeout = cx.background_executor().timer(timeout_duration).shared();
 
-    let (mut tx, mut rx) = mpsc::channel(language_server_ids.len());
-    let added_subscription = cx.subscribe(project, {
+    let (mut started_tx, mut started_rx) = mpsc::channel(servers_pending_start.len().max(1));
+    let (mut diag_tx, mut diag_rx) = mpsc::channel(servers_pending_diagnostics.len().max(1));
+    let subscriptions = [cx.subscribe(&lsp_store, {
         let step_progress = step_progress.clone();
-        move |_, event, _| match event {
-            project::Event::LanguageServerAdded(language_server_id, name, _) => {
+        move |lsp_store, event, cx| match event {
+            project::LspStoreEvent::LanguageServerAdded(id, name, _) => {
                 step_progress.set_substatus(format!("LSP started: {}", name));
-                tx.try_send(*language_server_id).ok();
+                started_tx.try_send(*id).ok();
+            }
+            project::LspStoreEvent::DiskBasedDiagnosticsFinished { language_server_id } => {
+                let name = lsp_store
+                    .read(cx)
+                    .language_server_adapter_for_id(*language_server_id)
+                    .unwrap()
+                    .name();
+                step_progress.set_substatus(format!("LSP idle: {}", name));
+                diag_tx.try_send(*language_server_id).ok();
+            }
+            project::LspStoreEvent::LanguageServerUpdate {
+                message:
+                    client::proto::update_language_server::Variant::WorkProgress(
+                        client::proto::LspWorkProgress {
+                            message: Some(message),
+                            ..
+                        },
+                    ),
+                ..
+            } => {
+                step_progress.set_substatus(message.clone());
             }
             _ => {}
         }
-    });
+    })];
 
-    while !starting_language_server_ids.is_empty() {
+    // Phase 1: wait for all servers to start.
+    while !servers_pending_start.is_empty() {
         futures::select! {
-            language_server_id = rx.next() => {
-                if let Some(id) = language_server_id {
-                    starting_language_server_ids.remove(&id);
+            id = started_rx.next() => {
+                if let Some(id) = id {
+                    servers_pending_start.remove(&id);
                 }
             },
             _ = timeout.clone().fuse() => {
@@ -133,67 +166,17 @@ async fn wait_for_language_servers_to_start(
         }
     }
 
-    drop(added_subscription);
-
-    let (mut tx, mut rx) = mpsc::channel(language_server_ids.len());
-    let subscriptions = [
-        cx.subscribe(&lsp_store, {
-            let step_progress = step_progress.clone();
-            move |_, event, _| {
-                if let project::LspStoreEvent::LanguageServerUpdate {
-                    message:
-                        client::proto::update_language_server::Variant::WorkProgress(
-                            client::proto::LspWorkProgress {
-                                message: Some(message),
-                                ..
-                            },
-                        ),
-                    ..
-                } = event
-                {
-                    step_progress.set_substatus(message.clone());
-                }
-            }
-        }),
-        cx.subscribe(project, {
-            let step_progress = step_progress.clone();
-            let lsp_store = lsp_store.clone();
-            move |_, event, cx| match event {
-                project::Event::DiskBasedDiagnosticsFinished { language_server_id } => {
-                    let lsp_store = lsp_store.read(cx);
-                    let name = lsp_store
-                        .language_server_adapter_for_id(*language_server_id)
-                        .unwrap()
-                        .name();
-                    step_progress.set_substatus(format!("LSP idle: {}", name));
-                    tx.try_send(*language_server_id).ok();
-                }
-                _ => {}
-            }
-        }),
-    ];
-
+    // Save the buffer so the server sees the current content and kicks off diagnostics.
     project
         .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
         .await?;
 
-    let mut pending_language_server_ids = lsp_store.read_with(cx, |lsp_store, _| {
-        language_server_ids
-            .iter()
-            .copied()
-            .filter(|id| {
-                lsp_store
-                    .language_server_statuses
-                    .get(id)
-                    .is_some_and(|status| status.has_pending_diagnostic_updates)
-            })
-            .collect::<HashSet<_>>()
-    });
-    while !pending_language_server_ids.is_empty() {
+    // Phase 2: wait for all servers to finish their diagnostic pass.
+    while !servers_pending_diagnostics.is_empty() {
         futures::select! {
-            language_server_id = rx.next() => {
-                if let Some(id) = language_server_id {
-                    pending_language_server_ids.remove(&id);
+            id = diag_rx.next() => {
+                if let Some(id) = id {
+                    servers_pending_diagnostics.remove(&id);
                 }
             },
             _ = timeout.clone().fuse() => {

crates/edit_prediction_cli/src/reversal_tracking.rs 🔗

@@ -655,6 +655,7 @@ mod tests {
     use super::*;
     use edit_prediction::udiff::apply_diff_to_string;
     use indoc::indoc;
+    use zeta_prompt::ExcerptRanges;
 
     fn make_test_prompt_inputs(
         content: &str,
@@ -664,15 +665,23 @@ mod tests {
         ZetaPromptInput {
             cursor_path: Arc::from(Path::new("src/test.rs")),
             cursor_excerpt: content.into(),
-            editable_range_in_excerpt: 0..content.len(),
             cursor_offset_in_excerpt: 0,
             excerpt_start_row,
             events,
             related_files: Vec::new(),
-            excerpt_ranges: None,
-            preferred_model: None,
+            excerpt_ranges: ExcerptRanges {
+                editable_150: 0..content.len(),
+                editable_180: 0..content.len(),
+                editable_350: 0..content.len(),
+                editable_150_context_350: 0..content.len(),
+                editable_180_context_350: 0..content.len(),
+                editable_350_context_150: 0..content.len(),
+                ..Default::default()
+            },
+            experiment: None,
             in_open_source_repo: false,
             can_collect_data: false,
+            repo_url: None,
         }
     }
 

crates/edit_prediction_cli/src/score.rs 🔗

@@ -217,7 +217,8 @@ fn compute_cursor_metrics(
     }
 }
 
-pub fn print_report(examples: &[Example]) {
+pub fn print_report(examples: &[Example], verbose: bool) {
+    const MAX_EXAMPLES_DEFAULT: usize = 20;
     use crate::metrics::ClassificationMetrics;
 
     const LINE_WIDTH: usize = 101;
@@ -250,6 +251,9 @@ pub fn print_report(examples: &[Example]) {
     let mut patch_deleted_tokens: Vec<usize> = Vec::new();
     let mut predictions_with_patch: usize = 0;
 
+    let mut printed_lines: usize = 0;
+    let mut skipped_lines: usize = 0;
+
     for example in examples {
         for (score_idx, score) in example.score.iter().enumerate() {
             let exact_lines = ClassificationMetrics {
@@ -284,18 +288,23 @@ pub fn print_report(examples: &[Example]) {
                 (None, _) => "-".to_string(),
             };
 
-            println!(
-                "{:<40} {:>8.2} {:>5} {:>6.1}% {:>6.1}% {:>7} {:>7} {:>6} {:>5}",
-                truncate_name(&example.spec.name, 40),
-                score.delta_chr_f,
-                score.braces_disbalance,
-                exact_lines.f1() * 100.0,
-                score.reversal_ratio * 100.0,
-                qa_reverts_str,
-                qa_conf_str,
-                cursor_str,
-                wrong_er_str
-            );
+            if verbose || printed_lines < MAX_EXAMPLES_DEFAULT {
+                println!(
+                    "{:<40} {:>8.2} {:>5} {:>6.1}% {:>6.1}% {:>7} {:>7} {:>6} {:>5}",
+                    truncate_name(&example.spec.name, 40),
+                    score.delta_chr_f,
+                    score.braces_disbalance,
+                    exact_lines.f1() * 100.0,
+                    score.reversal_ratio * 100.0,
+                    qa_reverts_str,
+                    qa_conf_str,
+                    cursor_str,
+                    wrong_er_str
+                );
+                printed_lines += 1;
+            } else {
+                skipped_lines += 1;
+            }
 
             all_delta_chr_f_scores.push(score.delta_chr_f);
             all_reversal_ratios.push(score.reversal_ratio);
@@ -358,6 +367,13 @@ pub fn print_report(examples: &[Example]) {
         }
     }
 
+    if skipped_lines > 0 {
+        println!(
+            "{:<40} (use --verbose to see all {} examples)",
+            format!("... and {} more", skipped_lines),
+            printed_lines + skipped_lines
+        );
+    }
     println!("{}", separator);
 
     if !all_delta_chr_f_scores.is_empty() {

crates/edit_prediction_cli/src/split_dataset.rs 🔗

@@ -1,29 +1,34 @@
 //! `ep split` implementation.
 //!
 //! This command splits a JSONL dataset into multiple files based on size specifications,
-//! with stratification by repository URL (if the field is present).
+//! with optional stratification by a JSON field.
 //!
 //! # Usage
 //!
 //! ```text
-//! ep split [input.jsonl] <out1>=<size1> <out2>=<size2> ...
+//! ep split [--stratify=<field>] [input.jsonl] <out1>=<size1> <out2>=<size2> ...
 //! ```
 //!
 //! If `input.jsonl` is not provided or is `-`, reads from stdin.
 //!
 //! # Size specifications
 //!
-//! - `80%` - percentage of total (repositories if stratified, examples otherwise)
-//! - `100` - absolute count of repositories (if stratified) or examples
+//! - `80%` - percentage of total examples (lines)
+//! - `100` - approximate absolute count of examples (lines)
 //! - `rest` - all remaining items (only one split can use this)
 //!
 //! # Stratification
 //!
-//! When examples have a `repository_url` field, the split is stratified by repository.
-//! This ensures each output file contains examples from non-overlapping repositories.
-//! Size specifications apply to the number of repositories, not individual examples.
+//! The `--stratify` flag controls how examples are grouped before splitting:
 //!
-//! Examples without `repository_url` are distributed proportionally across all outputs.
+//! - `cursor-path` (default): group by the `cursor_path` JSON field
+//! - `repo`: group by the `repository_url` JSON field
+//! - `none`: no grouping, split individual examples
+//!
+//! When stratifying, the split ensures each output file contains examples from
+//! non-overlapping groups. Size specifications always apply to the number of
+//! examples (lines), with whole groups assigned greedily to meet the target.
+//! Examples missing the stratification field are treated as individual groups.
 
 use anyhow::{Context as _, Result, bail};
 use clap::Args;
@@ -38,23 +43,27 @@ use std::path::{Path, PathBuf};
 /// `ep split` CLI args.
 #[derive(Debug, Args, Clone)]
 #[command(
-    about = "Split a JSONL dataset into multiple files (stratified by repository_url if present)",
+    about = "Split a JSONL dataset into multiple files with optional stratification",
     after_help = r#"SIZE SPECIFICATIONS:
   <percentage>%    Percentage of total (e.g., 80%)
   <count>          Absolute number (e.g., 100)
   rest             All remaining items (only one output can use this)
 
-  When stratifying by repository_url, sizes apply to repositories, not examples.
+  Sizes always apply to examples (lines). When stratifying, whole groups
+  are assigned greedily to approximate the target count.
 
 EXAMPLES:
-  # Split 80% train, 20% validation
+  # Split 80% train, 20% validation (default: stratify by cursor_path)
   ep split input.jsonl train.jsonl=80% valid.jsonl=rest
 
   # Split into train/valid/test
   ep split input.jsonl train.jsonl=80% valid.jsonl=10% test.jsonl=rest
 
-  # Use absolute counts (100 repos to train, rest to valid)
-  ep split input.jsonl train.jsonl=100 valid.jsonl=rest
+  # Stratify by repository_url instead of cursor_path
+  ep split --stratify=repo input.jsonl train.jsonl=80% valid.jsonl=rest
+
+  # No stratification (split by individual examples)
+  ep split --stratify=none input.jsonl train.jsonl=80% valid.jsonl=rest
 
   # Read from stdin
   cat input.jsonl | ep split train.jsonl=80% valid.jsonl=rest
@@ -62,14 +71,15 @@ EXAMPLES:
   # Reproducible split with seed
   ep split --seed 42 input.jsonl train.jsonl=80% valid.jsonl=rest
 
-  # Disable stratification (split by examples, not repositories)
-  ep split --no-stratify input.jsonl train.jsonl=80% valid.jsonl=rest
-
 STRATIFICATION:
-  When examples have a "repository_url" field, the split ensures each output
-  file contains examples from non-overlapping repositories. This prevents
-  data leakage between train/test splits. Use --no-stratify to disable this
-  behavior and split by individual examples instead.
+  Controls how examples are grouped before splitting:
+    cursor-path  Group by "cursor_path" field (default)
+    repo         Group by "repository_url" field
+    none         No grouping, split individual examples
+
+  When stratifying, the split ensures each output file contains examples
+  from non-overlapping groups. This prevents data leakage between
+  train/test splits.
 "#
 )]
 pub struct SplitArgs {
@@ -77,9 +87,19 @@ pub struct SplitArgs {
     #[arg(long)]
     pub seed: Option<u64>,
 
-    /// Disable stratification by repository_url (split by examples instead)
-    #[arg(long)]
-    pub no_stratify: bool,
+    /// Stratification field for splitting the dataset
+    #[arg(long, default_value = "cursor-path")]
+    pub stratify: Stratify,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum, strum::Display)]
+pub enum Stratify {
+    #[strum(serialize = "cursor_path")]
+    CursorPath,
+    #[strum(serialize = "repo")]
+    Repo,
+    #[strum(serialize = "none")]
+    None,
 }
 
 #[derive(Debug, Clone)]
@@ -142,29 +162,6 @@ fn read_lines_from_input(input: Option<&Path>) -> Result<Vec<String>> {
     Ok(lines)
 }
 
-fn get_repository_url(line: &str) -> Option<String> {
-    let value: Value = serde_json::from_str(line).ok()?;
-    value
-        .get("repository_url")
-        .and_then(|v| v.as_str())
-        .map(|s| s.to_string())
-}
-
-fn group_lines_by_repo(lines: Vec<String>) -> (HashMap<String, Vec<String>>, Vec<String>) {
-    let mut by_repo: HashMap<String, Vec<String>> = HashMap::new();
-    let mut without_repo: Vec<String> = Vec::new();
-
-    for line in lines {
-        if let Some(repo_url) = get_repository_url(&line) {
-            by_repo.entry(repo_url).or_default().push(line);
-        } else {
-            without_repo.push(line);
-        }
-    }
-
-    (by_repo, without_repo)
-}
-
 fn compute_split_counts(specs: &[SplitSpec], total: usize) -> Result<Vec<usize>> {
     let mut counts = vec![0usize; specs.len()];
     let mut remaining = total;
@@ -261,26 +258,20 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> {
         return Ok(());
     }
 
-    let (by_repo, without_repo) = group_lines_by_repo(lines);
-    let has_repos = !by_repo.is_empty() && !args.no_stratify;
+    let mut grouped_lines = group_lines(&lines, args.stratify);
 
-    if args.no_stratify && !by_repo.is_empty() {
+    if args.stratify != Stratify::None {
         eprintln!(
-            "Stratification disabled (--no-stratify), splitting {} examples by line",
+            "Stratifying by {} ({} unique groups, {} examples)",
+            args.stratify,
+            grouped_lines.len(),
             total_lines
         );
-    } else if has_repos {
+    } else {
         eprintln!(
-            "Stratifying by repository_url ({} unique repositories, {} examples)",
-            by_repo.len(),
-            total_lines - without_repo.len()
+            "No stratification, splitting {} examples by line",
+            total_lines
         );
-        if !without_repo.is_empty() {
-            eprintln!(
-                "  + {} examples without repository_url (distributed proportionally)",
-                without_repo.len()
-            );
-        }
     }
 
     let mut rng = match args.seed {
@@ -288,53 +279,31 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> {
         None => rand::rngs::StdRng::from_os_rng(),
     };
 
-    let mut split_outputs: Vec<Vec<String>> = vec![Vec::new(); specs.len()];
-
-    if has_repos {
-        let mut repos: Vec<String> = by_repo.keys().cloned().collect();
-        repos.shuffle(&mut rng);
+    grouped_lines.shuffle(&mut rng);
 
-        let repo_counts = compute_split_counts(&specs, repos.len())?;
+    let line_targets = compute_split_counts(&specs, total_lines)?;
+    let rest_index = specs.iter().position(|s| matches!(s.size, SplitSize::Rest));
+    let mut split_outputs: Vec<Vec<String>> = vec![Vec::new(); specs.len()];
+    let mut group_iter = grouped_lines.into_iter();
 
-        let mut repo_iter = repos.into_iter();
-        for (split_idx, &count) in repo_counts.iter().enumerate() {
-            for _ in 0..count {
-                if let Some(repo) = repo_iter.next() {
-                    if let Some(repo_lines) = by_repo.get(&repo) {
-                        split_outputs[split_idx].extend(repo_lines.iter().cloned());
-                    }
-                }
-            }
+    for (split_idx, &target) in line_targets.iter().enumerate() {
+        if Some(split_idx) == rest_index {
+            continue;
         }
-
-        if !without_repo.is_empty() {
-            let no_repo_counts = compute_split_counts(&specs, without_repo.len())?;
-            let mut no_repo_shuffled = without_repo;
-            no_repo_shuffled.shuffle(&mut rng);
-
-            let mut line_iter = no_repo_shuffled.into_iter();
-            for (split_idx, &count) in no_repo_counts.iter().enumerate() {
-                for _ in 0..count {
-                    if let Some(line) = line_iter.next() {
-                        split_outputs[split_idx].push(line);
-                    }
-                }
+        let mut accumulated = 0;
+        while accumulated < target {
+            if let Some(group) = group_iter.next() {
+                accumulated += group.len();
+                split_outputs[split_idx].extend(group);
+            } else {
+                break;
             }
         }
-    } else {
-        let line_counts = compute_split_counts(&specs, total_lines)?;
-        let mut all_lines: Vec<String> = by_repo.into_values().flatten().collect();
-        all_lines.extend(without_repo);
-        all_lines.shuffle(&mut rng);
-
-        let mut line_iter = all_lines.into_iter();
+    }
 
-        for (split_idx, &count) in line_counts.iter().enumerate() {
-            for _ in 0..count {
-                if let Some(line) = line_iter.next() {
-                    split_outputs[split_idx].push(line);
-                }
-            }
+    if let Some(idx) = rest_index {
+        for group in group_iter {
+            split_outputs[idx].extend(group);
         }
     }
 
@@ -346,6 +315,39 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> {
     Ok(())
 }
 
+/// Groups lines by the specified stratification field.
+///
+/// When `stratify` is `None`, each line becomes its own group.
+/// When a line is missing the stratification field, it is also placed in its own group.
+fn group_lines(lines: &[String], stratify: Stratify) -> Vec<Vec<String>> {
+    if stratify == Stratify::None {
+        return lines.iter().map(|line| vec![line.clone()]).collect();
+    }
+
+    let field = match stratify {
+        Stratify::Repo => "repository_url",
+        Stratify::CursorPath => "cursor_path",
+        Stratify::None => unreachable!(),
+    };
+
+    let mut groups: HashMap<String, Vec<String>> = HashMap::new();
+    let mut ungrouped: Vec<Vec<String>> = Vec::new();
+
+    for line in lines {
+        let key = serde_json::from_str::<Value>(line)
+            .ok()
+            .and_then(|v| v.get(field)?.as_str().map(|s| s.to_string()));
+        match key {
+            Some(key) => groups.entry(key).or_default().push(line.clone()),
+            None => ungrouped.push(vec![line.clone()]),
+        }
+    }
+
+    let mut result: Vec<Vec<String>> = groups.into_values().collect();
+    result.extend(ungrouped);
+    result
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -389,15 +391,11 @@ mod tests {
     }
 
     #[test]
-    fn test_get_repository_url() {
-        let line = r#"{"repository_url": "https://github.com/example/repo", "data": 123}"#;
-        assert_eq!(
-            get_repository_url(line),
-            Some("https://github.com/example/repo".to_string())
-        );
-
-        let line_no_repo = r#"{"data": 123}"#;
-        assert_eq!(get_repository_url(line_no_repo), None);
+    fn test_group_lines_none() {
+        let lines = vec!["a".to_string(), "b".to_string(), "c".to_string()];
+        let groups = group_lines(&lines, Stratify::None);
+        assert_eq!(groups.len(), 3);
+        assert!(groups.iter().all(|g| g.len() == 1));
     }
 
     #[test]
@@ -457,12 +455,30 @@ mod tests {
             r#"{"id": 4}"#.to_string(),
         ];
 
-        let (by_repo, without_repo) = group_lines_by_repo(lines);
+        let groups = group_lines(&lines, Stratify::Repo);
+
+        let grouped_count: usize = groups.iter().filter(|g| g.len() > 1).count();
+        let ungrouped_count: usize = groups.iter().filter(|g| g.len() == 1).count();
+        let total_lines: usize = groups.iter().map(|g| g.len()).sum();
 
-        assert_eq!(by_repo.len(), 2);
-        assert_eq!(by_repo.get("repo1").unwrap().len(), 2);
-        assert_eq!(by_repo.get("repo2").unwrap().len(), 1);
-        assert_eq!(without_repo.len(), 1);
+        assert_eq!(grouped_count, 1); // repo1 has 2 lines
+        assert_eq!(ungrouped_count, 2); // repo2 (1 line) + line without repo
+        assert_eq!(total_lines, 4);
+    }
+
+    #[test]
+    fn test_group_lines_by_cursor_path() {
+        let lines = vec![
+            r#"{"cursor_path": "src/main.rs", "id": 1}"#.to_string(),
+            r#"{"cursor_path": "src/main.rs", "id": 2}"#.to_string(),
+            r#"{"cursor_path": "src/lib.rs", "id": 3}"#.to_string(),
+        ];
+
+        let groups = group_lines(&lines, Stratify::CursorPath);
+
+        let total_lines: usize = groups.iter().map(|g| g.len()).sum();
+        assert_eq!(groups.len(), 2);
+        assert_eq!(total_lines, 3);
     }
 
     #[test]
@@ -484,7 +500,7 @@ mod tests {
 
         let args = SplitArgs {
             seed: Some(42),
-            no_stratify: false,
+            stratify: Stratify::Repo,
         };
         let inputs = vec![
             input.path().to_path_buf(),
@@ -502,14 +518,18 @@ mod tests {
 
         assert_eq!(train_lines.len() + valid_lines.len(), 8);
 
-        let train_repos: std::collections::HashSet<_> = train_lines
-            .iter()
-            .filter_map(|l| get_repository_url(l))
-            .collect();
-        let valid_repos: std::collections::HashSet<_> = valid_lines
-            .iter()
-            .filter_map(|l| get_repository_url(l))
-            .collect();
+        let get_repo = |line: &str| -> Option<String> {
+            let value: Value = serde_json::from_str(line).ok()?;
+            value
+                .get("repository_url")
+                .and_then(|v| v.as_str())
+                .map(|s| s.to_string())
+        };
+
+        let train_repos: std::collections::HashSet<_> =
+            train_lines.iter().filter_map(|l| get_repo(l)).collect();
+        let valid_repos: std::collections::HashSet<_> =
+            valid_lines.iter().filter_map(|l| get_repo(l)).collect();
 
         assert!(
             train_repos.is_disjoint(&valid_repos),
@@ -531,4 +551,54 @@ mod tests {
         ];
         assert!(compute_split_counts(&specs, 100).is_err());
     }
+
+    #[test]
+    fn test_absolute_targets_lines_not_groups() {
+        // 5 repos × 3 lines each = 15 total lines.
+        // `train=6` should target ~6 lines (2 groups), NOT 6 groups (only 5 exist, i.e. all 15 lines).
+        let input = create_temp_jsonl(&[
+            r#"{"repository_url": "r1", "id": 1}"#,
+            r#"{"repository_url": "r1", "id": 2}"#,
+            r#"{"repository_url": "r1", "id": 3}"#,
+            r#"{"repository_url": "r2", "id": 4}"#,
+            r#"{"repository_url": "r2", "id": 5}"#,
+            r#"{"repository_url": "r2", "id": 6}"#,
+            r#"{"repository_url": "r3", "id": 7}"#,
+            r#"{"repository_url": "r3", "id": 8}"#,
+            r#"{"repository_url": "r3", "id": 9}"#,
+            r#"{"repository_url": "r4", "id": 10}"#,
+            r#"{"repository_url": "r4", "id": 11}"#,
+            r#"{"repository_url": "r4", "id": 12}"#,
+            r#"{"repository_url": "r5", "id": 13}"#,
+            r#"{"repository_url": "r5", "id": 14}"#,
+            r#"{"repository_url": "r5", "id": 15}"#,
+        ]);
+
+        let temp_dir = tempfile::tempdir().unwrap();
+        let train_path = temp_dir.path().join("train.jsonl");
+        let valid_path = temp_dir.path().join("valid.jsonl");
+
+        let args = SplitArgs {
+            seed: Some(42),
+            stratify: Stratify::Repo,
+        };
+        let inputs = vec![
+            input.path().to_path_buf(),
+            PathBuf::from(format!("{}=6", train_path.display())),
+            PathBuf::from(format!("{}=rest", valid_path.display())),
+        ];
+
+        run_split(&args, &inputs).unwrap();
+
+        let train_content = std::fs::read_to_string(&train_path).unwrap();
+        let valid_content = std::fs::read_to_string(&valid_path).unwrap();
+
+        let train_lines: Vec<&str> = train_content.lines().collect();
+        let valid_lines: Vec<&str> = valid_content.lines().collect();
+
+        // With 3-line groups, train should get 2 groups (6 lines) to meet the
+        // target of 6, NOT 6 groups (which don't even exist). Valid gets the rest.
+        assert_eq!(train_lines.len(), 6);
+        assert_eq!(valid_lines.len(), 9);
+    }
 }

crates/edit_prediction_cli/src/synthesize.rs 🔗

@@ -284,7 +284,7 @@ fn should_skip_commit(commit: &CommitInfo) -> bool {
         .lines()
         .filter(|l| l.starts_with('+') || l.starts_with('-'))
         .count();
-    lines_changed < 10
+    lines_changed < 30
         || lines_changed > 1000
         || is_non_code_commit(commit)
         || is_rename_commit(commit)
@@ -377,10 +377,13 @@ fn build_prompt(repo_url: &str, commit: &CommitInfo) -> String {
         indoc! {r#"
             You are analyzing a git commit to construct a realistic edit prediction example.
 
-            Your goal is to tell the story of a programmer's editing session: what sequence of changes did they make, and what change logically comes next? We use these examples to train a model to predict edits, so the quality of the EDIT HISTORY is what matters most.
+            Your goal is to tell the story of a programmer's editing session: what sequence
+            of changes did they make, and what change logically comes next? We use these examples
+            to train a model to predict edits, so the quality of the EDIT HISTORY is what matters most.
 
             An edit prediction example consists of:
-            1. **Edit History**: 3-6 hunks showing what the programmer did BEFORE making the expected patch. This is the most important part - it must tell a coherent story of the changes leading up to the prediction.
+            1. **Edit History**: 2-6 hunks showing what the programmer did BEFORE making the expected patch.
+               This is the most important part - it must tell a coherent story of the changes leading up to the prediction.
             2. **Expected Patch**: One small hunk that logically follows from the edit history.
 
             Both single-file and multi-file patterns are acceptable.
@@ -417,7 +420,7 @@ fn build_prompt(repo_url: &str, commit: &CommitInfo) -> String {
             First, THINK through whether this commit can support a good example:
 
             1. What is the high-level pattern in this commit?
-            2. Can you identify at least 4 related hunks (3 for edit history + 1 for expected patch)?
+            2. Can you identify at least 3 related hunks (2 or more for edit history + 1 for expected patch)?
             3. What would be the narrative? (First... then... then... finally predict...)
             4. Which specific hunk should be the expected patch (the "punchline")?
 

crates/edit_prediction_context/src/assemble_excerpts.rs 🔗

@@ -8,16 +8,18 @@ const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 24;
 
 pub fn assemble_excerpt_ranges(
     buffer: &BufferSnapshot,
-    mut input_ranges: Vec<Range<Point>>,
-) -> Vec<Range<u32>> {
+    input_ranges: Vec<(Range<Point>, usize)>,
+) -> Vec<(Range<u32>, usize)> {
+    let mut input_ranges: Vec<(Range<Point>, usize)> = input_ranges
+        .into_iter()
+        .map(|(range, order)| (clip_range_to_lines(&range, false, buffer), order))
+        .collect();
     merge_ranges(&mut input_ranges);
 
-    let mut outline_ranges = Vec::new();
+    let mut outline_ranges: Vec<(Range<Point>, usize)> = Vec::new();
     let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None);
     let mut outline_ix = 0;
-    for input_range in &mut input_ranges {
-        *input_range = clip_range_to_lines(input_range, false, buffer);
-
+    for (input_range, input_order) in &mut input_ranges {
         while let Some(outline_item) = outline_items.get(outline_ix) {
             let item_range = clip_range_to_lines(&outline_item.range, false, buffer);
 
@@ -36,6 +38,7 @@ pub fn assemble_excerpt_ranges(
                 add_outline_item(
                     item_range.clone(),
                     body_range.clone(),
+                    *input_order,
                     buffer,
                     &mut outline_ranges,
                 );
@@ -57,6 +60,7 @@ pub fn assemble_excerpt_ranges(
                                 next_outline_item
                                     .body_range(buffer)
                                     .map(|body| clip_range_to_lines(&body, true, buffer)),
+                                *input_order,
                                 buffer,
                                 &mut outline_ranges,
                             );
@@ -70,12 +74,12 @@ pub fn assemble_excerpt_ranges(
         }
     }
 
-    input_ranges.extend_from_slice(&outline_ranges);
+    input_ranges.extend(outline_ranges);
     merge_ranges(&mut input_ranges);
 
     input_ranges
         .into_iter()
-        .map(|range| range.start.row..range.end.row)
+        .map(|(range, order)| (range.start.row..range.end.row, order))
         .collect()
 }
 
@@ -102,8 +106,9 @@ fn clip_range_to_lines(
 fn add_outline_item(
     mut item_range: Range<Point>,
     body_range: Option<Range<Point>>,
+    order: usize,
     buffer: &BufferSnapshot,
-    outline_ranges: &mut Vec<Range<Point>>,
+    outline_ranges: &mut Vec<(Range<Point>, usize)>,
 ) {
     if let Some(mut body_range) = body_range {
         if body_range.start.column > 0 {
@@ -113,38 +118,39 @@ fn add_outline_item(
 
         let head_range = item_range.start..body_range.start;
         if head_range.start < head_range.end {
-            outline_ranges.push(head_range);
+            outline_ranges.push((head_range, order));
         }
 
         let tail_range = body_range.end..item_range.end;
         if tail_range.start < tail_range.end {
-            outline_ranges.push(tail_range);
+            outline_ranges.push((tail_range, order));
         }
     } else {
         item_range.start.column = 0;
         item_range.end.column = buffer.line_len(item_range.end.row);
-        outline_ranges.push(item_range);
+        outline_ranges.push((item_range, order));
     }
 }
 
-pub fn merge_ranges(ranges: &mut Vec<Range<Point>>) {
-    ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end)));
+pub fn merge_ranges(ranges: &mut Vec<(Range<Point>, usize)>) {
+    ranges.sort_unstable_by(|(a, _), (b, _)| a.start.cmp(&b.start).then(b.end.cmp(&a.end)));
 
     let mut index = 1;
     while index < ranges.len() {
-        let mut prev_range_end = ranges[index - 1].end;
+        let mut prev_range_end = ranges[index - 1].0.end;
         if prev_range_end.column > 0 {
             prev_range_end += Point::new(1, 0);
         }
 
         if (prev_range_end + Point::new(1, 0))
-            .cmp(&ranges[index].start)
+            .cmp(&ranges[index].0.start)
             .is_ge()
         {
             let removed = ranges.remove(index);
-            if removed.end.cmp(&ranges[index - 1].end).is_gt() {
-                ranges[index - 1].end = removed.end;
+            if removed.0.end.cmp(&ranges[index - 1].0.end).is_gt() {
+                ranges[index - 1].0.end = removed.0.end;
             }
+            ranges[index - 1].1 = ranges[index - 1].1.min(removed.1);
         } else {
             index += 1;
         }

crates/edit_prediction_context/src/edit_prediction_context.rs 🔗

@@ -39,6 +39,7 @@ struct RelatedBuffer {
     buffer: Entity<Buffer>,
     path: Arc<Path>,
     anchor_ranges: Vec<Range<Anchor>>,
+    excerpt_orders: Vec<usize>,
     cached_file: Option<CachedRelatedFile>,
 }
 
@@ -174,21 +175,21 @@ impl RelatedExcerptStore {
                 };
                 let buffer = project.get_open_buffer(&project_path, cx)?;
                 let snapshot = buffer.read(cx).snapshot();
-                let anchor_ranges = file
-                    .excerpts
-                    .iter()
-                    .map(|excerpt| {
-                        let start = snapshot.anchor_before(Point::new(excerpt.row_range.start, 0));
-                        let end_row = excerpt.row_range.end;
-                        let end_col = snapshot.line_len(end_row);
-                        let end = snapshot.anchor_after(Point::new(end_row, end_col));
-                        start..end
-                    })
-                    .collect();
+                let mut anchor_ranges = Vec::with_capacity(file.excerpts.len());
+                let mut excerpt_orders = Vec::with_capacity(file.excerpts.len());
+                for excerpt in &file.excerpts {
+                    let start = snapshot.anchor_before(Point::new(excerpt.row_range.start, 0));
+                    let end_row = excerpt.row_range.end;
+                    let end_col = snapshot.line_len(end_row);
+                    let end = snapshot.anchor_after(Point::new(end_row, end_col));
+                    anchor_ranges.push(start..end);
+                    excerpt_orders.push(excerpt.order);
+                }
                 Some(RelatedBuffer {
                     buffer,
                     path: file.path.clone(),
                     anchor_ranges,
+                    excerpt_orders,
                     cached_file: None,
                 })
             })
@@ -221,18 +222,55 @@ impl RelatedExcerptStore {
             cx.emit(RelatedExcerptStoreEvent::StartedRefresh);
         })?;
 
-        let identifiers = cx
+        let identifiers_with_ranks = cx
             .background_spawn(async move {
-                identifiers_for_position(&snapshot, position, identifier_line_count)
+                let cursor_offset = position.to_offset(&snapshot);
+                let identifiers =
+                    identifiers_for_position(&snapshot, position, identifier_line_count);
+
+                // Compute byte distance from cursor to each identifier, then sort by
+                // distance so we can assign ordinal ranks. Identifiers at the same
+                // distance share the same rank.
+                let mut identifiers_with_distance: Vec<(Identifier, usize)> = identifiers
+                    .into_iter()
+                    .map(|id| {
+                        let start = id.range.start.to_offset(&snapshot);
+                        let end = id.range.end.to_offset(&snapshot);
+                        let distance = if cursor_offset < start {
+                            start - cursor_offset
+                        } else if cursor_offset > end {
+                            cursor_offset - end
+                        } else {
+                            0
+                        };
+                        (id, distance)
+                    })
+                    .collect();
+                identifiers_with_distance.sort_by_key(|(_, distance)| *distance);
+
+                let mut cursor_distances: HashMap<Identifier, usize> = HashMap::default();
+                let mut current_rank = 0;
+                let mut previous_distance = None;
+                for (identifier, distance) in &identifiers_with_distance {
+                    if previous_distance != Some(*distance) {
+                        current_rank = cursor_distances.len();
+                        previous_distance = Some(*distance);
+                    }
+                    cursor_distances.insert(identifier.clone(), current_rank);
+                }
+
+                (identifiers_with_distance, cursor_distances)
             })
             .await;
 
+        let (identifiers_with_distance, cursor_distances) = identifiers_with_ranks;
+
         let async_cx = cx.clone();
         let start_time = Instant::now();
         let futures = this.update(cx, |this, cx| {
-            identifiers
+            identifiers_with_distance
                 .into_iter()
-                .filter_map(|identifier| {
+                .filter_map(|(identifier, _)| {
                     let task = if let Some(entry) = this.cache.get(&identifier) {
                         DefinitionTask::CacheHit(entry.clone())
                     } else {
@@ -334,7 +372,8 @@ impl RelatedExcerptStore {
         }
         mean_definition_latency /= cache_miss_count.max(1) as u32;
 
-        let (new_cache, related_buffers) = rebuild_related_files(&project, new_cache, cx).await?;
+        let (new_cache, related_buffers) =
+            rebuild_related_files(&project, new_cache, &cursor_distances, cx).await?;
 
         if let Some(file) = &file {
             log::debug!(
@@ -362,6 +401,7 @@ impl RelatedExcerptStore {
 async fn rebuild_related_files(
     project: &Entity<Project>,
     mut new_entries: HashMap<Identifier, Arc<CacheEntry>>,
+    cursor_distances: &HashMap<Identifier, usize>,
     cx: &mut AsyncApp,
 ) -> Result<(HashMap<Identifier, Arc<CacheEntry>>, Vec<RelatedBuffer>)> {
     let mut snapshots = HashMap::default();
@@ -396,12 +436,18 @@ async fn rebuild_related_files(
         }
     }
 
+    let cursor_distances = cursor_distances.clone();
     Ok(cx
         .background_spawn(async move {
             let mut ranges_by_buffer =
-                HashMap::<EntityId, (Entity<Buffer>, Vec<Range<Point>>)>::default();
+                HashMap::<EntityId, (Entity<Buffer>, Vec<(Range<Point>, usize)>)>::default();
             let mut paths_by_buffer = HashMap::default();
-            for entry in new_entries.values_mut() {
+            let mut min_rank_by_buffer = HashMap::<EntityId, usize>::default();
+            for (identifier, entry) in new_entries.iter_mut() {
+                let rank = cursor_distances
+                    .get(identifier)
+                    .copied()
+                    .unwrap_or(usize::MAX);
                 for definition in entry
                     .definitions
                     .iter()
@@ -412,11 +458,16 @@ async fn rebuild_related_files(
                     };
                     paths_by_buffer.insert(definition.buffer.entity_id(), definition.path.clone());
 
+                    let buffer_rank = min_rank_by_buffer
+                        .entry(definition.buffer.entity_id())
+                        .or_insert(usize::MAX);
+                    *buffer_rank = (*buffer_rank).min(rank);
+
                     ranges_by_buffer
                         .entry(definition.buffer.entity_id())
                         .or_insert_with(|| (definition.buffer.clone(), Vec::new()))
                         .1
-                        .push(definition.anchor_range.to_point(snapshot));
+                        .push((definition.anchor_range.to_point(snapshot), rank));
                 }
             }
 
@@ -425,7 +476,7 @@ async fn rebuild_related_files(
                 .filter_map(|(entity_id, (buffer, ranges))| {
                     let snapshot = snapshots.get(&entity_id)?;
                     let project_path = paths_by_buffer.get(&entity_id)?;
-                    let row_ranges = assemble_excerpt_ranges(snapshot, ranges);
+                    let assembled = assemble_excerpt_ranges(snapshot, ranges);
                     let root_name = worktree_root_names.get(&project_path.worktree_id)?;
 
                     let path: Arc<Path> = Path::new(&format!(
@@ -435,20 +486,21 @@ async fn rebuild_related_files(
                     ))
                     .into();
 
-                    let anchor_ranges = row_ranges
-                        .into_iter()
-                        .map(|row_range| {
-                            let start = snapshot.anchor_before(Point::new(row_range.start, 0));
-                            let end_col = snapshot.line_len(row_range.end);
-                            let end = snapshot.anchor_after(Point::new(row_range.end, end_col));
-                            start..end
-                        })
-                        .collect();
+                    let mut anchor_ranges = Vec::with_capacity(assembled.len());
+                    let mut excerpt_orders = Vec::with_capacity(assembled.len());
+                    for (row_range, order) in assembled {
+                        let start = snapshot.anchor_before(Point::new(row_range.start, 0));
+                        let end_col = snapshot.line_len(row_range.end);
+                        let end = snapshot.anchor_after(Point::new(row_range.end, end_col));
+                        anchor_ranges.push(start..end);
+                        excerpt_orders.push(order);
+                    }
 
                     let mut related_buffer = RelatedBuffer {
                         buffer,
                         path,
                         anchor_ranges,
+                        excerpt_orders,
                         cached_file: None,
                     };
                     related_buffer.fill_cache(snapshot);
@@ -456,7 +508,17 @@ async fn rebuild_related_files(
                 })
                 .collect();
 
-            related_buffers.sort_by_key(|related| related.path.clone());
+            related_buffers.sort_by(|a, b| {
+                let rank_a = min_rank_by_buffer
+                    .get(&a.buffer.entity_id())
+                    .copied()
+                    .unwrap_or(usize::MAX);
+                let rank_b = min_rank_by_buffer
+                    .get(&b.buffer.entity_id())
+                    .copied()
+                    .unwrap_or(usize::MAX);
+                rank_a.cmp(&rank_b).then_with(|| a.path.cmp(&b.path))
+            });
 
             (new_entries, related_buffers)
         })
@@ -487,12 +549,14 @@ impl RelatedBuffer {
         let excerpts = self
             .anchor_ranges
             .iter()
-            .map(|range| {
+            .zip(self.excerpt_orders.iter())
+            .map(|(range, &order)| {
                 let start = range.start.to_point(buffer);
                 let end = range.end.to_point(buffer);
                 RelatedExcerpt {
                     row_range: start.row..end.row,
                     text: buffer.text_for_range(start..end).collect::<String>().into(),
+                    order,
                 }
             })
             .collect::<Vec<_>>();
@@ -580,14 +644,12 @@ fn identifiers_for_position(
     let outer_range =
         ranges.first().map_or(0, |r| r.start)..ranges.last().map_or(buffer.len(), |r| r.end);
 
-    let mut captures = buffer
-        .syntax
-        .captures(outer_range.clone(), &buffer.text, |grammar| {
-            grammar
-                .highlights_config
-                .as_ref()
-                .map(|config| &config.query)
-        });
+    let mut captures = buffer.captures(outer_range.clone(), |grammar| {
+        grammar
+            .highlights_config
+            .as_ref()
+            .map(|config| &config.query)
+    });
 
     for range in ranges {
         captures.set_byte_range(range.start..outer_range.end);

crates/edit_prediction_context/src/edit_prediction_context_tests.rs 🔗

@@ -48,6 +48,24 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) {
         assert_related_files(
             &excerpts,
             &[
+                (
+                    "root/src/person.rs",
+                    &[
+                        indoc! {"
+                        pub struct Person {
+                            first_name: String,
+                            last_name: String,
+                            email: String,
+                            age: u32,
+                        }
+
+                        impl Person {
+                            pub fn get_first_name(&self) -> &str {
+                                &self.first_name
+                            }"},
+                        "}",
+                    ],
+                ),
                 (
                     "root/src/company.rs",
                     &[indoc! {"
@@ -71,24 +89,6 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) {
                         }"},
                     ],
                 ),
-                (
-                    "root/src/person.rs",
-                    &[
-                        indoc! {"
-                        pub struct Person {
-                            first_name: String,
-                            last_name: String,
-                            email: String,
-                            age: u32,
-                        }
-
-                        impl Person {
-                            pub fn get_first_name(&self) -> &str {
-                                &self.first_name
-                            }"},
-                        "}",
-                    ],
-                ),
             ],
         );
     });
@@ -112,6 +112,24 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) {
         assert_related_files(
             &excerpts,
             &[
+                (
+                    "root/src/person.rs",
+                    &[
+                        indoc! {"
+                        pub struct Person {
+                            first_name: String,
+                            last_name: String,
+                            email: String,
+                            age: u32,
+                        }
+
+                        impl Person {
+                            pub fn get_first_name(&self) -> &str {
+                                &self.first_name
+                            }"},
+                        "}",
+                    ],
+                ),
                 (
                     "root/src/company.rs",
                     &[indoc! {"
@@ -136,24 +154,6 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) {
                         }"},
                     ],
                 ),
-                (
-                    "root/src/person.rs",
-                    &[
-                        indoc! {"
-                        pub struct Person {
-                            first_name: String,
-                            last_name: String,
-                            email: String,
-                            age: u32,
-                        }
-
-                        impl Person {
-                            pub fn get_first_name(&self) -> &str {
-                                &self.first_name
-                            }"},
-                        "}",
-                    ],
-                ),
             ],
         );
     });
@@ -290,20 +290,21 @@ fn test_assemble_excerpts(cx: &mut TestAppContext) {
         let (input, ranges) = marked_text_ranges(&input, false);
         let buffer = cx.new(|cx| Buffer::local(input, cx).with_language(rust_lang(), cx));
         buffer.read_with(cx, |buffer, _cx| {
-            let ranges: Vec<Range<Point>> = ranges
+            let ranges: Vec<(Range<Point>, usize)> = ranges
                 .into_iter()
-                .map(|range| range.to_point(&buffer))
+                .map(|range| (range.to_point(&buffer), 0))
                 .collect();
 
-            let row_ranges = assemble_excerpt_ranges(&buffer.snapshot(), ranges);
-            let excerpts: Vec<RelatedExcerpt> = row_ranges
+            let assembled = assemble_excerpt_ranges(&buffer.snapshot(), ranges);
+            let excerpts: Vec<RelatedExcerpt> = assembled
                 .into_iter()
-                .map(|row_range| {
+                .map(|(row_range, order)| {
                     let start = Point::new(row_range.start, 0);
                     let end = Point::new(row_range.end, buffer.line_len(row_range.end));
                     RelatedExcerpt {
                         row_range,
                         text: buffer.text_for_range(start..end).collect::<String>().into(),
+                        order,
                     }
                 })
                 .collect();
@@ -620,7 +621,6 @@ async fn test_type_definition_deduplication(cx: &mut TestAppContext) {
         assert_related_files(
             &excerpts,
             &[
-                ("root/src/main.rs", &["fn work() {", "}"]),
                 (
                     "root/src/types.rs",
                     &[indoc! {"
@@ -628,6 +628,194 @@ async fn test_type_definition_deduplication(cx: &mut TestAppContext) {
                             value: i32,
                         }"}],
                 ),
+                ("root/src/main.rs", &["fn work() {", "}"]),
+            ],
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_definitions_ranked_by_cursor_proximity(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    // helpers.rs has an impl block whose body exceeds the test
+    // MAX_OUTLINE_ITEM_BODY_SIZE (24 bytes), so assemble_excerpt_ranges
+    // splits it into header + individual children + closing brace. main.rs
+    // references two of the three methods on separate lines at varying
+    // distances from the cursor. This exercises:
+    //   1. File ordering by closest identifier rank.
+    //   2. Per-excerpt ordering within a file — child excerpts carry the rank
+    //      of the identifier that discovered them.
+    //   3. Parent excerpt (impl header / closing brace) inheriting the minimum
+    //      order of its children.
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "src": {
+                "helpers.rs": indoc! {r#"
+                    pub struct Helpers {
+                        value: i32,
+                    }
+
+                    impl Helpers {
+                        pub fn alpha(&self) -> i32 {
+                            let intermediate = self.value;
+                            intermediate + 1
+                        }
+
+                        pub fn beta(&self) -> i32 {
+                            let intermediate = self.value;
+                            intermediate + 2
+                        }
+
+                        pub fn gamma(&self) -> i32 {
+                            let intermediate = self.value;
+                            intermediate + 3
+                        }
+                    }
+                "#},
+                "main.rs": indoc! {r#"
+                    use super::helpers::Helpers;
+
+                    fn process(h: Helpers) {
+                        let a = h.alpha();
+                        let b = h.gamma();
+                    }
+                "#},
+            },
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+    let mut servers = setup_fake_lsp(&project, cx);
+
+    let (buffer, _handle) = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
+        })
+        .await
+        .unwrap();
+
+    let _server = servers.next().await.unwrap();
+    cx.run_until_parked();
+
+    // Place cursor on "h.alpha()". `alpha` is at distance 0, `gamma` is
+    // farther below. Both resolve to methods inside `impl Helpers` in
+    // helpers.rs. The impl header and closing brace excerpts should inherit
+    // the min order of their children (alpha's order).
+    let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(&project, cx));
+    related_excerpt_store.update(cx, |store, cx| {
+        let position = {
+            let buffer = buffer.read(cx);
+            let offset = buffer.text().find("h.alpha()").unwrap();
+            buffer.anchor_before(offset)
+        };
+
+        store.set_identifier_line_count(1);
+        store.refresh(buffer.clone(), position, cx);
+    });
+
+    cx.executor().advance_clock(DEBOUNCE_DURATION);
+    related_excerpt_store.update(cx, |store, cx| {
+        let files = store.related_files(cx);
+
+        // helpers.rs has 4 excerpts: the struct+impl header merged with
+        // the alpha method header (order 1 from alpha), alpha's closing
+        // brace (order 1), gamma's method header (order 6), and the
+        // gamma+impl closing brace (order 1, inherited from alpha which
+        // is also a child of the impl).
+        let alpha_order = 1;
+        let gamma_order = 6;
+        assert_related_files_with_orders(
+            &files,
+            &[
+                (
+                    "root/src/helpers.rs",
+                    &[
+                        (
+                            indoc! {"
+                            pub struct Helpers {
+                                value: i32,
+                            }
+
+                            impl Helpers {
+                                pub fn alpha(&self) -> i32 {"},
+                            alpha_order,
+                        ),
+                        ("    }", alpha_order),
+                        ("    pub fn gamma(&self) -> i32 {", gamma_order),
+                        (
+                            indoc! {"
+                                }
+                            }"},
+                            alpha_order,
+                        ),
+                    ],
+                ),
+                (
+                    "root/src/main.rs",
+                    &[("fn process(h: Helpers) {", 8), ("}", 8)],
+                ),
+            ],
+        );
+    });
+
+    // Now move cursor to "h.gamma()" — gamma becomes closest, reranking the
+    // excerpts so that the gamma method excerpt has the best order and the
+    // alpha method excerpt has a worse order.
+    related_excerpt_store.update(cx, |store, cx| {
+        let position = {
+            let buffer = buffer.read(cx);
+            let offset = buffer.text().find("h.gamma()").unwrap();
+            buffer.anchor_before(offset)
+        };
+
+        store.set_identifier_line_count(1);
+        store.refresh(buffer.clone(), position, cx);
+    });
+
+    cx.executor().advance_clock(DEBOUNCE_DURATION);
+    related_excerpt_store.update(cx, |store, cx| {
+        let files = store.related_files(cx);
+
+        // Now gamma is closest. The alpha method excerpts carry alpha's
+        // rank (3), and the gamma method excerpts carry gamma's rank (1).
+        // The impl closing brace merges with gamma's closing brace and
+        // inherits gamma's order (the best child).
+        let alpha_order = 3;
+        let gamma_order = 1;
+        assert_related_files_with_orders(
+            &files,
+            &[
+                (
+                    "root/src/helpers.rs",
+                    &[
+                        (
+                            indoc! {"
+                            pub struct Helpers {
+                                value: i32,
+                            }
+
+                            impl Helpers {
+                                pub fn alpha(&self) -> i32 {"},
+                            alpha_order,
+                        ),
+                        ("    }", alpha_order),
+                        ("    pub fn gamma(&self) -> i32 {", gamma_order),
+                        (
+                            indoc! {"
+                                }
+                            }"},
+                            gamma_order,
+                        ),
+                    ],
+                ),
+                (
+                    "root/src/main.rs",
+                    &[("fn process(h: Helpers) {", 8), ("}", 8)],
+                ),
             ],
         );
     });
@@ -788,30 +976,56 @@ fn test_project_1() -> serde_json::Value {
 }
 
 fn assert_related_files(actual_files: &[RelatedFile], expected_files: &[(&str, &[&str])]) {
-    let actual_files = actual_files
+    let expected_with_orders: Vec<(&str, Vec<(&str, usize)>)> = expected_files
+        .iter()
+        .map(|(path, texts)| (*path, texts.iter().map(|text| (*text, 0)).collect()))
+        .collect();
+    let expected_refs: Vec<(&str, &[(&str, usize)])> = expected_with_orders
+        .iter()
+        .map(|(path, excerpts)| (*path, excerpts.as_slice()))
+        .collect();
+    assert_related_files_impl(actual_files, &expected_refs, false)
+}
+
+fn assert_related_files_with_orders(
+    actual_files: &[RelatedFile],
+    expected_files: &[(&str, &[(&str, usize)])],
+) {
+    assert_related_files_impl(actual_files, expected_files, true)
+}
+
+fn assert_related_files_impl(
+    actual_files: &[RelatedFile],
+    expected_files: &[(&str, &[(&str, usize)])],
+    check_orders: bool,
+) {
+    let actual: Vec<(&str, Vec<(String, usize)>)> = actual_files
         .iter()
         .map(|file| {
             let excerpts = file
                 .excerpts
                 .iter()
-                .map(|excerpt| excerpt.text.to_string())
-                .collect::<Vec<_>>();
+                .map(|excerpt| {
+                    let order = if check_orders { excerpt.order } else { 0 };
+                    (excerpt.text.to_string(), order)
+                })
+                .collect();
             (file.path.to_str().unwrap(), excerpts)
         })
-        .collect::<Vec<_>>();
-    let expected_excerpts = expected_files
+        .collect();
+    let expected: Vec<(&str, Vec<(String, usize)>)> = expected_files
         .iter()
-        .map(|(path, texts)| {
+        .map(|(path, excerpts)| {
             (
                 *path,
-                texts
+                excerpts
                     .iter()
-                    .map(|line| line.to_string())
-                    .collect::<Vec<_>>(),
+                    .map(|(text, order)| (text.to_string(), *order))
+                    .collect(),
             )
         })
-        .collect::<Vec<_>>();
-    pretty_assertions::assert_eq!(actual_files, expected_excerpts)
+        .collect();
+    pretty_assertions::assert_eq!(actual, expected)
 }
 
 fn assert_definitions(definitions: &[LocationLink], first_lines: &[&str], cx: &mut TestAppContext) {

crates/edit_prediction_ui/Cargo.toml 🔗

@@ -40,7 +40,6 @@ paths.workspace = true
 project.workspace = true
 regex.workspace = true
 settings.workspace = true
-supermaven.workspace = true
 telemetry.workspace = true
 text.workspace = true
 theme.workspace = true

crates/edit_prediction_ui/src/edit_prediction_button.rs 🔗

@@ -3,7 +3,7 @@ use client::{Client, UserStore, zed_urls};
 use cloud_llm_client::UsageLimit;
 use codestral::{self, CodestralEditPredictionDelegate};
 use copilot::Status;
-use edit_prediction::{EditPredictionStore, Zeta2FeatureFlag};
+use edit_prediction::EditPredictionStore;
 use edit_prediction_types::EditPredictionDelegateHandle;
 use editor::{
     Editor, MultiBufferOffset, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll,
@@ -22,15 +22,12 @@ use language::{
 };
 use project::{DisableAiSettings, Project};
 use regex::Regex;
-use settings::{
-    EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, Settings, SettingsStore, update_settings_file,
-};
+use settings::{Settings, SettingsStore, update_settings_file};
 use std::{
     rc::Rc,
     sync::{Arc, LazyLock},
     time::Duration,
 };
-use supermaven::{AccountStatus, Supermaven};
 use ui::{
     Clickable, ContextMenu, ContextMenuEntry, DocumentationSide, IconButton, IconButtonShape,
     Indicator, PopoverMenu, PopoverMenuHandle, ProgressBar, Tooltip, prelude::*,
@@ -75,13 +72,6 @@ pub struct EditPredictionButton {
     project: WeakEntity<Project>,
 }
 
-enum SupermavenButtonStatus {
-    Ready,
-    Errored(String),
-    NeedsActivation(String),
-    Initializing,
-}
-
 impl Render for EditPredictionButton {
     fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         // Return empty div if AI is disabled
@@ -188,101 +178,6 @@ impl Render for EditPredictionButton {
                         .with_handle(self.popover_menu_handle.clone()),
                 )
             }
-            EditPredictionProvider::Supermaven => {
-                let Some(supermaven) = Supermaven::global(cx) else {
-                    return div();
-                };
-
-                let supermaven = supermaven.read(cx);
-
-                let status = match supermaven {
-                    Supermaven::Starting => SupermavenButtonStatus::Initializing,
-                    Supermaven::FailedDownload { error } => {
-                        SupermavenButtonStatus::Errored(error.to_string())
-                    }
-                    Supermaven::Spawned(agent) => {
-                        let account_status = agent.account_status.clone();
-                        match account_status {
-                            AccountStatus::NeedsActivation { activate_url } => {
-                                SupermavenButtonStatus::NeedsActivation(activate_url)
-                            }
-                            AccountStatus::Unknown => SupermavenButtonStatus::Initializing,
-                            AccountStatus::Ready => SupermavenButtonStatus::Ready,
-                        }
-                    }
-                    Supermaven::Error { error } => {
-                        SupermavenButtonStatus::Errored(error.to_string())
-                    }
-                };
-
-                let icon = status.to_icon();
-                let tooltip_text = status.to_tooltip();
-                let has_menu = status.has_menu();
-                let this = cx.weak_entity();
-                let fs = self.fs.clone();
-                let file = self.file.clone();
-                let language = self.language.clone();
-                let project = self.project.clone();
-
-                div().child(
-                    PopoverMenu::new("supermaven")
-                        .on_open({
-                            let file = file.clone();
-                            let language = language;
-                            let project = project;
-                            Rc::new(move |_window, cx| {
-                                emit_edit_prediction_menu_opened(
-                                    "supermaven",
-                                    &file,
-                                    &language,
-                                    &project,
-                                    cx,
-                                );
-                            })
-                        })
-                        .menu(move |window, cx| match &status {
-                            SupermavenButtonStatus::NeedsActivation(activate_url) => {
-                                Some(ContextMenu::build(window, cx, |menu, _, _| {
-                                    let fs = fs.clone();
-                                    let activate_url = activate_url.clone();
-
-                                    menu.entry("Sign In", None, move |_, cx| {
-                                        cx.open_url(activate_url.as_str())
-                                    })
-                                    .entry(
-                                        "Use Zed AI",
-                                        None,
-                                        move |_, cx| {
-                                            set_completion_provider(
-                                                fs.clone(),
-                                                cx,
-                                                EditPredictionProvider::Zed,
-                                            )
-                                        },
-                                    )
-                                }))
-                            }
-                            SupermavenButtonStatus::Ready => this
-                                .update(cx, |this, cx| {
-                                    this.build_supermaven_context_menu(window, cx)
-                                })
-                                .ok(),
-                            _ => None,
-                        })
-                        .anchor(Corner::BottomRight)
-                        .trigger_with_tooltip(
-                            IconButton::new("supermaven-icon", icon),
-                            move |window, cx| {
-                                if has_menu {
-                                    Tooltip::for_action(tooltip_text.clone(), &ToggleMenu, cx)
-                                } else {
-                                    Tooltip::text(tooltip_text.clone())(window, cx)
-                                }
-                            },
-                        )
-                        .with_handle(self.popover_menu_handle.clone()),
-                )
-            }
             EditPredictionProvider::Codestral => {
                 let enabled = self.editor_enabled.unwrap_or(true);
                 let has_api_key = codestral::codestral_api_key(cx).is_some();
@@ -642,9 +537,15 @@ impl EditPredictionButton {
         edit_prediction::ollama::ensure_authenticated(cx);
         let sweep_api_token_task = edit_prediction::sweep_ai::load_sweep_api_token(cx);
         let mercury_api_token_task = edit_prediction::mercury::load_mercury_api_token(cx);
+        let open_ai_compatible_api_token_task =
+            edit_prediction::open_ai_compatible::load_open_ai_compatible_api_token(cx);
 
         cx.spawn(async move |this, cx| {
-            _ = futures::join!(sweep_api_token_task, mercury_api_token_task);
+            _ = futures::join!(
+                sweep_api_token_task,
+                mercury_api_token_task,
+                open_ai_compatible_api_token_task
+            );
             this.update(cx, |_, cx| {
                 cx.notify();
             })
@@ -873,13 +774,7 @@ impl EditPredictionButton {
 
         menu = menu.separator().header("Privacy");
 
-        if matches!(
-            provider,
-            EditPredictionProvider::Zed
-                | EditPredictionProvider::Experimental(
-                    EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME,
-                )
-        ) {
+        if matches!(provider, EditPredictionProvider::Zed) {
             if let Some(provider) = &self.edit_prediction_provider {
                 let data_collection = provider.data_collection_state(cx);
 
@@ -1120,21 +1015,6 @@ impl EditPredictionButton {
         })
     }
 
-    fn build_supermaven_context_menu(
-        &self,
-        window: &mut Window,
-        cx: &mut Context<Self>,
-    ) -> Entity<ContextMenu> {
-        ContextMenu::build(window, cx, |menu, window, cx| {
-            let menu = self.build_language_settings_menu(menu, window, cx);
-            let menu =
-                self.add_provider_switching_section(menu, EditPredictionProvider::Supermaven, cx);
-
-            menu.separator()
-                .action("Sign Out", supermaven::SignOut.boxed_clone())
-        })
-    }
-
     fn build_codestral_context_menu(
         &self,
         window: &mut Window,
@@ -1312,6 +1192,58 @@ impl EditPredictionButton {
                 menu = self.build_language_settings_menu(menu, window, cx);
             }
             menu = self.add_provider_switching_section(menu, provider, cx);
+
+            if cx.is_staff() {
+                if let Some(store) = EditPredictionStore::try_global(cx) {
+                    let store = store.read(cx);
+                    let experiments = store.available_experiments().to_vec();
+                    let preferred = store.preferred_experiment().map(|s| s.to_owned());
+                    let active = store.active_experiment().map(|s| s.to_owned());
+
+                    let preferred_for_submenu = preferred.clone();
+                    menu = menu
+                        .separator()
+                        .submenu("Experiment", move |menu, _window, _cx| {
+                            let mut menu = menu.toggleable_entry(
+                                "Default",
+                                preferred_for_submenu.is_none(),
+                                IconPosition::Start,
+                                None,
+                                {
+                                    move |_window, cx| {
+                                        if let Some(store) = EditPredictionStore::try_global(cx) {
+                                            store.update(cx, |store, _cx| {
+                                                store.set_preferred_experiment(None);
+                                            });
+                                        }
+                                    }
+                                },
+                            );
+                            for experiment in &experiments {
+                                let is_selected = active.as_deref() == Some(experiment.as_str())
+                                    || preferred.as_deref() == Some(experiment.as_str());
+                                let experiment_name = experiment.clone();
+                                menu = menu.toggleable_entry(
+                                    experiment.clone(),
+                                    is_selected,
+                                    IconPosition::Start,
+                                    None,
+                                    move |_window, cx| {
+                                        if let Some(store) = EditPredictionStore::try_global(cx) {
+                                            store.update(cx, |store, _cx| {
+                                                store.set_preferred_experiment(Some(
+                                                    experiment_name.clone(),
+                                                ));
+                                            });
+                                        }
+                                    },
+                                );
+                            }
+                            menu
+                        });
+                }
+            }
+
             menu = menu.separator().item(
                 ContextMenuEntry::new("Configure Providers")
                     .icon(IconName::Settings)
@@ -1384,33 +1316,6 @@ impl StatusItemView for EditPredictionButton {
     }
 }
 
-impl SupermavenButtonStatus {
-    fn to_icon(&self) -> IconName {
-        match self {
-            SupermavenButtonStatus::Ready => IconName::Supermaven,
-            SupermavenButtonStatus::Errored(_) => IconName::SupermavenError,
-            SupermavenButtonStatus::NeedsActivation(_) => IconName::SupermavenInit,
-            SupermavenButtonStatus::Initializing => IconName::SupermavenInit,
-        }
-    }
-
-    fn to_tooltip(&self) -> String {
-        match self {
-            SupermavenButtonStatus::Ready => "Supermaven is ready".to_string(),
-            SupermavenButtonStatus::Errored(error) => format!("Supermaven error: {}", error),
-            SupermavenButtonStatus::NeedsActivation(_) => "Supermaven needs activation".to_string(),
-            SupermavenButtonStatus::Initializing => "Supermaven initializing".to_string(),
-        }
-    }
-
-    fn has_menu(&self) -> bool {
-        match self {
-            SupermavenButtonStatus::Ready | SupermavenButtonStatus::NeedsActivation(_) => true,
-            SupermavenButtonStatus::Errored(_) | SupermavenButtonStatus::Initializing => false,
-        }
-    }
-}
-
 async fn open_disabled_globs_setting_in_editor(
     workspace: WeakEntity<Workspace>,
     cx: &mut AsyncWindowContext,
@@ -1494,12 +1399,6 @@ pub fn get_available_providers(cx: &mut App) -> Vec<EditPredictionProvider> {
 
     providers.push(EditPredictionProvider::Zed);
 
-    if cx.has_flag::<Zeta2FeatureFlag>() {
-        providers.push(EditPredictionProvider::Experimental(
-            EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME,
-        ));
-    }
-
     if let Some(app_state) = workspace::AppState::global(cx).upgrade()
         && copilot::GlobalCopilotAuth::try_get_or_init(app_state, cx)
             .is_some_and(|copilot| copilot.0.read(cx).is_authenticated())
@@ -1507,14 +1406,6 @@ pub fn get_available_providers(cx: &mut App) -> Vec<EditPredictionProvider> {
         providers.push(EditPredictionProvider::Copilot);
     };
 
-    if let Some(supermaven) = Supermaven::global(cx) {
-        if let Supermaven::Spawned(agent) = supermaven.read(cx) {
-            if matches!(agent.account_status, AccountStatus::Ready) {
-                providers.push(EditPredictionProvider::Supermaven);
-            }
-        }
-    }
-
     if codestral::codestral_api_key(cx).is_some() {
         providers.push(EditPredictionProvider::Codestral);
     }

crates/edit_prediction_ui/src/edit_prediction_context_view.rs 🔗

@@ -8,14 +8,17 @@ use std::{
 
 use anyhow::Result;
 use client::{Client, UserStore};
-use editor::{Editor, PathKey};
+use editor::{
+    Editor, PathKey,
+    display_map::{BlockPlacement, BlockProperties, BlockStyle},
+};
 use futures::StreamExt as _;
 use gpui::{
     Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle,
     Focusable, InteractiveElement as _, IntoElement as _, ParentElement as _, SharedString,
     Styled as _, Task, TextAlign, Window, actions, div, pulsating_between,
 };
-use multi_buffer::MultiBuffer;
+use multi_buffer::{Anchor, MultiBuffer};
 use project::Project;
 use text::Point;
 use ui::{
@@ -165,8 +168,14 @@ impl EditPredictionContextView {
         }
 
         cx.spawn_in(window, async move |this, cx| {
-            let mut paths = Vec::new();
+            let mut paths: Vec<(PathKey, _, Vec<_>, Vec<usize>, usize)> = Vec::new();
             for (related_file, buffer) in related_files {
+                let orders = related_file
+                    .excerpts
+                    .iter()
+                    .map(|excerpt| excerpt.order)
+                    .collect::<Vec<_>>();
+                let min_order = orders.iter().copied().min().unwrap_or(usize::MAX);
                 let point_ranges = related_file
                     .excerpts
                     .iter()
@@ -175,20 +184,53 @@ impl EditPredictionContextView {
                     })
                     .collect::<Vec<_>>();
                 cx.update(|_, cx| {
-                    let path = PathKey::for_buffer(&buffer, cx);
-                    paths.push((path, buffer, point_ranges));
+                    let path = if let Some(file) = buffer.read(cx).file() {
+                        PathKey::with_sort_prefix(min_order as u64, file.path().clone())
+                    } else {
+                        PathKey::for_buffer(&buffer, cx)
+                    };
+                    paths.push((path, buffer, point_ranges, orders, min_order));
                 })?;
             }
 
+            paths.sort_by_key(|(_, _, _, _, min_order)| *min_order);
+
+            let mut excerpt_anchors_with_orders: Vec<(Anchor, usize)> = Vec::new();
+
             multibuffer.update(cx, |multibuffer, cx| {
                 multibuffer.clear(cx);
 
-                for (path, buffer, ranges) in paths {
-                    multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
+                for (path, buffer, ranges, orders, _) in paths {
+                    let (anchor_ranges, _) =
+                        multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
+                    for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) {
+                        excerpt_anchors_with_orders.push((anchor_range.start, order));
+                    }
                 }
             });
 
             editor.update_in(cx, |editor, window, cx| {
+                let blocks = excerpt_anchors_with_orders
+                    .into_iter()
+                    .map(|(anchor, order)| {
+                        let label = SharedString::from(format!("order: {order}"));
+                        BlockProperties {
+                            placement: BlockPlacement::Above(anchor),
+                            height: Some(1),
+                            style: BlockStyle::Sticky,
+                            render: Arc::new(move |cx| {
+                                div()
+                                    .pl(cx.anchor_x)
+                                    .text_ui_xs(cx)
+                                    .text_color(cx.editor_style.status.info)
+                                    .child(label.clone())
+                                    .into_any_element()
+                            }),
+                            priority: 0,
+                        }
+                    })
+                    .collect::<Vec<_>>();
+                editor.insert_blocks(blocks, None, cx);
                 editor.move_to_beginning(&Default::default(), window, cx);
             })?;
 

crates/edit_prediction_ui/src/edit_prediction_ui.rs 🔗

@@ -3,7 +3,7 @@ mod edit_prediction_context_view;
 mod rate_prediction_modal;
 
 use command_palette_hooks::CommandPaletteFilter;
-use edit_prediction::{EditPredictionStore, ResetOnboarding, Zeta2FeatureFlag, capture_example};
+use edit_prediction::{EditPredictionStore, ResetOnboarding, capture_example};
 use edit_prediction_context_view::EditPredictionContextView;
 use editor::Editor;
 use feature_flags::FeatureFlagAppExt as _;
@@ -54,28 +54,25 @@ pub fn init(cx: &mut App) {
             capture_example_as_markdown(workspace, window, cx);
         });
         workspace.register_action_renderer(|div, _, _, cx| {
-            let has_flag = cx.has_flag::<Zeta2FeatureFlag>();
-            div.when(has_flag, |div| {
-                div.on_action(cx.listener(
-                    move |workspace, _: &OpenEditPredictionContextView, window, cx| {
-                        let project = workspace.project();
-                        workspace.split_item(
-                            SplitDirection::Right,
-                            Box::new(cx.new(|cx| {
-                                EditPredictionContextView::new(
-                                    project.clone(),
-                                    workspace.client(),
-                                    workspace.user_store(),
-                                    window,
-                                    cx,
-                                )
-                            })),
-                            window,
-                            cx,
-                        );
-                    },
-                ))
-            })
+            div.on_action(cx.listener(
+                move |workspace, _: &OpenEditPredictionContextView, window, cx| {
+                    let project = workspace.project();
+                    workspace.split_item(
+                        SplitDirection::Right,
+                        Box::new(cx.new(|cx| {
+                            EditPredictionContextView::new(
+                                project.clone(),
+                                workspace.client(),
+                                workspace.user_store(),
+                                window,
+                                cx,
+                            )
+                        })),
+                        window,
+                        cx,
+                    );
+                },
+            ))
         });
     })
     .detach();

crates/edit_prediction_ui/src/rate_prediction_modal.rs 🔗

@@ -1,6 +1,6 @@
 use buffer_diff::BufferDiff;
 use edit_prediction::{EditPrediction, EditPredictionRating, EditPredictionStore};
-use editor::{Editor, ExcerptRange, Inlay, MultiBuffer};
+use editor::{Editor, Inlay, MultiBuffer};
 use feature_flags::FeatureFlag;
 use gpui::{
     App, BorderStyle, DismissEvent, EdgesRefinement, Entity, EventEmitter, FocusHandle, Focusable,
@@ -359,16 +359,9 @@ impl RatePredictionsModal {
                 editor.disable_header_for_buffer(new_buffer_id, cx);
                 let excerpt_id = editor.buffer().update(cx, |multibuffer, cx| {
                     multibuffer.clear(cx);
-                    let excerpt_ids = multibuffer.push_excerpts(
-                        new_buffer,
-                        vec![ExcerptRange {
-                            context: start..end,
-                            primary: start..end,
-                        }],
-                        cx,
-                    );
+                    multibuffer.set_excerpts_for_buffer(new_buffer, [start..end], 0, cx);
                     multibuffer.add_diff(diff, cx);
-                    excerpt_ids.into_iter().next()
+                    multibuffer.excerpt_ids().into_iter().next()
                 });
 
                 if let Some((excerpt_id, cursor_position)) =

crates/editor/src/bracket_colorization.rs 🔗

@@ -5,10 +5,10 @@
 use std::ops::Range;
 
 use crate::{Editor, HighlightKey};
-use collections::HashMap;
-use gpui::{Context, HighlightStyle};
+use collections::{HashMap, HashSet};
+use gpui::{AppContext as _, Context, HighlightStyle};
 use itertools::Itertools;
-use language::language_settings;
+use language::{BufferRow, BufferSnapshot, language_settings};
 use multi_buffer::{Anchor, ExcerptId};
 use ui::{ActiveTheme, utils::ensure_minimum_contrast};
 
@@ -19,22 +19,16 @@ impl Editor {
         }
 
         if invalidate {
-            self.fetched_tree_sitter_chunks.clear();
+            self.bracket_fetched_tree_sitter_chunks.clear();
         }
 
         let accents_count = cx.theme().accents().0.len();
         let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
-        let anchors_in_multi_buffer = |current_excerpt: ExcerptId,
-                                       text_anchors: [text::Anchor; 4]|
-         -> Option<[Option<_>; 4]> {
-            multi_buffer_snapshot
-                .anchors_in_excerpt(current_excerpt, text_anchors)?
-                .collect_array()
-        };
-
-        let bracket_matches_by_accent = self.visible_excerpts(false, cx).into_iter().fold(
-            HashMap::default(),
-            |mut acc, (excerpt_id, (buffer, _, buffer_range))| {
+
+        let visible_excerpts = self.visible_excerpts(false, cx);
+        let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range<usize>)> = visible_excerpts
+            .into_iter()
+            .filter_map(|(excerpt_id, (buffer, _, buffer_range))| {
                 let buffer_snapshot = buffer.read(cx).snapshot();
                 if language_settings::language_settings(
                     buffer_snapshot.language().map(|language| language.name()),
@@ -43,112 +37,173 @@ impl Editor {
                 )
                 .colorize_brackets
                 {
-                    let fetched_chunks = self
-                        .fetched_tree_sitter_chunks
-                        .entry(excerpt_id)
-                        .or_default();
-
-                    let brackets_by_accent = buffer_snapshot
-                        .fetch_bracket_ranges(
-                            buffer_range.start..buffer_range.end,
-                            Some(fetched_chunks),
-                        )
-                        .into_iter()
-                        .flat_map(|(chunk_range, pairs)| {
-                            if fetched_chunks.insert(chunk_range) {
-                                pairs
-                            } else {
-                                Vec::new()
-                            }
-                        })
-                        .filter_map(|pair| {
-                            let color_index = pair.color_index?;
-
-                            let buffer_open_range =
-                                buffer_snapshot.anchor_range_around(pair.open_range);
-                            let buffer_close_range =
-                                buffer_snapshot.anchor_range_around(pair.close_range);
-                            let [
-                                buffer_open_range_start,
-                                buffer_open_range_end,
-                                buffer_close_range_start,
-                                buffer_close_range_end,
-                            ] = anchors_in_multi_buffer(
-                                excerpt_id,
-                                [
-                                    buffer_open_range.start,
-                                    buffer_open_range.end,
-                                    buffer_close_range.start,
-                                    buffer_close_range.end,
-                                ],
-                            )?;
-                            let multi_buffer_open_range =
-                                buffer_open_range_start.zip(buffer_open_range_end);
-                            let multi_buffer_close_range =
-                                buffer_close_range_start.zip(buffer_close_range_end);
-
-                            let mut ranges = Vec::with_capacity(2);
-                            if let Some((open_start, open_end)) = multi_buffer_open_range {
-                                ranges.push(open_start..open_end);
-                            }
-                            if let Some((close_start, close_end)) = multi_buffer_close_range {
-                                ranges.push(close_start..close_end);
-                            }
-                            if ranges.is_empty() {
-                                None
-                            } else {
-                                Some((color_index % accents_count, ranges))
-                            }
-                        });
+                    Some((excerpt_id, buffer_snapshot, buffer_range))
+                } else {
+                    None
+                }
+            })
+            .collect();
 
-                    for (accent_number, new_ranges) in brackets_by_accent {
-                        let ranges = acc
-                            .entry(accent_number)
-                            .or_insert_with(Vec::<Range<Anchor>>::new);
+        let mut fetched_tree_sitter_chunks = excerpt_data
+            .iter()
+            .filter_map(|(excerpt_id, ..)| {
+                Some((
+                    *excerpt_id,
+                    self.bracket_fetched_tree_sitter_chunks
+                        .get(excerpt_id)
+                        .cloned()?,
+                ))
+            })
+            .collect::<HashMap<ExcerptId, HashSet<Range<BufferRow>>>>();
+
+        let bracket_matches_by_accent = cx.background_spawn(async move {
+            let anchors_in_multi_buffer = |current_excerpt: ExcerptId,
+                                           text_anchors: [text::Anchor; 4]|
+             -> Option<[Option<_>; 4]> {
+                multi_buffer_snapshot
+                    .anchors_in_excerpt(current_excerpt, text_anchors)?
+                    .collect_array()
+            };
 
-                        for new_range in new_ranges {
-                            let i = ranges
-                                .binary_search_by(|probe| {
-                                    probe.start.cmp(&new_range.start, &multi_buffer_snapshot)
-                                })
-                                .unwrap_or_else(|i| i);
-                            ranges.insert(i, new_range);
+            let bracket_matches_by_accent: HashMap<usize, Vec<Range<Anchor>>> =
+                excerpt_data.into_iter().fold(
+                    HashMap::default(),
+                    |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| {
+                        let fetched_chunks =
+                            fetched_tree_sitter_chunks.entry(excerpt_id).or_default();
+
+                        let brackets_by_accent = compute_bracket_ranges(
+                            &buffer_snapshot,
+                            buffer_range,
+                            fetched_chunks,
+                            excerpt_id,
+                            accents_count,
+                            &anchors_in_multi_buffer,
+                        );
+
+                        for (accent_number, new_ranges) in brackets_by_accent {
+                            let ranges = acc
+                                .entry(accent_number)
+                                .or_insert_with(Vec::<Range<Anchor>>::new);
+
+                            for new_range in new_ranges {
+                                let i = ranges
+                                    .binary_search_by(|probe| {
+                                        probe.start.cmp(&new_range.start, &multi_buffer_snapshot)
+                                    })
+                                    .unwrap_or_else(|i| i);
+                                ranges.insert(i, new_range);
+                            }
                         }
-                    }
-                }
 
-                acc
-            },
-        );
+                        acc
+                    },
+                );
 
-        if invalidate {
-            self.clear_highlights_with(
-                &mut |key| matches!(key, HighlightKey::ColorizeBracket(_)),
-                cx,
-            );
-        }
+            (bracket_matches_by_accent, fetched_tree_sitter_chunks)
+        });
 
         let editor_background = cx.theme().colors().editor_background;
         let accents = cx.theme().accents().clone();
-        for (accent_number, bracket_highlights) in bracket_matches_by_accent {
-            let bracket_color = accents.color_for_index(accent_number as u32);
-            let adjusted_color = ensure_minimum_contrast(bracket_color, editor_background, 55.0);
-            let style = HighlightStyle {
-                color: Some(adjusted_color),
-                ..HighlightStyle::default()
-            };
 
-            self.highlight_text_key(
-                HighlightKey::ColorizeBracket(accent_number),
-                bracket_highlights,
-                style,
-                true,
-                cx,
-            );
-        }
+        self.colorize_brackets_task = cx.spawn(async move |editor, cx| {
+            if invalidate {
+                editor
+                    .update(cx, |editor, cx| {
+                        editor.clear_highlights_with(
+                            &mut |key| matches!(key, HighlightKey::ColorizeBracket(_)),
+                            cx,
+                        );
+                    })
+                    .ok();
+            }
+
+            let (bracket_matches_by_accent, updated_chunks) = bracket_matches_by_accent.await;
+
+            editor
+                .update(cx, |editor, cx| {
+                    editor
+                        .bracket_fetched_tree_sitter_chunks
+                        .extend(updated_chunks);
+                    for (accent_number, bracket_highlights) in bracket_matches_by_accent {
+                        let bracket_color = accents.color_for_index(accent_number as u32);
+                        let adjusted_color =
+                            ensure_minimum_contrast(bracket_color, editor_background, 55.0);
+                        let style = HighlightStyle {
+                            color: Some(adjusted_color),
+                            ..HighlightStyle::default()
+                        };
+
+                        editor.highlight_text_key(
+                            HighlightKey::ColorizeBracket(accent_number),
+                            bracket_highlights,
+                            style,
+                            true,
+                            cx,
+                        );
+                    }
+                })
+                .ok();
+        });
     }
 }
 
+fn compute_bracket_ranges(
+    buffer_snapshot: &BufferSnapshot,
+    buffer_range: Range<usize>,
+    fetched_chunks: &mut HashSet<Range<BufferRow>>,
+    excerpt_id: ExcerptId,
+    accents_count: usize,
+    anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option<Anchor>; 4]>,
+) -> Vec<(usize, Vec<Range<Anchor>>)> {
+    buffer_snapshot
+        .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks))
+        .into_iter()
+        .flat_map(|(chunk_range, pairs)| {
+            if fetched_chunks.insert(chunk_range) {
+                pairs
+            } else {
+                Vec::new()
+            }
+        })
+        .filter_map(|pair| {
+            let color_index = pair.color_index?;
+
+            let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range);
+            let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range);
+            let [
+                buffer_open_range_start,
+                buffer_open_range_end,
+                buffer_close_range_start,
+                buffer_close_range_end,
+            ] = anchors_in_multi_buffer(
+                excerpt_id,
+                [
+                    buffer_open_range.start,
+                    buffer_open_range.end,
+                    buffer_close_range.start,
+                    buffer_close_range.end,
+                ],
+            )?;
+            let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end);
+            let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end);
+
+            let mut ranges = Vec::with_capacity(2);
+            if let Some((open_start, open_end)) = multi_buffer_open_range {
+                ranges.push(open_start..open_end);
+            }
+            if let Some((close_start, close_end)) = multi_buffer_close_range {
+                ranges.push(close_start..close_end);
+            }
+            if ranges.is_empty() {
+                None
+            } else {
+                Some((color_index % accents_count, ranges))
+            }
+        })
+        .collect()
+}
+
 #[cfg(test)]
 mod tests {
     use std::{cmp, sync::Arc, time::Duration};
@@ -164,12 +219,12 @@ mod tests {
     };
     use collections::HashSet;
     use fs::FakeFs;
-    use gpui::{AppContext as _, UpdateGlobal as _};
+    use gpui::UpdateGlobal as _;
     use indoc::indoc;
     use itertools::Itertools;
     use language::{Capability, markdown_lang};
     use languages::rust_lang;
-    use multi_buffer::{ExcerptRange, MultiBuffer};
+    use multi_buffer::{MultiBuffer, PathKey};
     use pretty_assertions::assert_eq;
     use project::Project;
     use rope::Point;
@@ -749,6 +804,7 @@ mod foo «1{
                 });
             });
         });
+        cx.executor().run_until_parked();
         assert_eq!(
             &separate_with_comment_lines(
                 indoc! {r#"
@@ -776,6 +832,7 @@ mod foo {
                 });
             });
         });
+        cx.executor().run_until_parked();
         assert_eq!(
             &separate_with_comment_lines(
                 indoc! {r#"
@@ -1239,32 +1296,34 @@ mod foo «1{
 
         let multi_buffer = cx.new(|cx| {
             let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
-            multi_buffer.push_excerpts(
+            multi_buffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_2.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
                 cx,
             );
 
             let excerpt_rows = 5;
             let rest_of_first_except_rows = 3;
-            multi_buffer.push_excerpts(
+            multi_buffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_1.clone(),
                 [
-                    ExcerptRange::new(Point::new(0, 0)..Point::new(excerpt_rows, 0)),
-                    ExcerptRange::new(
-                        Point::new(
-                            comment_lines as u32 + excerpt_rows + rest_of_first_except_rows,
+                    Point::new(0, 0)..Point::new(excerpt_rows, 0),
+                    Point::new(
+                        comment_lines as u32 + excerpt_rows + rest_of_first_except_rows,
+                        0,
+                    )
+                        ..Point::new(
+                            comment_lines as u32
+                                + excerpt_rows
+                                + rest_of_first_except_rows
+                                + excerpt_rows,
                             0,
-                        )
-                            ..Point::new(
-                                comment_lines as u32
-                                    + excerpt_rows
-                                    + rest_of_first_except_rows
-                                    + excerpt_rows,
-                                0,
-                            ),
-                    ),
+                        ),
                 ],
+                0,
                 cx,
             );
             multi_buffer
@@ -1291,7 +1350,7 @@ mod foo «1{
         let map: Option«3<Vec«4<«5()5»>4»>3» = None;
         // a
         // b
-
+        // c
 
     fn process_data_2«2()2» «2{
         let other_map: Option«3<Vec«4<«5()5»>4»>3» = None;
@@ -1331,7 +1390,7 @@ mod foo «1{
         let map: Option«3<Vec«4<«5()5»>4»>3» = None;
         // a
         // b
-
+        // c
 
     fn process_data_2«2()2» «2{
         let other_map: Option«3<Vec«4<«5()5»>4»>3» = None;
@@ -1381,7 +1440,7 @@ mod foo «1{
         let map: Option«1<Vec«2<«1()1»>2»>1» = None;
         // a
         // b
-
+        // c
 
     fn process_data_2«2()2» «2{
         let other_map: Option«1<Vec«2<«1()1»>2»>1» = None;

crates/editor/src/display_map.rs 🔗

@@ -113,6 +113,7 @@ use unicode_segmentation::UnicodeSegmentation;
 use ztracing::instrument;
 
 use std::cell::RefCell;
+use std::collections::hash_map::Entry;
 use std::{
     any::TypeId,
     borrow::Cow,
@@ -175,9 +176,9 @@ pub trait ToDisplayPoint {
     fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
 }
 
-type TextHighlights = TreeMap<HighlightKey, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
+type TextHighlights = Arc<HashMap<HighlightKey, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>>;
 type SemanticTokensHighlights =
-    TreeMap<BufferId, (Arc<[SemanticTokenHighlight]>, Arc<HighlightStyleInterner>)>;
+    Arc<HashMap<BufferId, (Arc<[SemanticTokenHighlight]>, Arc<HighlightStyleInterner>)>>;
 type InlayHighlights = TreeMap<HighlightKey, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
 
 #[derive(Debug)]
@@ -478,7 +479,7 @@ impl DisplayMap {
             diagnostics_max_severity,
             text_highlights: Default::default(),
             inlay_highlights: Default::default(),
-            semantic_token_highlights: TreeMap::default(),
+            semantic_token_highlights: Default::default(),
             clip_at_line_ends: false,
             masked: false,
             companion: None,
@@ -788,6 +789,9 @@ impl DisplayMap {
                 .collect(),
             cx,
         );
+        for buffer_id in &other.block_snapshot.buffers_with_disabled_headers {
+            self.disable_header_for_buffer(*buffer_id, cx);
+        }
     }
 
     /// Creates folds for the given creases.
@@ -1226,22 +1230,25 @@ impl DisplayMap {
         cx: &App,
     ) {
         let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
-        let to_insert = match self.text_highlights.remove(&key) {
-            Some(mut previous) if merge => match Arc::get_mut(&mut previous) {
-                Some((_, previous_ranges)) => {
+        match Arc::make_mut(&mut self.text_highlights).entry(key) {
+            Entry::Occupied(mut slot) => match Arc::get_mut(slot.get_mut()) {
+                Some((_, previous_ranges)) if merge => {
                     previous_ranges.extend(ranges);
                     previous_ranges.sort_by(|a, b| a.start.cmp(&b.start, &multi_buffer_snapshot));
-                    previous
                 }
-                None => Arc::new((style, {
-                    ranges.extend(previous.1.iter().cloned());
+                Some((previous_style, previous_ranges)) => {
+                    *previous_style = style;
+                    *previous_ranges = ranges;
+                }
+                None if merge => {
+                    ranges.extend(slot.get().1.iter().cloned());
                     ranges.sort_by(|a, b| a.start.cmp(&b.start, &multi_buffer_snapshot));
-                    ranges
-                })),
+                    slot.insert(Arc::new((style, ranges)));
+                }
+                None => _ = slot.insert(Arc::new((style, ranges))),
             },
-            _ => Arc::new((style, ranges)),
-        };
-        self.text_highlights.insert(key, to_insert);
+            Entry::Vacant(slot) => _ = slot.insert(Arc::new((style, ranges))),
+        }
     }
 
     #[instrument(skip_all)]
@@ -1288,14 +1295,16 @@ impl DisplayMap {
     }
 
     pub fn clear_highlights(&mut self, key: HighlightKey) -> bool {
-        let mut cleared = self.text_highlights.remove(&key).is_some();
+        let mut cleared = Arc::make_mut(&mut self.text_highlights)
+            .remove(&key)
+            .is_some();
         cleared |= self.inlay_highlights.remove(&key).is_some();
         cleared
     }
 
     pub fn clear_highlights_with(&mut self, f: &mut dyn FnMut(&HighlightKey) -> bool) -> bool {
         let mut cleared = false;
-        self.text_highlights.retain(|k, _| {
+        Arc::make_mut(&mut self.text_highlights).retain(|k, _| {
             let b = !f(k);
             cleared |= b;
             b
@@ -1349,7 +1358,7 @@ impl DisplayMap {
         widths_changed
     }
 
-    pub(crate) fn current_inlays(&self) -> impl Iterator<Item = &Inlay> {
+    pub(crate) fn current_inlays(&self) -> impl Iterator<Item = &Inlay> + Default {
         self.inlay_map.current_inlays()
     }
 
@@ -1448,7 +1457,7 @@ impl DisplayMap {
     }
 
     pub fn invalidate_semantic_highlights(&mut self, buffer_id: BufferId) {
-        self.semantic_token_highlights.remove(&buffer_id);
+        Arc::make_mut(&mut self.semantic_token_highlights).remove(&buffer_id);
     }
 }
 
@@ -1492,7 +1501,7 @@ impl<'a> HighlightedChunk<'a> {
         self,
         editor_style: &'a EditorStyle,
     ) -> impl Iterator<Item = Self> + 'a {
-        let mut chars = self.text.chars().peekable();
+        let mut chunks = self.text.graphemes(true).peekable();
         let mut text = self.text;
         let style = self.style;
         let is_tab = self.is_tab;
@@ -1500,10 +1509,12 @@ impl<'a> HighlightedChunk<'a> {
         let is_inlay = self.is_inlay;
         iter::from_fn(move || {
             let mut prefix_len = 0;
-            while let Some(&ch) = chars.peek() {
-                if !is_invisible(ch) {
-                    prefix_len += ch.len_utf8();
-                    chars.next();
+            while let Some(&chunk) = chunks.peek() {
+                let mut chars = chunk.chars();
+                let Some(ch) = chars.next() else { break };
+                if chunk.len() != ch.len_utf8() || !is_invisible(ch) {
+                    prefix_len += chunk.len();
+                    chunks.next();
                     continue;
                 }
                 if prefix_len > 0 {
@@ -1517,8 +1528,8 @@ impl<'a> HighlightedChunk<'a> {
                         replacement: renderer.clone(),
                     });
                 }
-                chars.next();
-                let (prefix, suffix) = text.split_at(ch.len_utf8());
+                chunks.next();
+                let (prefix, suffix) = text.split_at(chunk.len());
                 text = suffix;
                 if let Some(replacement) = replacement(ch) {
                     let invisible_highlight = HighlightStyle {
@@ -1908,6 +1919,9 @@ impl DisplaySnapshot {
                             color
                         }
                     }),
+                    underline: chunk_highlight
+                        .underline
+                        .filter(|_| editor_style.show_underlines),
                     ..chunk_highlight
                 }
             });
@@ -4118,4 +4132,35 @@ pub mod tests {
         assert_eq!(ranges[0].start, DisplayPoint::new(DisplayRow(0), 10));
         assert_eq!(ranges[0].end, DisplayPoint::new(DisplayRow(0), 14));
     }
+
+    #[test]
+    fn test_highlight_invisibles_preserves_compound_emojis() {
+        let editor_style = EditorStyle::default();
+
+        let pilot_emoji = "🧑\u{200d}✈\u{fe0f}";
+        let chunk = HighlightedChunk {
+            text: pilot_emoji,
+            style: None,
+            is_tab: false,
+            is_inlay: false,
+            replacement: None,
+        };
+
+        let chunks: Vec<_> = chunk
+            .highlight_invisibles(&editor_style)
+            .map(|chunk| chunk.text.to_string())
+            .collect();
+
+        assert_eq!(
+            chunks.concat(),
+            pilot_emoji,
+            "all text bytes must be preserved"
+        );
+        assert_eq!(
+            chunks.len(),
+            1,
+            "compound emoji should not be split into multiple chunks, got: {:?}",
+            chunks,
+        );
+    }
 }

crates/editor/src/display_map/block_map.rs 🔗

@@ -78,6 +78,7 @@ pub struct BlockSnapshot {
     custom_blocks_by_id: TreeMap<CustomBlockId, Arc<CustomBlock>>,
     pub(super) buffer_header_height: u32,
     pub(super) excerpt_header_height: u32,
+    pub(super) buffers_with_disabled_headers: HashSet<BufferId>,
 }
 
 impl Deref for BlockSnapshot {
@@ -265,6 +266,10 @@ impl<P: Debug> Debug for BlockProperties<P> {
 pub enum BlockStyle {
     Fixed,
     Flex,
+    /// Like `Flex` but doesn't use the gutter:
+    /// - block content scrolls with buffer content
+    /// - doesn't paint in gutter
+    Spacer,
     Sticky,
 }
 
@@ -272,6 +277,7 @@ pub enum BlockStyle {
 pub struct EditorMargins {
     pub gutter: GutterDimensions,
     pub right: Pixels,
+    pub extended_right: Pixels,
 }
 
 #[derive(gpui::AppContext, gpui::VisualContext)]
@@ -289,6 +295,7 @@ pub struct BlockContext<'a, 'b> {
     pub height: u32,
     pub selected: bool,
     pub editor_style: &'b EditorStyle,
+    pub indent_guide_padding: Pixels,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
@@ -393,8 +400,8 @@ impl Block {
             Block::Custom(block) => block.style,
             Block::ExcerptBoundary { .. }
             | Block::FoldedBuffer { .. }
-            | Block::BufferHeader { .. }
-            | Block::Spacer { .. } => BlockStyle::Sticky,
+            | Block::BufferHeader { .. } => BlockStyle::Sticky,
+            Block::Spacer { .. } => BlockStyle::Spacer,
         }
     }
 
@@ -651,6 +658,7 @@ impl BlockMap {
                 custom_blocks_by_id: self.custom_blocks_by_id.clone(),
                 buffer_header_height: self.buffer_header_height,
                 excerpt_header_height: self.excerpt_header_height,
+                buffers_with_disabled_headers: self.buffers_with_disabled_headers.clone(),
             },
         }
     }
@@ -1702,12 +1710,13 @@ pub(crate) fn balancing_block(
     Some(BlockProperties {
         placement: their_placement,
         height: my_block.height,
-        style: BlockStyle::Sticky,
+        style: BlockStyle::Spacer,
         render: Arc::new(move |cx| {
             crate::EditorElement::render_spacer_block(
                 cx.block_id,
                 cx.height,
                 cx.line_height,
+                cx.indent_guide_padding,
                 cx.window,
                 cx.app,
             )
@@ -2845,8 +2854,8 @@ mod tests {
     use buffer_diff::BufferDiff;
     use gpui::{App, AppContext as _, Element, div, font, px};
     use itertools::Itertools;
-    use language::{Buffer, Capability};
-    use multi_buffer::{ExcerptRange, MultiBuffer};
+    use language::{Buffer, Capability, Point};
+    use multi_buffer::{MultiBuffer, PathKey};
     use rand::prelude::*;
     use settings::SettingsStore;
     use std::env;
@@ -3056,27 +3065,32 @@ mod tests {
         let buffer2 = cx.new(|cx| Buffer::local("Buffer 2", cx));
         let buffer3 = cx.new(|cx| Buffer::local("Buffer 3", cx));
 
-        let mut excerpt_ids = Vec::new();
         let multi_buffer = cx.new(|cx| {
             let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
-            excerpt_ids.extend(multi_buffer.push_excerpts(
+            multi_buffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer1.clone(),
-                [ExcerptRange::new(0..buffer1.read(cx).len())],
+                [Point::zero()..buffer1.read(cx).max_point()],
+                0,
                 cx,
-            ));
-            excerpt_ids.extend(multi_buffer.push_excerpts(
+            );
+            multi_buffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer2.clone(),
-                [ExcerptRange::new(0..buffer2.read(cx).len())],
+                [Point::zero()..buffer2.read(cx).max_point()],
+                0,
                 cx,
-            ));
-            excerpt_ids.extend(multi_buffer.push_excerpts(
+            );
+            multi_buffer.set_excerpts_for_path(
+                PathKey::sorted(2),
                 buffer3.clone(),
-                [ExcerptRange::new(0..buffer3.read(cx).len())],
+                [Point::zero()..buffer3.read(cx).max_point()],
+                0,
                 cx,
-            ));
-
+            );
             multi_buffer
         });
+        let excerpt_ids = multi_buffer.read_with(cx, |mb, _| mb.excerpt_ids());
 
         let font = test_font();
         let font_size = px(14.);
@@ -3403,30 +3417,32 @@ mod tests {
     fn test_custom_blocks_inside_buffer_folds(cx: &mut gpui::TestAppContext) {
         cx.update(init_test);
 
-        let text = "111\n222\n333\n444\n555\n666";
+        let text = "111\n\n222\n\n333\n\n444\n\n555\n\n666";
 
         let buffer = cx.update(|cx| {
-            MultiBuffer::build_multi(
+            let multibuffer = MultiBuffer::build_multi(
                 [
                     (text, vec![Point::new(0, 0)..Point::new(0, 3)]),
                     (
                         text,
                         vec![
-                            Point::new(1, 0)..Point::new(1, 3),
                             Point::new(2, 0)..Point::new(2, 3),
-                            Point::new(3, 0)..Point::new(3, 3),
+                            Point::new(4, 0)..Point::new(4, 3),
+                            Point::new(6, 0)..Point::new(6, 3),
                         ],
                     ),
                     (
                         text,
                         vec![
-                            Point::new(4, 0)..Point::new(4, 3),
-                            Point::new(5, 0)..Point::new(5, 3),
+                            Point::new(8, 0)..Point::new(8, 3),
+                            Point::new(10, 0)..Point::new(10, 3),
                         ],
                     ),
                 ],
                 cx,
-            )
+            );
+            assert_eq!(multibuffer.read(cx).excerpt_ids().len(), 6);
+            multibuffer
         });
         let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
         let buffer_ids = buffer_snapshot
@@ -3462,16 +3478,16 @@ mod tests {
                 Some(0),
                 None,
                 None,
-                Some(1),
-                None,
                 Some(2),
                 None,
-                Some(3),
+                Some(4),
                 None,
+                Some(6),
                 None,
-                Some(4),
                 None,
-                Some(5),
+                Some(8),
+                None,
+                Some(10),
             ]
         );
 
@@ -3533,19 +3549,19 @@ mod tests {
                 None,
                 None,
                 None,
-                Some(1),
+                Some(2),
                 None,
                 None,
-                Some(2),
+                Some(4),
                 None,
-                Some(3),
+                Some(6),
                 None,
                 None,
                 None,
                 None,
-                Some(4),
+                Some(8),
                 None,
-                Some(5),
+                Some(10),
                 None,
             ]
         );
@@ -3601,19 +3617,19 @@ mod tests {
                 None,
                 None,
                 None,
-                Some(1),
+                Some(2),
                 None,
                 None,
-                Some(2),
+                Some(4),
                 None,
-                Some(3),
+                Some(6),
                 None,
                 None,
                 None,
                 None,
-                Some(4),
+                Some(8),
                 None,
-                Some(5),
+                Some(10),
                 None,
             ]
         );
@@ -3664,9 +3680,9 @@ mod tests {
                 None,
                 None,
                 None,
-                Some(4),
+                Some(8),
                 None,
-                Some(5),
+                Some(10),
                 None,
             ]
         );
@@ -3720,9 +3736,9 @@ mod tests {
                 None,
                 None,
                 None,
-                Some(4),
+                Some(8),
                 None,
-                Some(5),
+                Some(10),
                 None,
             ]
         );
@@ -4590,9 +4606,10 @@ mod tests {
 
         let lhs_multibuffer = cx.new(|cx| {
             let mut mb = MultiBuffer::new(Capability::ReadWrite);
-            mb.push_excerpts(
+            mb.set_excerpts_for_buffer(
                 lhs_buffer.clone(),
-                [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
+                [Point::zero()..lhs_buffer.read(cx).max_point()],
+                0,
                 cx,
             );
             mb.add_inverted_diff(diff.clone(), rhs_buffer.clone(), cx);
@@ -4600,9 +4617,10 @@ mod tests {
         });
         let rhs_multibuffer = cx.new(|cx| {
             let mut mb = MultiBuffer::new(Capability::ReadWrite);
-            mb.push_excerpts(
+            mb.set_excerpts_for_buffer(
                 rhs_buffer.clone(),
-                [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
+                [Point::zero()..rhs_buffer.read(cx).max_point()],
+                0,
                 cx,
             );
             mb.add_diff(diff.clone(), cx);

crates/editor/src/display_map/inlay_map.rs 🔗

@@ -17,7 +17,7 @@ use multi_buffer::{
 };
 use project::InlayId;
 use std::{
-    cmp,
+    cmp, iter,
     ops::{Add, AddAssign, Range, Sub, SubAssign},
     sync::Arc,
 };
@@ -546,8 +546,11 @@ impl InlayMap {
     pub fn new(buffer: MultiBufferSnapshot) -> (Self, InlaySnapshot) {
         let version = 0;
         let snapshot = InlaySnapshot {
-            buffer: buffer.clone(),
-            transforms: SumTree::from_iter(Some(Transform::Isomorphic(buffer.text_summary())), ()),
+            transforms: SumTree::from_iter(
+                iter::once(Transform::Isomorphic(buffer.text_summary())),
+                (),
+            ),
+            buffer,
             version,
         };
 
@@ -745,7 +748,7 @@ impl InlayMap {
     }
 
     #[ztracing::instrument(skip_all)]
-    pub fn current_inlays(&self) -> impl Iterator<Item = &Inlay> {
+    pub fn current_inlays(&self) -> impl Iterator<Item = &Inlay> + Default {
         self.inlays.iter()
     }
 
@@ -1328,9 +1331,10 @@ mod tests {
     use super::*;
     use crate::{
         MultiBuffer,
-        display_map::{HighlightKey, InlayHighlights, TextHighlights},
+        display_map::{HighlightKey, InlayHighlights},
         hover_links::InlayHighlight,
     };
+    use collections::HashMap;
     use gpui::{App, HighlightStyle};
     use multi_buffer::Anchor;
     use project::{InlayHint, InlayHintLabel, ResolveState};
@@ -1897,7 +1901,7 @@ mod tests {
                 );
             }
 
-            let mut text_highlights = TextHighlights::default();
+            let mut text_highlights = HashMap::default();
             let text_highlight_count = rng.random_range(0_usize..10);
             let mut text_highlight_ranges = (0..text_highlight_count)
                 .map(|_| buffer_snapshot.random_byte_range(MultiBufferOffset(0), &mut rng))
@@ -1917,6 +1921,7 @@ mod tests {
                         .collect(),
                 )),
             );
+            let text_highlights = Arc::new(text_highlights);
 
             let mut inlay_highlights = InlayHighlights::default();
             if !inlays.is_empty() {

crates/editor/src/display_map/tab_map.rs 🔗

@@ -44,121 +44,146 @@ impl TabMap {
         mut fold_edits: Vec<FoldEdit>,
         tab_size: NonZeroU32,
     ) -> (TabSnapshot, Vec<TabEdit>) {
-        let old_snapshot = &mut self.0;
-        let mut new_snapshot = TabSnapshot {
-            fold_snapshot,
-            tab_size: tab_size.min(MAX_TABS),
-            max_expansion_column: old_snapshot.max_expansion_column,
-            version: old_snapshot.version,
-        };
-
-        if old_snapshot.fold_snapshot.version != new_snapshot.fold_snapshot.version {
-            new_snapshot.version += 1;
+        let tab_size = tab_size.min(MAX_TABS);
+
+        if self.0.tab_size != tab_size {
+            let old_max_point = self.0.max_point();
+            self.0.version += 1;
+            self.0.fold_snapshot = fold_snapshot;
+            self.0.tab_size = tab_size;
+            return (
+                self.0.clone(),
+                vec![TabEdit {
+                    old: TabPoint::zero()..old_max_point,
+                    new: TabPoint::zero()..self.0.max_point(),
+                }],
+            );
         }
 
-        let tab_edits = if old_snapshot.tab_size == new_snapshot.tab_size {
-            // Expand each edit to include the next tab on the same line as the edit,
-            // and any subsequent tabs on that line that moved across the tab expansion
-            // boundary.
-            for fold_edit in &mut fold_edits {
-                let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
-                let old_end_row_successor_offset = cmp::min(
-                    FoldPoint::new(old_end.row() + 1, 0),
-                    old_snapshot.fold_snapshot.max_point(),
-                )
-                .to_offset(&old_snapshot.fold_snapshot);
-                let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
+        let old_snapshot = &mut self.0;
+        let mut new_version = old_snapshot.version;
+        if old_snapshot.fold_snapshot.version != fold_snapshot.version {
+            new_version += 1;
+        }
 
-                let mut offset_from_edit = 0;
-                let mut first_tab_offset = None;
-                let mut last_tab_with_changed_expansion_offset = None;
-                'outer: for chunk in old_snapshot.fold_snapshot.chunks(
-                    fold_edit.old.end..old_end_row_successor_offset,
-                    false,
-                    Highlights::default(),
-                ) {
-                    let mut remaining_tabs = chunk.tabs;
-                    while remaining_tabs != 0 {
-                        let ix = remaining_tabs.trailing_zeros();
-                        let offset_from_edit = offset_from_edit + ix;
-                        if first_tab_offset.is_none() {
-                            first_tab_offset = Some(offset_from_edit);
-                        }
-
-                        let old_column = old_end.column() + offset_from_edit;
-                        let new_column = new_end.column() + offset_from_edit;
-                        let was_expanded = old_column < old_snapshot.max_expansion_column;
-                        let is_expanded = new_column < new_snapshot.max_expansion_column;
-                        if was_expanded != is_expanded {
-                            last_tab_with_changed_expansion_offset = Some(offset_from_edit);
-                        } else if !was_expanded && !is_expanded {
-                            break 'outer;
-                        }
-
-                        remaining_tabs &= remaining_tabs - 1;
+        if fold_edits.is_empty() {
+            old_snapshot.version = new_version;
+            old_snapshot.fold_snapshot = fold_snapshot;
+            old_snapshot.tab_size = tab_size;
+            return (old_snapshot.clone(), vec![]);
+        }
+        // Expand each edit to include the next tab on the same line as the edit,
+        // and any subsequent tabs on that line that moved across the tab expansion
+        // boundary.
+        //
+        // This is necessary because a tab's display width depends on its column
+        // position: it expands to fill up to the next tab stop. When an edit
+        // shifts text on a line, any tab character after the edit may now render
+        // at a different width even though the tab byte itself wasn't touched.
+        // Additionally, tabs beyond `max_expansion_column` are rendered as a
+        // single space instead of expanding to the next tab stop. An edit that
+        // shifts a tab across that boundary changes its display width, so the
+        // edit must cover it. We scan forward from the edit end to the end of
+        // the line, extending the edit to include the first subsequent tab (whose
+        // rendered width may have changed) and the last tab that crossed the
+        // expansion boundary (transitioning between expanded and non-expanded).
+        for fold_edit in &mut fold_edits {
+            let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
+            let old_end_row_successor_offset = cmp::min(
+                FoldPoint::new(old_end.row() + 1, 0),
+                old_snapshot.fold_snapshot.max_point(),
+            )
+            .to_offset(&old_snapshot.fold_snapshot);
+            let new_end = fold_edit.new.end.to_point(&fold_snapshot);
+
+            let mut offset_from_edit = 0;
+            let mut first_tab_offset = None;
+            let mut last_tab_with_changed_expansion_offset = None;
+            'outer: for chunk in old_snapshot.fold_snapshot.chunks(
+                fold_edit.old.end..old_end_row_successor_offset,
+                false,
+                Highlights::default(),
+            ) {
+                let mut remaining_tabs = chunk.tabs;
+                while remaining_tabs != 0 {
+                    let ix = remaining_tabs.trailing_zeros();
+                    let offset_from_edit = offset_from_edit + ix;
+                    if first_tab_offset.is_none() {
+                        first_tab_offset = Some(offset_from_edit);
                     }
 
-                    offset_from_edit += chunk.text.len() as u32;
-                    if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column
-                        && new_end.column() + offset_from_edit >= new_snapshot.max_expansion_column
-                    {
-                        break;
+                    let old_column = old_end.column() + offset_from_edit;
+                    let new_column = new_end.column() + offset_from_edit;
+                    let was_expanded = old_column < old_snapshot.max_expansion_column;
+                    let is_expanded = new_column < old_snapshot.max_expansion_column;
+                    if was_expanded != is_expanded {
+                        last_tab_with_changed_expansion_offset = Some(offset_from_edit);
+                    } else if !was_expanded && !is_expanded {
+                        break 'outer;
                     }
+
+                    remaining_tabs &= remaining_tabs - 1;
                 }
 
-                if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) {
-                    fold_edit.old.end.0 += offset as usize + 1;
-                    fold_edit.new.end.0 += offset as usize + 1;
+                offset_from_edit += chunk.text.len() as u32;
+                if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column
+                    && new_end.column() + offset_from_edit >= old_snapshot.max_expansion_column
+                {
+                    break;
                 }
             }
 
-            let _old_alloc_ptr = fold_edits.as_ptr();
-            // Combine any edits that overlap due to the expansion.
-            let mut fold_edits = fold_edits.into_iter();
-            if let Some(mut first_edit) = fold_edits.next() {
-                // This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them.
-                #[allow(clippy::filter_map_identity)]
-                let mut v: Vec<_> = fold_edits
-                    .scan(&mut first_edit, |state, edit| {
-                        if state.old.end >= edit.old.start {
-                            state.old.end = edit.old.end;
-                            state.new.end = edit.new.end;
-                            Some(None) // Skip this edit, it's merged
-                        } else {
-                            let new_state = edit;
-                            let result = Some(Some(state.clone())); // Yield the previous edit
-                            **state = new_state;
-                            result
-                        }
-                    })
-                    .filter_map(|x| x)
-                    .collect();
-                v.push(first_edit);
-                debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated");
-                v.into_iter()
-                    .map(|fold_edit| {
-                        let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
-                        let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
-                        let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
-                        let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
-                        TabEdit {
-                            old: old_snapshot.fold_point_to_tab_point(old_start)
-                                ..old_snapshot.fold_point_to_tab_point(old_end),
-                            new: new_snapshot.fold_point_to_tab_point(new_start)
-                                ..new_snapshot.fold_point_to_tab_point(new_end),
-                        }
-                    })
-                    .collect()
-            } else {
-                vec![]
+            if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) {
+                fold_edit.old.end.0 += offset as usize + 1;
+                fold_edit.new.end.0 += offset as usize + 1;
             }
-        } else {
-            new_snapshot.version += 1;
-            vec![TabEdit {
-                old: TabPoint::zero()..old_snapshot.max_point(),
-                new: TabPoint::zero()..new_snapshot.max_point(),
-            }]
+        }
+
+        let new_snapshot = TabSnapshot {
+            fold_snapshot,
+            tab_size,
+            max_expansion_column: old_snapshot.max_expansion_column,
+            version: new_version,
         };
+
+        let _old_alloc_ptr = fold_edits.as_ptr();
+        // Combine any edits that overlap due to the expansion.
+        let mut fold_edits = fold_edits.into_iter();
+        let mut first_edit = fold_edits.next().unwrap();
+        // This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them.
+        #[allow(clippy::filter_map_identity)]
+        let mut v: Vec<_> = fold_edits
+            .scan(&mut first_edit, |state, edit| {
+                if state.old.end >= edit.old.start {
+                    state.old.end = edit.old.end;
+                    state.new.end = edit.new.end;
+                    Some(None) // Skip this edit, it's merged
+                } else {
+                    let new_state = edit;
+                    let result = Some(Some(state.clone())); // Yield the previous edit
+                    **state = new_state;
+                    result
+                }
+            })
+            .filter_map(|x| x)
+            .collect();
+        v.push(first_edit);
+        debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated");
+        let tab_edits = v
+            .into_iter()
+            .map(|fold_edit| {
+                let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
+                let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
+                let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
+                let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
+                TabEdit {
+                    old: old_snapshot.fold_point_to_tab_point(old_start)
+                        ..old_snapshot.fold_point_to_tab_point(old_end),
+                    new: new_snapshot.fold_point_to_tab_point(new_start)
+                        ..new_snapshot.fold_point_to_tab_point(new_end),
+                }
+            })
+            .collect();
         *old_snapshot = new_snapshot;
         (old_snapshot.clone(), tab_edits)
     }
@@ -168,6 +193,8 @@ impl TabMap {
 pub struct TabSnapshot {
     pub fold_snapshot: FoldSnapshot,
     pub tab_size: NonZeroU32,
+    /// The maximum column up to which a tab can expand.
+    /// Any tab after this column will not expand.
     pub max_expansion_column: u32,
     pub version: usize,
 }
@@ -365,10 +392,11 @@ impl TabSnapshot {
     }
 
     #[ztracing::instrument(skip_all)]
-    fn expand_tabs<'a, I>(&self, mut cursor: TabStopCursor<'a, I>, column: u32) -> u32
-    where
-        I: Iterator<Item = Chunk<'a>>,
-    {
+    fn expand_tabs<'a>(&self, mut cursor: TabStopCursor<'a>, column: u32) -> u32 {
+        // We only ever act on a single row at a time. Other layers build a transform
+        // sum tree and then just run through it; we can't do that here, as we need the
+        // previous layer's chunk to understand the tabs of the corresponding row. We
+        // can still search forward: seek to a row, then traverse to the target column.
         let tab_size = self.tab_size.get();
 
         let end_column = column.min(self.max_expansion_column);
@@ -376,7 +404,7 @@ impl TabSnapshot {
         let mut tab_count = 0;
         let mut expanded_tab_len = 0;
 
-        while let Some(tab_stop) = cursor.seek(seek_target) {
+        while let Some(tab_stop) = cursor.seek_forward(seek_target) {
             let expanded_chars_old = tab_stop.char_offset + expanded_tab_len - tab_count;
             let tab_len = tab_size - ((expanded_chars_old - 1) % tab_size);
             tab_count += 1;
@@ -399,22 +427,19 @@ impl TabSnapshot {
     }
 
     #[ztracing::instrument(skip_all)]
-    fn collapse_tabs<'a, I>(
+    fn collapse_tabs<'a>(
         &self,
-        mut cursor: TabStopCursor<'a, I>,
+        mut cursor: TabStopCursor<'a>,
         column: u32,
         bias: Bias,
-    ) -> (u32, u32, u32)
-    where
-        I: Iterator<Item = Chunk<'a>>,
-    {
+    ) -> (u32, u32, u32) {
         let tab_size = self.tab_size.get();
         let mut collapsed_column = column;
         let mut seek_target = column.min(self.max_expansion_column);
         let mut tab_count = 0;
         let mut expanded_tab_len = 0;
 
-        while let Some(tab_stop) = cursor.seek(seek_target) {
+        while let Some(tab_stop) = cursor.seek_forward(seek_target) {
             // Calculate how much we want to expand this tab stop (into spaces)
             let expanded_chars_old = tab_stop.char_offset + expanded_tab_len - tab_count;
             let tab_len = tab_size - ((expanded_chars_old - 1) % tab_size);
@@ -617,13 +642,7 @@ impl<'a> Iterator for TabChunks<'a> {
             }
         }
 
-        let first_tab_ix = if self.chunk.tabs != 0 {
-            self.chunk.tabs.trailing_zeros() as usize
-        } else {
-            self.chunk.text.len()
-        };
-
-        if first_tab_ix == 0 {
+        if self.chunk.tabs & 1 != 0 {
             self.chunk.text = &self.chunk.text[1..];
             self.chunk.tabs >>= 1;
             self.chunk.chars >>= 1;
@@ -654,12 +673,46 @@ impl<'a> Iterator for TabChunks<'a> {
             });
         }
 
-        let prefix_len = first_tab_ix;
+        // Fast path: no tabs in the remaining chunk, return it directly
+        if self.chunk.tabs == 0 {
+            let chunk = self.chunk.clone();
+            self.chunk.text = "";
+            self.chunk.tabs = 0;
+            self.chunk.chars = 0;
+            self.chunk.newlines = 0;
+            let chunk_len = chunk.text.len() as u32;
+
+            let newline_count = chunk.newlines.count_ones();
+            if newline_count > 0 {
+                let last_newline_bit = 128 - chunk.newlines.leading_zeros();
+                let chars_after_last_newline =
+                    chunk.chars.unbounded_shr(last_newline_bit).count_ones();
+                let bytes_after_last_newline = chunk_len - last_newline_bit;
+
+                self.column = chars_after_last_newline;
+                self.input_column = bytes_after_last_newline;
+                self.output_position = Point::new(
+                    self.output_position.row + newline_count,
+                    bytes_after_last_newline,
+                );
+            } else {
+                let char_count = chunk.chars.count_ones();
+                self.column += char_count;
+                if !self.inside_leading_tab {
+                    self.input_column += chunk_len;
+                }
+                self.output_position.column += chunk_len;
+            }
+
+            return Some(chunk);
+        }
+
+        // Split at the next tab position
+        let prefix_len = self.chunk.tabs.trailing_zeros() as usize;
         let (prefix, suffix) = self.chunk.text.split_at(prefix_len);
 
         let mask = 1u128.unbounded_shl(prefix_len as u32).wrapping_sub(1);
         let prefix_chars = self.chunk.chars & mask;
-        let prefix_tabs = self.chunk.tabs & mask;
         let prefix_newlines = self.chunk.newlines & mask;
 
         self.chunk.text = suffix;
@@ -692,13 +745,156 @@ impl<'a> Iterator for TabChunks<'a> {
         Some(Chunk {
             text: prefix,
             chars: prefix_chars,
-            tabs: prefix_tabs,
+            tabs: 0,
             newlines: prefix_newlines,
             ..self.chunk.clone()
         })
     }
 }
 
+struct TabStopCursor<'a> {
+    chunks: FoldChunks<'a>,
+    byte_offset: u32,
+    char_offset: u32,
+    /// The chunk currently being scanned, paired with the byte offset within it
+    /// at which scanning resumes (just past the last position iterated through).
+    current_chunk: Option<(TabStopChunk<'a>, u32)>,
+}
+
+struct TabStopChunk<'a> {
+    chars: u128,
+    text: &'a str,
+    tabs: u128,
+}
+
+impl<'a> TabStopCursor<'a> {
+    fn new(chunks: FoldChunks<'a>) -> Self {
+        Self {
+            chunks,
+            byte_offset: 0,
+            char_offset: 0,
+            current_chunk: None,
+        }
+    }
+
+    fn bytes_until_next_char(&self) -> Option<usize> {
+        self.current_chunk.as_ref().map(|&(ref chunk, idx)| {
+            let higher_chars = chunk.chars.unbounded_shr(idx + 1);
+
+            if higher_chars != 0 {
+                higher_chars.trailing_zeros() as usize + 1
+            } else {
+                chunk.text.len() - idx as usize
+            }
+        })
+    }
+
+    fn is_char_boundary(&self) -> bool {
+        self.current_chunk
+            .as_ref()
+            .is_some_and(|&(ref chunk, idx)| {
+                (1u128.unbounded_shl(idx) & chunk.chars) != 0 || idx as usize == chunk.text.len()
+            })
+    }
+
+    /// distance: length to move forward while searching for the next tab stop
+    #[ztracing::instrument(skip_all)]
+    fn seek_forward(&mut self, distance: u32) -> Option<TabStop> {
+        if distance == 0 {
+            return None;
+        }
+
+        let mut distance_remaining = distance;
+
+        while let Some((mut chunk, chunk_position)) = self.current_chunk.take().or_else(|| {
+            self.chunks.next().map(|chunk| {
+                (
+                    TabStopChunk {
+                        chars: chunk.chars,
+                        text: chunk.text,
+                        tabs: chunk.tabs,
+                    },
+                    0,
+                )
+            })
+        }) {
+            let chunk_len = chunk.text.len() as u32;
+
+            if chunk.tabs == 0 {
+                let chunk_remaining = chunk_len - chunk_position;
+                if chunk_remaining >= distance_remaining {
+                    let end = chunk_position + distance_remaining;
+                    self.byte_offset += distance_remaining;
+                    self.char_offset +=
+                        count_chars_in_byte_range(chunk_position..(end - 1), chunk.chars);
+                    if end < 128 {
+                        self.current_chunk = Some((chunk, end));
+                    }
+                    return None;
+                }
+
+                self.byte_offset += chunk_remaining;
+                self.char_offset +=
+                    count_chars_in_byte_range(chunk_position..(chunk_len - 1), chunk.chars);
+                distance_remaining -= chunk_remaining;
+                continue;
+            }
+
+            let tab_end = chunk.tabs.trailing_zeros() + 1;
+            let bytes_to_tab = tab_end - chunk_position;
+
+            if bytes_to_tab > distance_remaining {
+                let end = chunk_position + distance_remaining;
+                self.byte_offset += distance_remaining;
+                self.char_offset +=
+                    count_chars_in_byte_range(chunk_position..(end - 1), chunk.chars);
+                self.current_chunk = Some((chunk, end));
+                return None;
+            }
+
+            self.byte_offset += bytes_to_tab;
+            self.char_offset +=
+                count_chars_in_byte_range(chunk_position..(tab_end - 1), chunk.chars);
+
+            let tabstop = TabStop {
+                char_offset: self.char_offset,
+                byte_offset: self.byte_offset,
+            };
+
+            chunk.tabs = (chunk.tabs - 1) & chunk.tabs;
+
+            if tab_end != chunk_len {
+                self.current_chunk = Some((chunk, tab_end));
+            }
+
+            return Some(tabstop);
+        }
+
+        None
+    }
+
+    fn byte_offset(&self) -> u32 {
+        self.byte_offset
+    }
+
+    fn char_offset(&self) -> u32 {
+        self.char_offset
+    }
+}
+
+#[inline(always)]
+fn count_chars_in_byte_range(range: Range<u32>, bitmap: u128) -> u32 {
+    let low_mask = u128::MAX << range.start;
+    let high_mask = u128::MAX >> (127 - range.end);
+    (bitmap & low_mask & high_mask).count_ones()
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+struct TabStop {
+    char_offset: u32,
+    byte_offset: u32,
+}
+
 #[cfg(test)]
 mod tests {
     use std::mem;
@@ -814,40 +1010,21 @@ mod tests {
 
     #[gpui::test]
     fn test_expand_tabs(cx: &mut gpui::App) {
-        let test_values = [
-            ("κg🏀 f\nwo🏀❌by🍐❎β🍗c\tβ❎ \ncλ🎉", 17),
-            (" \twςe", 4),
-            ("fε", 1),
-            ("i❎\t", 3),
-        ];
-        let buffer = MultiBuffer::build_simple("", cx);
+        let input = "A\tBC\tDEF\tG\tHI\tJ\tK\tL\tM";
+
+        let buffer = MultiBuffer::build_simple(input, cx);
         let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot);
         let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
         let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
 
-        for (text, column) in test_values {
-            let mut tabs = 0u128;
-            let mut chars = 0u128;
-            for (idx, c) in text.char_indices() {
-                if c == '\t' {
-                    tabs |= 1 << idx;
-                }
-                chars |= 1 << idx;
-            }
-
-            let chunks = [Chunk {
-                text,
-                tabs,
-                chars,
-                ..Default::default()
-            }];
-
-            let cursor = TabStopCursor::new(chunks);
+        for (ix, _) in input.char_indices() {
+            let fold_point = FoldPoint::new(0, ix as u32);
 
             assert_eq!(
-                tab_snapshot.expected_expand_tabs(text.chars(), column),
-                tab_snapshot.expand_tabs(cursor, column)
+                tab_snapshot.expected_to_tab_point(fold_point),
+                tab_snapshot.fold_point_to_tab_point(fold_point),
+                "Failed with fold_point at column {ix}"
             );
         }
     }
@@ -1263,7 +1440,7 @@ mod tests {
             Default::default(),
         );
         let mut cursor = TabStopCursor::new(chunks);
-        assert!(cursor.seek(0).is_none());
+        assert!(cursor.seek_forward(0).is_none());
         let mut tab_stops = Vec::new();
 
         let mut all_tab_stops = Vec::new();
@@ -1279,7 +1456,7 @@ mod tests {
             }
         }
 
-        while let Some(tab_stop) = cursor.seek(u32::MAX) {
+        while let Some(tab_stop) = cursor.seek_forward(u32::MAX) {
             tab_stops.push(tab_stop);
         }
         pretty_assertions::assert_eq!(tab_stops.as_slice(), all_tab_stops.as_slice(),);
@@ -1314,7 +1491,7 @@ mod tests {
             }
         }
 
-        while let Some(tab_stop) = cursor.seek(u32::MAX) {
+        while let Some(tab_stop) = cursor.seek_forward(u32::MAX) {
             actual_tab_stops.push(tab_stop);
         }
         pretty_assertions::assert_eq!(actual_tab_stops.as_slice(), expected_tab_stops.as_slice(),);
@@ -1379,7 +1556,7 @@ mod tests {
 
             let mut found_tab_stops = Vec::new();
             let mut position = distance;
-            while let Some(tab_stop) = cursor.seek(position) {
+            while let Some(tab_stop) = cursor.seek_forward(position) {
                 found_tab_stops.push(tab_stop);
                 position = distance - tab_stop.byte_offset;
             }
@@ -1425,7 +1602,7 @@ mod tests {
             Default::default(),
         );
         let mut cursor = TabStopCursor::new(chunks);
-        assert!(cursor.seek(0).is_none());
+        assert!(cursor.seek_forward(0).is_none());
 
         let mut expected_tab_stops = Vec::new();
         let mut byte_offset = 0;
@@ -1441,7 +1618,7 @@ mod tests {
         }
 
         let mut actual_tab_stops = Vec::new();
-        while let Some(tab_stop) = cursor.seek(u32::MAX) {
+        while let Some(tab_stop) = cursor.seek_forward(u32::MAX) {
             actual_tab_stops.push(tab_stop);
         }
 
@@ -1487,7 +1664,7 @@ mod tests {
 
             let mut found_tab_stops = Vec::new();
             let mut position = distance;
-            while let Some(tab_stop) = cursor.seek(position) {
+            while let Some(tab_stop) = cursor.seek_forward(position) {
                 found_tab_stops.push(tab_stop);
                 position = distance - tab_stop.byte_offset;
             }
@@ -1520,165 +1697,3 @@ mod tests {
         }
     }
 }
-
-struct TabStopCursor<'a, I>
-where
-    I: Iterator<Item = Chunk<'a>>,
-{
-    chunks: I,
-    byte_offset: u32,
-    char_offset: u32,
-    /// Chunk
-    /// last tab position iterated through
-    current_chunk: Option<(Chunk<'a>, u32)>,
-}
-
-impl<'a, I> TabStopCursor<'a, I>
-where
-    I: Iterator<Item = Chunk<'a>>,
-{
-    #[ztracing::instrument(skip_all)]
-    fn new(chunks: impl IntoIterator<Item = Chunk<'a>, IntoIter = I>) -> Self {
-        Self {
-            chunks: chunks.into_iter(),
-            byte_offset: 0,
-            char_offset: 0,
-            current_chunk: None,
-        }
-    }
-
-    #[ztracing::instrument(skip_all)]
-    fn bytes_until_next_char(&self) -> Option<usize> {
-        self.current_chunk.as_ref().and_then(|(chunk, idx)| {
-            let mut idx = *idx;
-            let mut diff = 0;
-            while idx > 0 && chunk.chars & (1u128.unbounded_shl(idx)) == 0 {
-                idx -= 1;
-                diff += 1;
-            }
-
-            if chunk.chars & (1 << idx) != 0 {
-                Some(
-                    (chunk.text[idx as usize..].chars().next()?)
-                        .len_utf8()
-                        .saturating_sub(diff),
-                )
-            } else {
-                None
-            }
-        })
-    }
-
-    #[ztracing::instrument(skip_all)]
-    fn is_char_boundary(&self) -> bool {
-        self.current_chunk
-            .as_ref()
-            .is_some_and(|(chunk, idx)| (chunk.chars & 1u128.unbounded_shl(*idx)) != 0)
-    }
-
-    /// distance: length to move forward while searching for the next tab stop
-    #[ztracing::instrument(skip_all)]
-    fn seek(&mut self, distance: u32) -> Option<TabStop> {
-        if distance == 0 {
-            return None;
-        }
-
-        let mut distance_traversed = 0;
-
-        while let Some((mut chunk, chunk_position)) = self
-            .current_chunk
-            .take()
-            .or_else(|| self.chunks.next().zip(Some(0)))
-        {
-            if chunk.tabs == 0 {
-                let chunk_distance = chunk.text.len() as u32 - chunk_position;
-                if chunk_distance + distance_traversed >= distance {
-                    let overshoot = distance_traversed.abs_diff(distance);
-
-                    self.byte_offset += overshoot;
-                    self.char_offset += get_char_offset(
-                        chunk_position..(chunk_position + overshoot).saturating_sub(1),
-                        chunk.chars,
-                    );
-
-                    if chunk_position + overshoot < 128 {
-                        self.current_chunk = Some((chunk, chunk_position + overshoot));
-                    }
-
-                    return None;
-                }
-
-                self.byte_offset += chunk_distance;
-                self.char_offset += get_char_offset(
-                    chunk_position..(chunk_position + chunk_distance).saturating_sub(1),
-                    chunk.chars,
-                );
-                distance_traversed += chunk_distance;
-                continue;
-            }
-            let tab_position = chunk.tabs.trailing_zeros() + 1;
-
-            if distance_traversed + tab_position - chunk_position > distance {
-                let cursor_position = distance_traversed.abs_diff(distance);
-
-                self.char_offset += get_char_offset(
-                    chunk_position..(chunk_position + cursor_position - 1),
-                    chunk.chars,
-                );
-                self.current_chunk = Some((chunk, cursor_position + chunk_position));
-                self.byte_offset += cursor_position;
-
-                return None;
-            }
-
-            self.byte_offset += tab_position - chunk_position;
-            self.char_offset += get_char_offset(chunk_position..(tab_position - 1), chunk.chars);
-
-            let tabstop = TabStop {
-                char_offset: self.char_offset,
-                byte_offset: self.byte_offset,
-            };
-
-            chunk.tabs = (chunk.tabs - 1) & chunk.tabs;
-
-            if tab_position as usize != chunk.text.len() {
-                self.current_chunk = Some((chunk, tab_position));
-            }
-
-            return Some(tabstop);
-        }
-
-        None
-    }
-
-    fn byte_offset(&self) -> u32 {
-        self.byte_offset
-    }
-
-    fn char_offset(&self) -> u32 {
-        self.char_offset
-    }
-}
-
-#[inline(always)]
-fn get_char_offset(range: Range<u32>, bit_map: u128) -> u32 {
-    if range.start == range.end {
-        return if (1u128 << range.start) & bit_map == 0 {
-            0
-        } else {
-            1
-        };
-    }
-    let end_shift: u128 = 127u128 - range.end as u128;
-    let mut bit_mask = (u128::MAX >> range.start) << range.start;
-    bit_mask = (bit_mask << end_shift) >> end_shift;
-    let bit_map = bit_map & bit_mask;
-
-    bit_map.count_ones()
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-struct TabStop {
-    char_offset: u32,
-    byte_offset: u32,
-}

crates/editor/src/editor.rs 🔗

@@ -237,6 +237,7 @@ use crate::{
 };
 
 pub const FILE_HEADER_HEIGHT: u32 = 2;
+pub const BUFFER_HEADER_PADDING: Rems = rems(0.25);
 pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1;
 const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
 const MAX_LINE_LEN: usize = 1024;
@@ -1346,7 +1347,7 @@ pub struct Editor {
     suppress_selection_callback: bool,
     applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
     accent_data: Option<AccentData>,
-    fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
+    bracket_fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
     semantic_token_state: SemanticTokenState,
     pub(crate) refresh_matching_bracket_highlights_task: Task<()>,
     refresh_document_symbols_task: Shared<Task<()>>,
@@ -1355,6 +1356,7 @@ pub struct Editor {
     outline_symbols_at_cursor: Option<(BufferId, Vec<OutlineItem<Anchor>>)>,
     sticky_headers_task: Task<()>,
     sticky_headers: Option<Vec<OutlineItem<Anchor>>>,
+    pub(crate) colorize_brackets_task: Task<()>,
 }
 
 #[derive(Debug, PartialEq)]
@@ -1971,6 +1973,8 @@ impl Editor {
             .clone_state(&self.scroll_manager, &my_snapshot, &clone_snapshot, cx);
         clone.searchable = self.searchable;
         clone.read_only = self.read_only;
+        clone.buffers_with_disabled_indent_guides =
+            self.buffers_with_disabled_indent_guides.clone();
         clone
     }
 
@@ -2397,7 +2401,9 @@ impl Editor {
             diagnostics_max_severity,
             hard_wrap: None,
             completion_provider: project.clone().map(|project| Rc::new(project) as _),
-            semantics_provider: project.clone().map(|project| Rc::new(project) as _),
+            semantics_provider: project
+                .as_ref()
+                .map(|project| Rc::new(project.downgrade()) as _),
             collaboration_hub: project.clone().map(|project| Box::new(project) as _),
             project,
             blink_manager: blink_manager.clone(),
@@ -2597,7 +2603,7 @@ impl Editor {
             applicable_language_settings: HashMap::default(),
             semantic_token_state: SemanticTokenState::new(cx, full_mode),
             accent_data: None,
-            fetched_tree_sitter_chunks: HashMap::default(),
+            bracket_fetched_tree_sitter_chunks: HashMap::default(),
             number_deleted_lines: false,
             refresh_matching_bracket_highlights_task: Task::ready(()),
             refresh_document_symbols_task: Task::ready(()).shared(),
@@ -2606,6 +2612,7 @@ impl Editor {
             outline_symbols_at_cursor: None,
             sticky_headers_task: Task::ready(()),
             sticky_headers: None,
+            colorize_brackets_task: Task::ready(()),
         };
 
         if is_minimap {
@@ -3625,7 +3632,7 @@ impl Editor {
             self.refresh_document_highlights(cx);
             refresh_linked_ranges(self, window, cx);
 
-            self.refresh_selected_text_highlights(false, window, cx);
+            self.refresh_selected_text_highlights(&display_map, false, window, cx);
             self.refresh_matching_bracket_highlights(&display_map, cx);
             self.refresh_outline_symbols_at_cursor(cx);
             self.update_visible_edit_prediction(window, cx);
@@ -4893,8 +4900,10 @@ impl Editor {
                         .scope_context(Some(CharScopeContext::LinkedEdit));
                     classifier.is_word(char)
                 });
+                let is_dot = text.as_ref() == ".";
+                let should_apply_linked_edit = is_word_char || is_dot;
 
-                if is_word_char {
+                if should_apply_linked_edit {
                     let anchor_range = start_anchor.text_anchor..anchor.text_anchor;
                     linked_edits.push(&self, anchor_range, text.clone(), cx);
                 } else {
@@ -5079,6 +5088,10 @@ impl Editor {
     }
 
     pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context<Self>) {
+        if self.read_only(cx) {
+            return;
+        }
+
         self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
         self.transact(window, cx, |this, window, cx| {
             let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = {
@@ -5207,29 +5220,48 @@ impl Editor {
                                 extra_line_additional_indent,
                                 prevent_auto_indent,
                             } => {
+                                let auto_indent_mode =
+                                    buffer.language_settings_at(start, cx).auto_indent;
+                                let preserve_indent =
+                                    auto_indent_mode != language::AutoIndentMode::None;
+                                let apply_syntax_indent =
+                                    auto_indent_mode == language::AutoIndentMode::SyntaxAware;
                                 let capacity_for_delimiter =
                                     delimiter.as_deref().map(str::len).unwrap_or_default();
+                                let existing_indent_len = if preserve_indent {
+                                    existing_indent.len as usize
+                                } else {
+                                    0
+                                };
                                 let extra_line_len = extra_line_additional_indent
-                                    .map(|i| 1 + existing_indent.len as usize + i.len as usize)
+                                    .map(|i| 1 + existing_indent_len + i.len as usize)
                                     .unwrap_or(0);
                                 let mut new_text = String::with_capacity(
                                     1 + capacity_for_delimiter
-                                        + existing_indent.len as usize
+                                        + existing_indent_len
                                         + additional_indent.len as usize
                                         + extra_line_len,
                                 );
                                 new_text.push('\n');
-                                new_text.extend(existing_indent.chars());
+                                if preserve_indent {
+                                    new_text.extend(existing_indent.chars());
+                                }
                                 new_text.extend(additional_indent.chars());
                                 if let Some(delimiter) = &delimiter {
                                     new_text.push_str(delimiter);
                                 }
                                 if let Some(extra_indent) = extra_line_additional_indent {
                                     new_text.push('\n');
-                                    new_text.extend(existing_indent.chars());
+                                    if preserve_indent {
+                                        new_text.extend(existing_indent.chars());
+                                    }
                                     new_text.extend(extra_indent.chars());
                                 }
-                                (start, new_text, *prevent_auto_indent)
+                                (
+                                    start,
+                                    new_text,
+                                    *prevent_auto_indent || !apply_syntax_indent,
+                                )
                             }
                         };
 
@@ -5281,6 +5313,10 @@ impl Editor {
     }
 
     pub fn newline_above(&mut self, _: &NewlineAbove, window: &mut Window, cx: &mut Context<Self>) {
+        if self.read_only(cx) {
+            return;
+        }
+
         self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
 
         let buffer = self.buffer.read(cx);
@@ -5348,6 +5384,10 @@ impl Editor {
     }
 
     pub fn newline_below(&mut self, _: &NewlineBelow, window: &mut Window, cx: &mut Context<Self>) {
+        if self.read_only(cx) {
+            return;
+        }
+
         self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
 
         let mut buffer_edits: HashMap<EntityId, (Entity<Buffer>, Vec<Point>)> = HashMap::default();
@@ -7499,7 +7539,7 @@ impl Editor {
 
     fn prepare_highlight_query_from_selection(
         &mut self,
-        window: &Window,
+        snapshot: &DisplaySnapshot,
         cx: &mut Context<Editor>,
     ) -> Option<(String, Range<Anchor>)> {
         if matches!(self.mode, EditorMode::SingleLine) {
@@ -7511,7 +7551,6 @@ impl Editor {
         if self.selections.count() != 1 || self.selections.line_mode() {
             return None;
         }
-        let snapshot = self.snapshot(window, cx);
         let selection = self.selections.newest::<Point>(&snapshot);
         // If the selection spans multiple rows OR it is empty
         if selection.start.row != selection.end.row
@@ -7533,6 +7572,7 @@ impl Editor {
     #[ztracing::instrument(skip_all)]
     fn update_selection_occurrence_highlights(
         &mut self,
+        multi_buffer_snapshot: MultiBufferSnapshot,
         query_text: String,
         query_range: Range<Anchor>,
         multi_buffer_range_to_query: Range<Point>,
@@ -7540,7 +7580,6 @@ impl Editor {
         window: &mut Window,
         cx: &mut Context<Editor>,
     ) -> Task<()> {
-        let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
         cx.spawn_in(window, async move |editor, cx| {
             if use_debounce {
                 cx.background_executor()
@@ -7556,7 +7595,7 @@ impl Editor {
                     .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty());
                 let mut match_ranges = Vec::new();
                 let Ok(regex) = project::search::SearchQuery::text(
-                    query_text.clone(),
+                    query_text,
                     false,
                     false,
                     false,
@@ -7718,12 +7757,13 @@ impl Editor {
     #[ztracing::instrument(skip_all)]
     fn refresh_selected_text_highlights(
         &mut self,
+        snapshot: &DisplaySnapshot,
         on_buffer_edit: bool,
         window: &mut Window,
         cx: &mut Context<Editor>,
     ) {
         let Some((query_text, query_range)) =
-            self.prepare_highlight_query_from_selection(window, cx)
+            self.prepare_highlight_query_from_selection(snapshot, cx)
         else {
             self.clear_background_highlights(HighlightKey::SelectedTextHighlight, cx);
             self.quick_selection_highlight_task.take();
@@ -7755,6 +7795,7 @@ impl Editor {
             self.quick_selection_highlight_task = Some((
                 query_range.clone(),
                 self.update_selection_occurrence_highlights(
+                    snapshot.buffer.clone(),
                     query_text.clone(),
                     query_range.clone(),
                     multi_buffer_visible_range,
@@ -7780,6 +7821,7 @@ impl Editor {
             self.debounced_selection_highlight_task = Some((
                 query_range.clone(),
                 self.update_selection_occurrence_highlights(
+                    snapshot.buffer.clone(),
                     query_text,
                     query_range,
                     multi_buffer_full_range,
@@ -9888,7 +9930,14 @@ impl Editor {
 
         origin.x -= BORDER_WIDTH;
 
-        window.defer_draw(element, origin, 1);
+        window.with_content_mask(
+            Some(gpui::ContentMask {
+                bounds: *text_bounds,
+            }),
+            |window| {
+                window.defer_draw(element, origin, 1, Some(window.content_mask()));
+            },
+        );
 
         // Do not return an element, since it will already be drawn due to defer_draw.
         None
@@ -11307,6 +11356,15 @@ impl Editor {
             // would do nothing for single line selections individual cursors.
             let end = if selection.start.row == selection.end.row {
                 MultiBufferRow(selection.start.row + 1)
+            } else if selection.end.column == 0 {
+                // If the selection ends at the start of a line, it's logically at the end of the
+                // previous line (plus its newline).
+                // Don't include the end line unless there's only one line selected.
+                if selection.start.row + 1 == selection.end.row {
+                    MultiBufferRow(selection.end.row)
+                } else {
+                    MultiBufferRow(selection.end.row - 1)
+                }
             } else {
                 MultiBufferRow(selection.end.row)
             };
@@ -13664,94 +13722,94 @@ impl Editor {
         let selections = self.selections.all::<Point>(&self.display_snapshot(cx));
         let buffer = self.buffer.read(cx).read(cx);
         let mut text = String::new();
-
         let mut clipboard_selections = Vec::with_capacity(selections.len());
-        {
-            let max_point = buffer.max_point();
-            let mut is_first = true;
-            let mut prev_selection_was_entire_line = false;
-            for selection in &selections {
-                let mut start = selection.start;
-                let mut end = selection.end;
-                let is_entire_line = selection.is_empty() || self.selections.line_mode();
-                let mut add_trailing_newline = false;
-                if is_entire_line {
-                    start = Point::new(start.row, 0);
-                    let next_line_start = Point::new(end.row + 1, 0);
-                    if next_line_start <= max_point {
-                        end = next_line_start;
-                    } else {
-                        // We're on the last line without a trailing newline.
-                        // Copy to the end of the line and add a newline afterwards.
-                        end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row)));
-                        add_trailing_newline = true;
-                    }
+
+        let max_point = buffer.max_point();
+        let mut is_first = true;
+        for selection in &selections {
+            let mut start = selection.start;
+            let mut end = selection.end;
+            let is_entire_line = selection.is_empty() || self.selections.line_mode();
+            let mut add_trailing_newline = false;
+            if is_entire_line {
+                start = Point::new(start.row, 0);
+                let next_line_start = Point::new(end.row + 1, 0);
+                if next_line_start <= max_point {
+                    end = next_line_start;
+                } else {
+                    // We're on the last line without a trailing newline.
+                    // Copy to the end of the line and add a newline afterwards.
+                    end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row)));
+                    add_trailing_newline = true;
                 }
+            }
 
-                let mut trimmed_selections = Vec::new();
-                if strip_leading_indents && end.row.saturating_sub(start.row) > 0 {
-                    let row = MultiBufferRow(start.row);
-                    let first_indent = buffer.indent_size_for_line(row);
-                    if first_indent.len == 0 || start.column > first_indent.len {
-                        trimmed_selections.push(start..end);
-                    } else {
-                        trimmed_selections.push(
-                            Point::new(row.0, first_indent.len)
-                                ..Point::new(row.0, buffer.line_len(row)),
-                        );
-                        for row in start.row + 1..=end.row {
-                            let mut line_len = buffer.line_len(MultiBufferRow(row));
-                            if row == end.row {
-                                line_len = end.column;
-                            }
-                            if line_len == 0 {
-                                trimmed_selections
-                                    .push(Point::new(row, 0)..Point::new(row, line_len));
-                                continue;
-                            }
-                            let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row));
-                            if row_indent_size.len >= first_indent.len {
-                                trimmed_selections.push(
-                                    Point::new(row, first_indent.len)..Point::new(row, line_len),
-                                );
-                            } else {
-                                trimmed_selections.clear();
-                                trimmed_selections.push(start..end);
-                                break;
-                            }
+            let mut trimmed_selections = Vec::new();
+            if strip_leading_indents && end.row.saturating_sub(start.row) > 0 {
+                let row = MultiBufferRow(start.row);
+                let first_indent = buffer.indent_size_for_line(row);
+                if first_indent.len == 0 || start.column > first_indent.len {
+                    trimmed_selections.push(start..end);
+                } else {
+                    trimmed_selections.push(
+                        Point::new(row.0, first_indent.len)
+                            ..Point::new(row.0, buffer.line_len(row)),
+                    );
+                    for row in start.row + 1..=end.row {
+                        let mut line_len = buffer.line_len(MultiBufferRow(row));
+                        if row == end.row {
+                            line_len = end.column;
+                        }
+                        if line_len == 0 {
+                            trimmed_selections.push(Point::new(row, 0)..Point::new(row, line_len));
+                            continue;
+                        }
+                        let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row));
+                        if row_indent_size.len >= first_indent.len {
+                            trimmed_selections
+                                .push(Point::new(row, first_indent.len)..Point::new(row, line_len));
+                        } else {
+                            trimmed_selections.clear();
+                            trimmed_selections.push(start..end);
+                            break;
                         }
                     }
-                } else {
-                    trimmed_selections.push(start..end);
                 }
+            } else {
+                trimmed_selections.push(start..end);
+            }
 
-                let is_multiline_trim = trimmed_selections.len() > 1;
-                for trimmed_range in trimmed_selections {
-                    if is_first {
-                        is_first = false;
-                    } else if is_multiline_trim || !prev_selection_was_entire_line {
-                        text += "\n";
-                    }
-                    prev_selection_was_entire_line = is_entire_line && !is_multiline_trim;
-                    let mut len = 0;
-                    for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) {
-                        text.push_str(chunk);
-                        len += chunk.len();
-                    }
-                    if add_trailing_newline {
-                        text.push('\n');
-                        len += 1;
+            let is_multiline_trim = trimmed_selections.len() > 1;
+            let mut selection_len: usize = 0;
+            let prev_selection_was_entire_line = is_entire_line && !is_multiline_trim;
+
+            for trimmed_range in trimmed_selections {
+                if is_first {
+                    is_first = false;
+                } else if is_multiline_trim || !prev_selection_was_entire_line {
+                    text.push('\n');
+                    if is_multiline_trim {
+                        selection_len += 1;
                     }
-                    clipboard_selections.push(ClipboardSelection::for_buffer(
-                        len,
-                        is_entire_line,
-                        trimmed_range,
-                        &buffer,
-                        self.project.as_ref(),
-                        cx,
-                    ));
+                }
+                for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) {
+                    text.push_str(chunk);
+                    selection_len += chunk.len();
+                }
+                if add_trailing_newline {
+                    text.push('\n');
+                    selection_len += 1;
                 }
             }
+
+            clipboard_selections.push(ClipboardSelection::for_buffer(
+                selection_len,
+                is_entire_line,
+                start..end,
+                &buffer,
+                self.project.as_ref(),
+                cx,
+            ));
         }
 
         cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata(
@@ -15340,7 +15398,7 @@ impl Editor {
     pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context<Self>) {
         self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx);
         self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
-            s.select_ranges(vec![Anchor::min()..Anchor::max()]);
+            s.select_ranges([Anchor::min()..Anchor::max()]);
         });
     }
 
@@ -15372,7 +15430,7 @@ impl Editor {
             .into_iter()
             .map(|selection| selection.start..selection.end)
             .collect::<Vec<_>>();
-        self.unfold_ranges(&selections, true, true, cx);
+        self.unfold_ranges(&selections, true, false, cx);
 
         let mut new_selection_ranges = Vec::new();
         {
@@ -15414,7 +15472,7 @@ impl Editor {
                 }
             }
         }
-        self.change_selections(Default::default(), window, cx, |s| {
+        self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
             s.select_ranges(new_selection_ranges);
         });
     }
@@ -23932,7 +23990,7 @@ impl Editor {
     }
 
     pub fn refresh_inline_values(&mut self, cx: &mut Context<Self>) {
-        let Some(project) = self.project.clone() else {
+        let Some(semantics) = self.semantics_provider.clone() else {
             return;
         };
 
@@ -23967,7 +24025,7 @@ impl Editor {
                     let range =
                         buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor;
 
-                    project.inline_values(buffer, range, cx)
+                    semantics.inline_values(buffer, range, cx)
                 })
                 .ok()
                 .flatten()?
@@ -24097,7 +24155,7 @@ impl Editor {
                 self.update_lsp_data(Some(buffer_id), window, cx);
                 self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
                 self.colorize_brackets(false, cx);
-                self.refresh_selected_text_highlights(true, window, cx);
+                self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx);
                 cx.emit(EditorEvent::ExcerptsAdded {
                     buffer: buffer.clone(),
                     predecessor: *predecessor,
@@ -24122,6 +24180,11 @@ impl Editor {
                         display_map.clear_lsp_folding_ranges(*buffer_id, cx);
                     });
                 }
+
+                self.display_map.update(cx, |display_map, cx| {
+                    display_map.unfold_buffers(removed_buffer_ids.iter().copied(), cx);
+                });
+
                 jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx);
                 cx.emit(EditorEvent::ExcerptsRemoved {
                     ids: ids.clone(),
@@ -24144,7 +24207,7 @@ impl Editor {
                 self.refresh_document_highlights(cx);
                 let snapshot = multibuffer.read(cx).snapshot(cx);
                 for id in ids {
-                    self.fetched_tree_sitter_chunks.remove(id);
+                    self.bracket_fetched_tree_sitter_chunks.remove(id);
                     if let Some(buffer) = snapshot.buffer_for_excerpt(*id) {
                         self.semantic_token_state
                             .invalidate_buffer(&buffer.remote_id());
@@ -24156,7 +24219,7 @@ impl Editor {
             }
             multi_buffer::Event::Reparsed(buffer_id) => {
                 self.tasks_update_task = Some(self.refresh_runnables(window, cx));
-                self.refresh_selected_text_highlights(true, window, cx);
+                self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx);
                 self.colorize_brackets(true, cx);
                 jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx);
 
@@ -26796,7 +26859,7 @@ pub trait SemanticsProvider {
         buffer: Entity<Buffer>,
         refresh: Option<RefreshForServer>,
         cx: &mut App,
-    ) -> Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>>;
+    ) -> Option<Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>>>;
 
     fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool;
 
@@ -27278,14 +27341,15 @@ impl CompletionProvider for Entity<Project> {
     }
 }
 
-impl SemanticsProvider for Entity<Project> {
+impl SemanticsProvider for WeakEntity<Project> {
     fn hover(
         &self,
         buffer: &Entity<Buffer>,
         position: text::Anchor,
         cx: &mut App,
     ) -> Option<Task<Option<Vec<project::Hover>>>> {
-        Some(self.update(cx, |project, cx| project.hover(buffer, position, cx)))
+        self.update(cx, |project, cx| project.hover(buffer, position, cx))
+            .ok()
     }
 
     fn document_highlights(
@@ -27294,9 +27358,10 @@ impl SemanticsProvider for Entity<Project> {
         position: text::Anchor,
         cx: &mut App,
     ) -> Option<Task<Result<Vec<DocumentHighlight>>>> {
-        Some(self.update(cx, |project, cx| {
+        self.update(cx, |project, cx| {
             project.document_highlights(buffer, position, cx)
-        }))
+        })
+        .ok()
     }
 
     fn definitions(
@@ -27306,12 +27371,13 @@ impl SemanticsProvider for Entity<Project> {
         kind: GotoDefinitionKind,
         cx: &mut App,
     ) -> Option<Task<Result<Option<Vec<LocationLink>>>>> {
-        Some(self.update(cx, |project, cx| match kind {
+        self.update(cx, |project, cx| match kind {
             GotoDefinitionKind::Symbol => project.definitions(buffer, position, cx),
             GotoDefinitionKind::Declaration => project.declarations(buffer, position, cx),
             GotoDefinitionKind::Type => project.type_definitions(buffer, position, cx),
             GotoDefinitionKind::Implementation => project.implementations(buffer, position, cx),
-        }))
+        })
+        .ok()
     }
 
     fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
@@ -27327,6 +27393,7 @@ impl SemanticsProvider for Entity<Project> {
                 project.any_language_server_supports_inlay_hints(buffer, cx)
             })
         })
+        .unwrap_or(false)
     }
 
     fn supports_semantic_tokens(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
@@ -27335,6 +27402,7 @@ impl SemanticsProvider for Entity<Project> {
                 project.any_language_server_supports_semantic_tokens(buffer, cx)
             })
         })
+        .unwrap_or(false)
     }
 
     fn inline_values(
@@ -27348,6 +27416,8 @@ impl SemanticsProvider for Entity<Project> {
 
             Some(project.inline_values(session, active_stack_frame, buffer_handle, range, cx))
         })
+        .ok()
+        .flatten()
     }
 
     fn applicable_inlay_chunks(
@@ -27356,15 +27426,21 @@ impl SemanticsProvider for Entity<Project> {
         ranges: &[Range<text::Anchor>],
         cx: &mut App,
     ) -> Vec<Range<BufferRow>> {
-        self.read(cx).lsp_store().update(cx, |lsp_store, cx| {
-            lsp_store.applicable_inlay_chunks(buffer, ranges, cx)
+        self.update(cx, |project, cx| {
+            project.lsp_store().update(cx, |lsp_store, cx| {
+                lsp_store.applicable_inlay_chunks(buffer, ranges, cx)
+            })
         })
+        .unwrap_or_default()
     }
 
     fn invalidate_inlay_hints(&self, for_buffers: &HashSet<BufferId>, cx: &mut App) {
-        self.read(cx).lsp_store().update(cx, |lsp_store, _| {
-            lsp_store.invalidate_inlay_hints(for_buffers)
-        });
+        self.update(cx, |project, cx| {
+            project.lsp_store().update(cx, |lsp_store, _| {
+                lsp_store.invalidate_inlay_hints(for_buffers)
+            })
+        })
+        .ok();
     }
 
     fn inlay_hints(
@@ -27375,9 +27451,12 @@ impl SemanticsProvider for Entity<Project> {
         known_chunks: Option<(clock::Global, HashSet<Range<BufferRow>>)>,
         cx: &mut App,
     ) -> Option<HashMap<Range<BufferRow>, Task<Result<CacheInlayHints>>>> {
-        Some(self.read(cx).lsp_store().update(cx, |lsp_store, cx| {
-            lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx)
-        }))
+        self.update(cx, |project, cx| {
+            project.lsp_store().update(cx, |lsp_store, cx| {
+                lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx)
+            })
+        })
+        .ok()
     }
 
     fn semantic_tokens(
@@ -27385,10 +27464,13 @@ impl SemanticsProvider for Entity<Project> {
         buffer: Entity<Buffer>,
         refresh: Option<RefreshForServer>,
         cx: &mut App,
-    ) -> Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>> {
-        self.read(cx).lsp_store().update(cx, |lsp_store, cx| {
-            lsp_store.semantic_tokens(buffer, refresh, cx)
+    ) -> Option<Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>>> {
+        self.update(cx, |this, cx| {
+            this.lsp_store().update(cx, |lsp_store, cx| {
+                lsp_store.semantic_tokens(buffer, refresh, cx)
+            })
         })
+        .ok()
     }
 
     fn range_for_rename(
@@ -27397,7 +27479,7 @@ impl SemanticsProvider for Entity<Project> {
         position: text::Anchor,
         cx: &mut App,
     ) -> Option<Task<Result<Option<Range<text::Anchor>>>>> {
-        Some(self.update(cx, |project, cx| {
+        self.update(cx, |project, cx| {
             let buffer = buffer.clone();
             let task = project.prepare_rename(buffer.clone(), position, cx);
             cx.spawn(async move |_, cx| {
@@ -27420,7 +27502,8 @@ impl SemanticsProvider for Entity<Project> {
                     }
                 })
             })
-        }))
+        })
+        .ok()
     }
 
     fn perform_rename(
@@ -27430,9 +27513,10 @@ impl SemanticsProvider for Entity<Project> {
         new_name: String,
         cx: &mut App,
     ) -> Option<Task<Result<ProjectTransaction>>> {
-        Some(self.update(cx, |project, cx| {
+        self.update(cx, |project, cx| {
             project.perform_rename(buffer.clone(), position, new_name, cx)
-        }))
+        })
+        .ok()
     }
 }
 
@@ -28532,7 +28616,7 @@ fn edit_prediction_edit_text(
 }
 
 fn edit_prediction_fallback_text(edits: &[(Range<Anchor>, Arc<str>)], cx: &App) -> HighlightedText {
-    // Fallback for providers that don't provide edit_preview (like Copilot/Supermaven)
+    // Fallback for providers that don't provide edit_preview (like Copilot)
     // Just show the raw edit text with basic styling
     let mut text = String::new();
     let mut highlights = Vec::new();

crates/editor/src/editor_tests.rs 🔗

@@ -7,7 +7,7 @@ use crate::{
     linked_editing_ranges::LinkedEditingRanges,
     scroll::scroll_amount::ScrollAmount,
     test::{
-        assert_text_with_selections, build_editor,
+        assert_text_with_selections, build_editor, editor_content_with_blocks,
         editor_lsp_test_context::{EditorLspTestContext, git_commit_lang},
         editor_test_context::EditorTestContext,
         select_ranges,
@@ -35,9 +35,7 @@ use language_settings::Formatter;
 use languages::markdown_lang;
 use languages::rust_lang;
 use lsp::{CompletionParams, DEFAULT_LSP_REQUEST_TIMEOUT};
-use multi_buffer::{
-    ExcerptRange, IndentGuide, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey,
-};
+use multi_buffer::{IndentGuide, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey};
 use parking_lot::Mutex;
 use pretty_assertions::{assert_eq, assert_ne};
 use project::{
@@ -64,7 +62,6 @@ use util::{
     assert_set_eq, path,
     rel_path::rel_path,
     test::{TextRangeMarker, marked_text_ranges, marked_text_ranges_by, sample_text},
-    uri,
 };
 use workspace::{
     CloseActiveItem, CloseAllItems, CloseOtherItems, MultiWorkspace, NavigationEntry, OpenOptions,
@@ -3385,6 +3382,46 @@ async fn test_newline_below(cx: &mut TestAppContext) {
     "});
 }
 
+#[gpui::test]
+fn test_newline_respects_read_only(cx: &mut TestAppContext) {
+    init_test(cx, |_| {});
+
+    let editor = cx.add_window(|window, cx| {
+        let buffer = MultiBuffer::build_simple("aaaa\nbbbb\n", cx);
+        build_editor(buffer, window, cx)
+    });
+
+    _ = editor.update(cx, |editor, window, cx| {
+        editor.set_read_only(true);
+        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+            s.select_display_ranges([
+                DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2)
+            ])
+        });
+
+        editor.newline(&Newline, window, cx);
+        assert_eq!(
+            editor.text(cx),
+            "aaaa\nbbbb\n",
+            "newline should not modify a read-only editor"
+        );
+
+        editor.newline_above(&NewlineAbove, window, cx);
+        assert_eq!(
+            editor.text(cx),
+            "aaaa\nbbbb\n",
+            "newline_above should not modify a read-only editor"
+        );
+
+        editor.newline_below(&NewlineBelow, window, cx);
+        assert_eq!(
+            editor.text(cx),
+            "aaaa\nbbbb\n",
+            "newline_below should not modify a read-only editor"
+        );
+    });
+}
+
 #[gpui::test]
 fn test_newline_below_multibuffer(cx: &mut TestAppContext) {
     init_test(cx, |_| {});
@@ -3393,14 +3430,18 @@ fn test_newline_below_multibuffer(cx: &mut TestAppContext) {
     let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
         multibuffer
@@ -3467,14 +3508,18 @@ fn test_newline_below_multibuffer_middle_of_excerpt(cx: &mut TestAppContext) {
     let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
         multibuffer
@@ -3528,14 +3573,18 @@ fn test_newline_below_multibuffer_last_line_of_last_excerpt(cx: &mut TestAppCont
     let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
         multibuffer
@@ -3589,14 +3638,18 @@ fn test_newline_below_multibuffer_multiple_cursors(cx: &mut TestAppContext) {
     let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))],
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
             cx,
         );
         multibuffer
@@ -4671,14 +4724,18 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
         cx.new(|cx| Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             toml_buffer.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
+            [Point::new(0, 0)..Point::new(2, 0)],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             rust_buffer.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+            [Point::new(0, 0)..Point::new(1, 0)],
+            0,
             cx,
         );
         multibuffer
@@ -4882,6 +4939,32 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) {
             &[Point::new(0, 3)..Point::new(0, 3)]
         );
 
+        editor.undo(&Undo, window, cx);
+        assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n\n");
+
+        // Select a full line, i.e. start of the first line to the start of the second line
+        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+            s.select_ranges([Point::new(0, 0)..Point::new(1, 0)])
+        });
+        editor.join_lines(&JoinLines, window, cx);
+        assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n\n");
+
+        editor.undo(&Undo, window, cx);
+        assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n\n");
+
+        // Select two full lines
+        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+            s.select_ranges([Point::new(0, 0)..Point::new(2, 0)])
+        });
+        editor.join_lines(&JoinLines, window, cx);
+
+        // Only the selected lines should be joined, not the third.
+        assert_eq!(
+            buffer.read(cx).text(),
+            "aaa bbb\nccc\nddd\n\n",
+            "only the two selected lines (a and b) should be joined"
+        );
+
         // When multiple lines are selected, remove newlines that are spanned by the selection
         editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
             s.select_ranges([Point::new(0, 5)..Point::new(2, 2)])
@@ -8030,16 +8113,54 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) {
     let mut cx = EditorTestContext::new(cx).await;
 
     cx.set_state(indoc! {"
-        «    a
-            bˇ»
+        «    fn main() {
+                1
+            }ˇ»
     "});
     cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true));
     cx.update_editor(|editor, window, cx| editor.copy_and_trim(&CopyAndTrim, window, cx));
 
     assert_eq!(
         cx.read_from_clipboard().and_then(|item| item.text()),
-        Some("a\nb\n".to_string())
+        Some("fn main() {\n    1\n}\n".to_string())
     );
+
+    let clipboard_selections: Vec<ClipboardSelection> = cx
+        .read_from_clipboard()
+        .and_then(|item| item.entries().first().cloned())
+        .and_then(|entry| match entry {
+            gpui::ClipboardEntry::String(text) => text.metadata_json(),
+            _ => None,
+        })
+        .expect("should have clipboard selections");
+
+    assert_eq!(clipboard_selections.len(), 1);
+    assert!(clipboard_selections[0].is_entire_line);
+
+    cx.set_state(indoc! {"
+        «fn main() {
+            1
+        }ˇ»
+    "});
+    cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true));
+    cx.update_editor(|editor, window, cx| editor.copy_and_trim(&CopyAndTrim, window, cx));
+
+    assert_eq!(
+        cx.read_from_clipboard().and_then(|item| item.text()),
+        Some("fn main() {\n    1\n}\n".to_string())
+    );
+
+    let clipboard_selections: Vec<ClipboardSelection> = cx
+        .read_from_clipboard()
+        .and_then(|item| item.entries().first().cloned())
+        .and_then(|entry| match entry {
+            gpui::ClipboardEntry::String(text) => text.metadata_json(),
+            _ => None,
+        })
+        .expect("should have clipboard selections");
+
+    assert_eq!(clipboard_selections.len(), 1);
+    assert!(clipboard_selections[0].is_entire_line);
 }
 
 #[gpui::test]
@@ -8064,9 +8185,11 @@ async fn test_clipboard_line_numbers_from_multibuffer(cx: &mut TestAppContext) {
 
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer.clone(),
-            [ExcerptRange::new(Point::new(2, 0)..Point::new(5, 0))],
+            [Point::new(2, 0)..Point::new(5, 0)],
+            0,
             cx,
         );
         multibuffer
@@ -8455,6 +8578,26 @@ async fn test_split_selection_into_lines(cx: &mut TestAppContext) {
     );
 }
 
+#[gpui::test]
+async fn test_split_selection_into_lines_does_not_scroll(cx: &mut TestAppContext) {
+    init_test(cx, |_| {});
+    let mut cx = EditorTestContext::new(cx).await;
+
+    let large_body = "\nline".repeat(300);
+    cx.set_state(&format!("«ˇstart{large_body}\nend»"));
+    let initial_scroll_position = cx.update_editor(|editor, _, cx| editor.scroll_position(cx));
+
+    cx.update_editor(|editor, window, cx| {
+        editor.split_selection_into_lines(&Default::default(), window, cx);
+    });
+
+    let scroll_position_after_split = cx.update_editor(|editor, _, cx| editor.scroll_position(cx));
+    assert_eq!(
+        initial_scroll_position, scroll_position_after_split,
+        "Scroll position should not change after splitting selection into lines"
+    );
+}
+
 #[gpui::test]
 async fn test_split_selection_into_lines_interacting_with_creases(cx: &mut TestAppContext) {
     init_test(cx, |_| {});
@@ -9581,31 +9724,25 @@ async fn test_select_previous_multibuffer(cx: &mut TestAppContext) {
     init_test(cx, |_| {});
 
     let mut cx =
-        EditorTestContext::new_multibuffer(cx, ["aaa\n«bbb\nccc\n»ddd", "aaa\n«bbb\nccc\n»ddd"]);
+        EditorTestContext::new_multibuffer(cx, ["aaa\n«bbb\nccc»\nddd", "aaa\n«bbb\nccc»\nddd"]);
 
     cx.assert_editor_state(indoc! {"
         ˇbbb
         ccc
-
         bbb
-        ccc
-        "});
+        ccc"});
     cx.dispatch_action(SelectPrevious::default());
     cx.assert_editor_state(indoc! {"
                 «bbbˇ»
                 ccc
-
                 bbb
-                ccc
-                "});
+                ccc"});
     cx.dispatch_action(SelectPrevious::default());
     cx.assert_editor_state(indoc! {"
                 «bbbˇ»
                 ccc
-
                 «bbbˇ»
-                ccc
-                "});
+                ccc"});
 }
 
 #[gpui::test]
@@ -10619,7 +10756,9 @@ async fn test_autoindent(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_autoindent_disabled(cx: &mut TestAppContext) {
-    init_test(cx, |settings| settings.defaults.auto_indent = Some(false));
+    init_test(cx, |settings| {
+        settings.defaults.auto_indent = Some(settings::AutoIndentMode::None)
+    });
 
     let language = Arc::new(
         Language::new(
@@ -10697,14 +10836,165 @@ async fn test_autoindent_disabled(cx: &mut TestAppContext) {
     });
 }
 
+#[gpui::test]
+async fn test_autoindent_none_does_not_preserve_indentation_on_newline(cx: &mut TestAppContext) {
+    init_test(cx, |settings| {
+        settings.defaults.auto_indent = Some(settings::AutoIndentMode::None)
+    });
+
+    let mut cx = EditorTestContext::new(cx).await;
+
+    cx.set_state(indoc! {"
+        hello
+            indented lineˇ
+        world
+    "});
+
+    cx.update_editor(|editor, window, cx| {
+        editor.newline(&Newline, window, cx);
+    });
+
+    cx.assert_editor_state(indoc! {"
+        hello
+            indented line
+        ˇ
+        world
+    "});
+}
+
+#[gpui::test]
+async fn test_autoindent_preserve_indent_maintains_indentation_on_newline(cx: &mut TestAppContext) {
+    // When auto_indent is "preserve_indent", pressing Enter on an indented line
+    // should preserve the indentation but not adjust based on syntax.
+    init_test(cx, |settings| {
+        settings.defaults.auto_indent = Some(settings::AutoIndentMode::PreserveIndent)
+    });
+
+    let mut cx = EditorTestContext::new(cx).await;
+
+    cx.set_state(indoc! {"
+        hello
+            indented lineˇ
+        world
+    "});
+
+    cx.update_editor(|editor, window, cx| {
+        editor.newline(&Newline, window, cx);
+    });
+
+    // The new line SHOULD have the same indentation as the previous line
+    cx.assert_editor_state(indoc! {"
+        hello
+            indented line
+            ˇ
+        world
+    "});
+}
+
+#[gpui::test]
+async fn test_autoindent_preserve_indent_does_not_apply_syntax_indent(cx: &mut TestAppContext) {
+    init_test(cx, |settings| {
+        settings.defaults.auto_indent = Some(settings::AutoIndentMode::PreserveIndent)
+    });
+
+    let language = Arc::new(
+        Language::new(
+            LanguageConfig {
+                brackets: BracketPairConfig {
+                    pairs: vec![BracketPair {
+                        start: "{".to_string(),
+                        end: "}".to_string(),
+                        close: false,
+                        surround: false,
+                        newline: false, // Disable extra newline behavior to isolate syntax indent test
+                    }],
+                    ..Default::default()
+                },
+                ..Default::default()
+            },
+            Some(tree_sitter_rust::LANGUAGE.into()),
+        )
+        .with_indents_query(r#"(_ "{" "}" @end) @indent"#)
+        .unwrap(),
+    );
+
+    let buffer =
+        cx.new(|cx| Buffer::local("fn foo() {\n}", cx).with_language(language.clone(), cx));
+    let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
+    let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx));
+    editor
+        .condition::<crate::EditorEvent>(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
+        .await;
+
+    // Position cursor at end of line containing `{`
+    editor.update_in(cx, |editor, window, cx| {
+        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+            s.select_ranges([MultiBufferOffset(10)..MultiBufferOffset(10)]) // After "fn foo() {"
+        });
+        editor.newline(&Newline, window, cx);
+
+        // With PreserveIndent, the new line should have 0 indentation (same as the fn line)
+        // NOT 4 spaces (which tree-sitter would add for being inside `{}`)
+        assert_eq!(editor.text(cx), "fn foo() {\n\n}");
+    });
+}
+
+#[gpui::test]
+async fn test_autoindent_syntax_aware_applies_syntax_indent(cx: &mut TestAppContext) {
+    // Companion test to show that SyntaxAware DOES apply tree-sitter indentation
+    init_test(cx, |settings| {
+        settings.defaults.auto_indent = Some(settings::AutoIndentMode::SyntaxAware)
+    });
+
+    let language = Arc::new(
+        Language::new(
+            LanguageConfig {
+                brackets: BracketPairConfig {
+                    pairs: vec![BracketPair {
+                        start: "{".to_string(),
+                        end: "}".to_string(),
+                        close: false,
+                        surround: false,
+                        newline: false, // Disable extra newline behavior to isolate syntax indent test
+                    }],
+                    ..Default::default()
+                },
+                ..Default::default()
+            },
+            Some(tree_sitter_rust::LANGUAGE.into()),
+        )
+        .with_indents_query(r#"(_ "{" "}" @end) @indent"#)
+        .unwrap(),
+    );
+
+    let buffer =
+        cx.new(|cx| Buffer::local("fn foo() {\n}", cx).with_language(language.clone(), cx));
+    let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
+    let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx));
+    editor
+        .condition::<crate::EditorEvent>(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
+        .await;
+
+    // Position cursor at end of line containing `{`
+    editor.update_in(cx, |editor, window, cx| {
+        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+            s.select_ranges([MultiBufferOffset(10)..MultiBufferOffset(10)]) // After "fn foo() {"
+        });
+        editor.newline(&Newline, window, cx);
+
+        // With SyntaxAware, tree-sitter adds indentation for being inside `{}`
+        assert_eq!(editor.text(cx), "fn foo() {\n    \n}");
+    });
+}
+
 #[gpui::test]
 async fn test_autoindent_disabled_with_nested_language(cx: &mut TestAppContext) {
     init_test(cx, |settings| {
-        settings.defaults.auto_indent = Some(true);
+        settings.defaults.auto_indent = Some(settings::AutoIndentMode::SyntaxAware);
         settings.languages.0.insert(
             "python".into(),
             LanguageSettingsContent {
-                auto_indent: Some(false),
+                auto_indent: Some(settings::AutoIndentMode::None),
                 ..Default::default()
             },
         );
@@ -12708,10 +12998,10 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
         sample_text_2,
         "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu"
     );
-    let sample_text_3 = sample_text(rows, cols, 'v');
+    let sample_text_3 = sample_text(rows, cols, 'v').replace('\u{7f}', ".");
     assert_eq!(
         sample_text_3,
-        "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}"
+        "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n...."
     );
 
     let fs = FakeFs::new(cx.executor());
@@ -12770,33 +13060,40 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
 
     let multi_buffer = cx.new(|cx| {
         let mut multi_buffer = MultiBuffer::new(ReadWrite);
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
             [
-                ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)),
-                ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)),
-                ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)),
+                Point::new(0, 0)..Point::new(2, 4),
+                Point::new(5, 0)..Point::new(6, 4),
+                Point::new(9, 0)..Point::new(9, 4),
             ],
+            0,
             cx,
         );
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
             [
-                ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)),
-                ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)),
-                ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)),
+                Point::new(0, 0)..Point::new(2, 4),
+                Point::new(5, 0)..Point::new(6, 4),
+                Point::new(9, 0)..Point::new(9, 4),
             ],
+            0,
             cx,
         );
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(2),
             buffer_3.clone(),
             [
-                ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)),
-                ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)),
-                ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)),
+                Point::new(0, 0)..Point::new(2, 4),
+                Point::new(5, 0)..Point::new(6, 4),
+                Point::new(9, 0)..Point::new(9, 4),
             ],
+            0,
             cx,
         );
+        assert_eq!(multi_buffer.excerpt_ids().len(), 9);
         multi_buffer
     });
     let multi_buffer_editor = cx.new_window_entity(|window, cx| {
@@ -12810,30 +13107,61 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
     });
 
     multi_buffer_editor.update_in(cx, |editor, window, cx| {
+        let a = editor.text(cx).find("aaaa").unwrap();
         editor.change_selections(
             SelectionEffects::scroll(Autoscroll::Next),
             window,
             cx,
-            |s| s.select_ranges(Some(MultiBufferOffset(1)..MultiBufferOffset(2))),
+            |s| s.select_ranges(Some(MultiBufferOffset(a + 1)..MultiBufferOffset(a + 2))),
         );
         editor.insert("|one|two|three|", window, cx);
     });
     assert!(cx.read(|cx| multi_buffer_editor.is_dirty(cx)));
     multi_buffer_editor.update_in(cx, |editor, window, cx| {
+        let n = editor.text(cx).find("nnnn").unwrap();
         editor.change_selections(
             SelectionEffects::scroll(Autoscroll::Next),
             window,
             cx,
-            |s| s.select_ranges(Some(MultiBufferOffset(60)..MultiBufferOffset(70))),
+            |s| s.select_ranges(Some(MultiBufferOffset(n + 4)..MultiBufferOffset(n + 14))),
         );
         editor.insert("|four|five|six|", window, cx);
     });
     assert!(cx.read(|cx| multi_buffer_editor.is_dirty(cx)));
 
     // First two buffers should be edited, but not the third one.
-    assert_eq!(
-        multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)),
-        "a|one|two|three|aa\nbbbb\ncccc\n\nffff\ngggg\n\njjjj\nllll\nmmmm\nnnnn|four|five|six|\nr\n\nuuuu\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}",
+    pretty_assertions::assert_eq!(
+        editor_content_with_blocks(&multi_buffer_editor, cx),
+        indoc! {"
+            § main.rs
+            § -----
+            a|one|two|three|aa
+            bbbb
+            cccc
+            § -----
+            ffff
+            gggg
+            § -----
+            jjjj
+            § other.rs
+            § -----
+            llll
+            mmmm
+            nnnn|four|five|six|
+            § -----
+
+            § -----
+            uuuu
+            § lib.rs
+            § -----
+            vvvv
+            wwww
+            xxxx
+            § -----
+            {{{{
+            ||||
+            § -----
+            ...."}
     );
     buffer_1.update(cx, |buffer, _| {
         assert!(buffer.is_dirty());
@@ -12846,7 +13174,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
         assert!(buffer.is_dirty());
         assert_eq!(
             buffer.text(),
-            "llll\nmmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu",
+            "llll\nmmmm\nnnnn|four|five|six|\noooo\npppp\n\nssss\ntttt\nuuuu",
         )
     });
     buffer_3.update(cx, |buffer, _| {
@@ -12872,10 +13200,10 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
     let fake_server = fake_servers.next().await.unwrap();
     fake_server
         .server
-        .on_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
+        .on_request::<lsp::request::Formatting, _, _>(move |_params, _| async move {
             Ok(Some(vec![lsp::TextEdit::new(
                 lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)),
-                format!("[{} formatted]", params.text_document.uri),
+                "[formatted]".to_string(),
             )]))
         })
         .detach();
@@ -12884,23 +13212,61 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
     // After multibuffer saving, only first two buffers should be reformatted, but not the third one (as it was not dirty).
     assert!(cx.read(|cx| !multi_buffer_editor.is_dirty(cx)));
     assert_eq!(
-        multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)),
-        uri!(
-            "a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}"
-        ),
+        editor_content_with_blocks(&multi_buffer_editor, cx),
+        indoc! {"
+            § main.rs
+            § -----
+            a|o[formatted]bbbb
+            cccc
+            § -----
+            ffff
+            gggg
+            § -----
+            jjjj
+
+            § other.rs
+            § -----
+            lll[formatted]mmmm
+            nnnn|four|five|six|
+            § -----
+
+            § -----
+            uuuu
+
+            § lib.rs
+            § -----
+            vvvv
+            wwww
+            xxxx
+            § -----
+            {{{{
+            ||||
+            § -----
+            ...."}
     );
     buffer_1.update(cx, |buffer, _| {
         assert!(!buffer.is_dirty());
         assert_eq!(
             buffer.text(),
-            uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n"),
+            "a|o[formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n",
         )
     });
+    // Diff < left / right > :
+    //  lll[formatted]mmmm
+    // <nnnn|four|five|six|
+    // <oooo
+    // >nnnn|four|five|six|oooo
+    //  pppp
+    // <
+    //  ssss
+    //  tttt
+    //  uuuu
+
     buffer_2.update(cx, |buffer, _| {
         assert!(!buffer.is_dirty());
         assert_eq!(
             buffer.text(),
-            uri!("lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n"),
+            "lll[formatted]mmmm\nnnnn|four|five|six|\noooo\npppp\n\nssss\ntttt\nuuuu\n",
         )
     });
     buffer_3.update(cx, |buffer, _| {
@@ -12957,19 +13323,25 @@ async fn test_autosave_with_dirty_buffers(cx: &mut TestAppContext) {
     // Create a multi-buffer with all three buffers
     let multi_buffer = cx.new(|cx| {
         let mut multi_buffer = MultiBuffer::new(ReadWrite);
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+            [Point::new(0, 0)..Point::new(1, 0)],
+            0,
             cx,
         );
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+            [Point::new(0, 0)..Point::new(1, 0)],
+            0,
             cx,
         );
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(2),
             buffer_3.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+            [Point::new(0, 0)..Point::new(1, 0)],
+            0,
             cx,
         );
         multi_buffer
@@ -15456,7 +15828,9 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
             10.satu;
 
             //
-            // separate cursors so they open in different excerpts (manually reproducible)
+            // separate1
+            // separate2
+            // separate3
             //
 
             10.satu20;
@@ -15468,8 +15842,6 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
 
             //
 
-            //
-
             10.satuˇ20;
         }
     "};
@@ -15479,15 +15851,10 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
 
             //
 
-            //
-
             10.saturating_sub()ˇ;
         }
     "};
 
-    let first_excerpt_end = buffer_text.find("//").unwrap() + 3;
-    let second_excerpt_end = buffer_text.rfind("//").unwrap() - 4;
-
     let fs = FakeFs::new(cx.executor());
     fs.insert_tree(
         path!("/a"),
@@ -15527,14 +15894,14 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
 
     let multi_buffer = cx.new(|cx| {
         let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
-        multi_buffer.push_excerpts(
-            buffer.clone(),
-            [ExcerptRange::new(0..first_excerpt_end)],
-            cx,
-        );
-        multi_buffer.push_excerpts(
+        multi_buffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer.clone(),
-            [ExcerptRange::new(second_excerpt_end..buffer_text.len())],
+            [
+                Point::zero()..Point::new(2, 0),
+                Point::new(7, 0)..buffer.read(cx).max_point(),
+            ],
+            0,
             cx,
         );
         multi_buffer
@@ -15568,7 +15935,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
         editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
             s.select_ranges([
                 Point::new(1, 11)..Point::new(1, 11),
-                Point::new(7, 11)..Point::new(7, 11),
+                Point::new(5, 11)..Point::new(5, 11),
             ])
         });
 
@@ -15587,12 +15954,12 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
                     lsp::InsertReplaceEdit {
                         new_text: "saturating_sub()".to_owned(),
                         insert: lsp::Range::new(
-                            lsp::Position::new(7, 7),
-                            lsp::Position::new(7, 11),
+                            lsp::Position::new(9, 7),
+                            lsp::Position::new(9, 11),
                         ),
                         replace: lsp::Range::new(
-                            lsp::Position::new(7, 7),
-                            lsp::Position::new(7, 13),
+                            lsp::Position::new(9, 7),
+                            lsp::Position::new(9, 13),
                         ),
                     },
                 )),
@@ -17130,6 +17497,7 @@ async fn test_no_duplicated_completion_requests(cx: &mut TestAppContext) {
         }
     });
 
+    cx.executor().run_until_parked();
     cx.condition(|editor, _| editor.context_menu_visible())
         .await;
     cx.assert_editor_state("fn main() { let a = 2.ˇ; }");
@@ -17674,24 +18042,26 @@ async fn test_toggle_block_comment(cx: &mut TestAppContext) {
 fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
     init_test(cx, |_| {});
 
-    let buffer = cx.new(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
+    let buffer = cx.new(|cx| Buffer::local(sample_text(6, 4, 'a'), cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer.clone(),
             [
-                ExcerptRange::new(Point::new(0, 0)..Point::new(0, 4)),
-                ExcerptRange::new(Point::new(1, 0)..Point::new(1, 4)),
+                Point::new(0, 0)..Point::new(0, 4),
+                Point::new(5, 0)..Point::new(5, 4),
             ],
+            0,
             cx,
         );
-        assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb");
+        assert_eq!(multibuffer.read(cx).text(), "aaaa\nffff");
         multibuffer
     });
 
     let (editor, cx) = cx.add_window_view(|window, cx| build_editor(multibuffer, window, cx));
     editor.update_in(cx, |editor, window, cx| {
-        assert_eq!(editor.text(cx), "aaaa\nbbbb");
+        assert_eq!(editor.text(cx), "aaaa\nffff");
         editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
             s.select_ranges([
                 Point::new(0, 0)..Point::new(0, 0),
@@ -17700,7 +18070,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
         });
 
         editor.handle_input("X", window, cx);
-        assert_eq!(editor.text(cx), "Xaaaa\nXbbbb");
+        assert_eq!(editor.text(cx), "Xaaaa\nXffff");
         assert_eq!(
             editor.selections.ranges(&editor.display_snapshot(cx)),
             [
@@ -17714,7 +18084,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
             s.select_ranges([Point::new(0, 2)..Point::new(1, 2)])
         });
         editor.backspace(&Default::default(), window, cx);
-        assert_eq!(editor.text(cx), "Xa\nbbb");
+        assert_eq!(editor.text(cx), "Xa\nfff");
         assert_eq!(
             editor.selections.ranges(&editor.display_snapshot(cx)),
             [Point::new(1, 0)..Point::new(1, 0)]
@@ -17724,7 +18094,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
             s.select_ranges([Point::new(1, 1)..Point::new(0, 1)])
         });
         editor.backspace(&Default::default(), window, cx);
-        assert_eq!(editor.text(cx), "X\nbb");
+        assert_eq!(editor.text(cx), "X\nff");
         assert_eq!(
             editor.selections.ranges(&editor.display_snapshot(cx)),
             [Point::new(0, 1)..Point::new(0, 1)]
@@ -17732,115 +18102,23 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
     });
 }
 
-#[gpui::test]
-fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
-    init_test(cx, |_| {});
-
-    let markers = vec![('[', ']').into(), ('(', ')').into()];
-    let (initial_text, mut excerpt_ranges) = marked_text_ranges_by(
-        indoc! {"
-            [aaaa
-            (bbbb]
-            cccc)",
-        },
-        markers.clone(),
-    );
-    let excerpt_ranges = markers.into_iter().map(|marker| {
-        let context = excerpt_ranges.remove(&marker).unwrap()[0].clone();
-        ExcerptRange::new(context)
-    });
-    let buffer = cx.new(|cx| Buffer::local(initial_text, cx));
-    let multibuffer = cx.new(|cx| {
-        let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
-        multibuffer
-    });
-
-    let (editor, cx) = cx.add_window_view(|window, cx| build_editor(multibuffer, window, cx));
-    editor.update_in(cx, |editor, window, cx| {
-        let (expected_text, selection_ranges) = marked_text_ranges(
-            indoc! {"
-                aaaa
-                bˇbbb
-                bˇbbˇb
-                cccc"
-            },
-            true,
-        );
-        assert_eq!(editor.text(cx), expected_text);
-        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
-            s.select_ranges(
-                selection_ranges
-                    .iter()
-                    .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)),
-            )
-        });
-
-        editor.handle_input("X", window, cx);
-
-        let (expected_text, expected_selections) = marked_text_ranges(
-            indoc! {"
-                aaaa
-                bXˇbbXb
-                bXˇbbXˇb
-                cccc"
-            },
-            false,
-        );
-        assert_eq!(editor.text(cx), expected_text);
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            expected_selections
-                .iter()
-                .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end))
-                .collect::<Vec<_>>()
-        );
-
-        editor.newline(&Newline, window, cx);
-        let (expected_text, expected_selections) = marked_text_ranges(
-            indoc! {"
-                aaaa
-                bX
-                ˇbbX
-                b
-                bX
-                ˇbbX
-                ˇb
-                cccc"
-            },
-            false,
-        );
-        assert_eq!(editor.text(cx), expected_text);
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            expected_selections
-                .iter()
-                .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end))
-                .collect::<Vec<_>>()
-        );
-    });
-}
-
 #[gpui::test]
 fn test_refresh_selections(cx: &mut TestAppContext) {
     init_test(cx, |_| {});
 
-    let buffer = cx.new(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
-    let mut excerpt1_id = None;
+    let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(ReadWrite);
-        excerpt1_id = multibuffer
-            .push_excerpts(
-                buffer.clone(),
-                [
-                    ExcerptRange::new(Point::new(0, 0)..Point::new(1, 4)),
-                    ExcerptRange::new(Point::new(1, 0)..Point::new(2, 4)),
-                ],
-                cx,
-            )
-            .into_iter()
-            .next();
-        assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\nbbbb\ncccc");
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
+            buffer.clone(),
+            [
+                Point::new(0, 0)..Point::new(1, 4),
+                Point::new(3, 0)..Point::new(4, 4),
+            ],
+            0,
+            cx,
+        );
         multibuffer
     });
 

crates/editor/src/element.rs 🔗

@@ -1,15 +1,15 @@
 use crate::{
-    ActiveDiagnostic, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, ChunkReplacement,
-    CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, ConflictsOuter,
-    ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, CustomBlockId,
-    DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, Editor, EditorMode,
-    EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock,
-    GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, InlayHintRefreshReason,
-    JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE,
-    MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator,
-    PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, SelectPhase, Selection,
-    SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint,
-    ToggleFold, ToggleFoldAll,
+    ActiveDiagnostic, BUFFER_HEADER_PADDING, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext,
+    ChunkReplacement, CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker,
+    ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape,
+    CustomBlockId, DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction,
+    Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT,
+    FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor,
+    InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN,
+    MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp,
+    PhantomBreakpointIndicator, PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt,
+    SelectPhase, Selection, SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap,
+    StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
     code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
     column_pixels,
     display_map::{
@@ -47,8 +47,8 @@ use gpui::{
     MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad, ParentElement,
     Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString,
     Size, StatefulInteractiveElement, Style, Styled, StyledText, TextAlign, TextRun,
-    TextStyleRefinement, WeakEntity, Window, anchored, checkerboard, deferred, div, fill,
-    linear_color_stop, linear_gradient, outline, point, px, quad, relative, size, solid_background,
+    TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop,
+    linear_gradient, outline, pattern_slash, point, px, quad, relative, size, solid_background,
     transparent_black,
 };
 use itertools::Itertools;
@@ -186,7 +186,10 @@ impl SelectionLayout {
 
 #[derive(Default)]
 struct RenderBlocksOutput {
-    blocks: Vec<BlockLayout>,
+    // We store spacer blocks separately because they paint in a different order
+    // (spacers -> indent guides -> non-spacers)
+    non_spacer_blocks: Vec<BlockLayout>,
+    spacer_blocks: Vec<BlockLayout>,
     row_block_types: HashMap<DisplayRow, bool>,
     resized_blocks: Option<HashMap<CustomBlockId, u32>>,
 }
@@ -2860,7 +2863,7 @@ impl EditorElement {
                 }
             });
 
-            window.defer_draw(element, origin, 2);
+            window.defer_draw(element, origin, 2, None);
         }
     }
 
@@ -2970,11 +2973,12 @@ impl EditorElement {
                             - scroll_pixel_position.x,
                     );
                     if start_x >= text_origin.x {
-                        let (offset_y, length) = Self::calculate_indent_guide_bounds(
-                            indent_guide.start_row..indent_guide.end_row,
-                            line_height,
-                            snapshot,
-                        );
+                        let (offset_y, length, display_row_range) =
+                            Self::calculate_indent_guide_bounds(
+                                indent_guide.start_row..indent_guide.end_row,
+                                line_height,
+                                snapshot,
+                            );
 
                         let start_y = Pixels::from(
                             ScrollOffset::from(content_origin.y) + offset_y
@@ -2985,6 +2989,7 @@ impl EditorElement {
                             origin: point(start_x, start_y),
                             length,
                             single_indent_width,
+                            display_row_range,
                             depth: indent_guide.depth,
                             active: active_indent_guide_indices.contains(&i),
                             settings: indent_guide.settings,
@@ -2997,6 +3002,22 @@ impl EditorElement {
         )
     }
 
+    fn depth_zero_indent_guide_padding_for_row(
+        indent_guides: &[IndentGuideLayout],
+        row: DisplayRow,
+    ) -> Pixels {
+        indent_guides
+            .iter()
+            .find(|guide| guide.depth == 0 && guide.display_row_range.contains(&row))
+            .and_then(|guide| {
+                guide
+                    .settings
+                    .visible_line_width(guide.active)
+                    .map(|width| px(width as f32 * 2.0))
+            })
+            .unwrap_or(px(0.0))
+    }
+
     fn layout_wrap_guides(
         &self,
         em_advance: Pixels,
@@ -3034,11 +3055,11 @@ impl EditorElement {
         row_range: Range<MultiBufferRow>,
         line_height: Pixels,
         snapshot: &DisplaySnapshot,
-    ) -> (f64, gpui::Pixels) {
+    ) -> (f64, gpui::Pixels, Range<DisplayRow>) {
         let start_point = Point::new(row_range.start.0, 0);
         let end_point = Point::new(row_range.end.0, 0);
 
-        let row_range = start_point.to_display_point(snapshot).row()
+        let mut row_range = start_point.to_display_point(snapshot).row()
             ..end_point.to_display_point(snapshot).row();
 
         let mut prev_line = start_point;
@@ -3076,6 +3097,7 @@ impl EditorElement {
         if !found_excerpt_header {
             offset_y -= block_offset as f64 * f64::from(line_height);
             length += block_height as f32 * line_height;
+            row_range = DisplayRow(row_range.start.0.saturating_sub(block_offset))..row_range.end;
         }
 
         // If there is a block (e.g. diagnostic) at the end of an multibuffer excerpt,
@@ -3093,9 +3115,11 @@ impl EditorElement {
         }
         if found_excerpt_header {
             length -= block_height as f32 * line_height;
+        } else {
+            row_range = row_range.start..cons_line;
         }
 
-        (offset_y, length)
+        (offset_y, length, row_range)
     }
 
     fn layout_breakpoints(
@@ -3857,6 +3881,7 @@ impl EditorElement {
         latest_selection_anchors: &HashMap<BufferId, Anchor>,
         is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
         sticky_header_excerpt_id: Option<ExcerptId>,
+        indent_guides: &Option<Vec<IndentGuideLayout>>,
         block_resize_offset: &mut i32,
         window: &mut Window,
         cx: &mut App,
@@ -3908,19 +3933,30 @@ impl EditorElement {
 
                 div()
                     .size_full()
-                    .child(custom.render(&mut BlockContext {
-                        window,
-                        app: cx,
-                        anchor_x,
-                        margins: editor_margins,
-                        line_height,
-                        em_width,
-                        block_id,
-                        height: custom.height.unwrap_or(1),
-                        selected,
-                        max_width: text_hitbox.size.width.max(*scroll_width),
-                        editor_style: &self.style,
-                    }))
+                    .child(
+                        custom.render(&mut BlockContext {
+                            window,
+                            app: cx,
+                            anchor_x,
+                            margins: editor_margins,
+                            line_height,
+                            em_width,
+                            block_id,
+                            height: custom.height.unwrap_or(1),
+                            selected,
+                            max_width: text_hitbox.size.width.max(*scroll_width),
+                            editor_style: &self.style,
+                            indent_guide_padding: indent_guides
+                                .as_ref()
+                                .map(|guides| {
+                                    Self::depth_zero_indent_guide_padding_for_row(
+                                        guides,
+                                        block_row_start,
+                                    )
+                                })
+                                .unwrap_or(px(0.0)),
+                        }),
+                    )
                     .into_any()
             }
 
@@ -4008,7 +4044,20 @@ impl EditorElement {
             }
 
             Block::Spacer { height, .. } => {
-                Self::render_spacer_block(block_id, *height, line_height, window, cx)
+                let indent_guide_padding = indent_guides
+                    .as_ref()
+                    .map(|guides| {
+                        Self::depth_zero_indent_guide_padding_for_row(guides, block_row_start)
+                    })
+                    .unwrap_or(px(0.0));
+                Self::render_spacer_block(
+                    block_id,
+                    *height,
+                    line_height,
+                    indent_guide_padding,
+                    window,
+                    cx,
+                )
             }
         };
 
@@ -4070,10 +4119,13 @@ impl EditorElement {
         Some((element, final_size, row, x_offset))
     }
 
-    /// The checkerboard pattern height must be an even factor of the line
-    /// height, so that two consecutive spacer blocks can render contiguously
-    /// without an obvious break in the pattern.
-    fn checkerboard_size(line_height: f32, target_height: f32) -> f32 {
+    /// The spacer pattern period must be an even factor of the line height, so
+    /// that two consecutive spacer blocks can render contiguously without an
+    /// obvious break in the pattern.
+    ///
+    /// Two consecutive spacers can appear when the other side has a diff hunk
+    /// and a custom block next to each other (e.g. merge conflict buttons).
+    fn spacer_pattern_period(line_height: f32, target_height: f32) -> f32 {
         let k_approx = line_height / (2.0 * target_height);
         let k_floor = (k_approx.floor() as u32).max(1);
         let k_ceil = (k_approx.ceil() as u32).max(1);
@@ -4092,24 +4144,40 @@ impl EditorElement {
         block_id: BlockId,
         block_height: u32,
         line_height: Pixels,
+        indent_guide_padding: Pixels,
         window: &mut Window,
         cx: &App,
     ) -> AnyElement {
+        let target_size = 16.0;
+        let scale = window.scale_factor();
+        let pattern_size =
+            Self::spacer_pattern_period(f32::from(line_height) * scale, target_size * scale);
+        let color = cx.theme().colors().panel_background;
+        let background = pattern_slash(color, 2.0, pattern_size - 2.0);
+
         div()
             .id(block_id)
+            .cursor(CursorStyle::Arrow)
             .w_full()
             .h((block_height as f32) * line_height)
-            // the checkerboard pattern is semi-transparent, so we render a
-            // solid background to prevent indent guides peeking through
-            .bg(cx.theme().colors().editor_background)
+            .flex()
+            .flex_row()
+            .child(div().flex_shrink_0().w(indent_guide_padding).h_full())
             .child(
                 div()
-                    .size_full()
-                    .bg(checkerboard(cx.theme().colors().panel_background, {
-                        let target_size = 16.0;
-                        let scale = window.scale_factor();
-                        Self::checkerboard_size(f32::from(line_height) * scale, target_size * scale)
-                    })),
+                    .flex_1()
+                    .h_full()
+                    .relative()
+                    .overflow_x_hidden()
+                    .child(
+                        div()
+                            .absolute()
+                            .top_0()
+                            .bottom_0()
+                            .right_0()
+                            .left(-indent_guide_padding)
+                            .bg(background),
+                    ),
             )
             .into_any()
     }
@@ -4154,6 +4222,7 @@ impl EditorElement {
         latest_selection_anchors: &HashMap<BufferId, Anchor>,
         is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
         sticky_header_excerpt_id: Option<ExcerptId>,
+        indent_guides: &Option<Vec<IndentGuideLayout>>,
         window: &mut Window,
         cx: &mut App,
     ) -> RenderBlocksOutput {
@@ -4166,6 +4235,7 @@ impl EditorElement {
             .update(cx, |editor, _| editor.take_focused_block());
         let mut fixed_block_max_width = Pixels::ZERO;
         let mut blocks = Vec::new();
+        let mut spacer_blocks = Vec::new();
         let mut resized_blocks = HashMap::default();
         let mut row_block_types = HashMap::default();
         let mut block_resize_offset: i32 = 0;
@@ -4199,6 +4269,7 @@ impl EditorElement {
                 latest_selection_anchors,
                 is_row_soft_wrapped,
                 sticky_header_excerpt_id,
+                indent_guides,
                 &mut block_resize_offset,
                 window,
                 cx,
@@ -4226,7 +4297,15 @@ impl EditorElement {
                     .size
                     .width
                     .max(fixed_block_max_width)
-                    .max(editor_margins.gutter.width + *scroll_width)
+                    .max(
+                        editor_margins.gutter.width + *scroll_width + editor_margins.extended_right,
+                    )
+                    .into(),
+                (BlockStyle::Spacer, _) => hitbox
+                    .size
+                    .width
+                    .max(fixed_block_max_width)
+                    .max(*scroll_width + editor_margins.extended_right)
                     .into(),
                 (BlockStyle::Fixed, _) => unreachable!(),
             };
@@ -4258,20 +4337,26 @@ impl EditorElement {
                 latest_selection_anchors,
                 is_row_soft_wrapped,
                 sticky_header_excerpt_id,
+                indent_guides,
                 &mut block_resize_offset,
                 window,
                 cx,
             ) {
-                blocks.push(BlockLayout {
+                let layout = BlockLayout {
                     id: block_id,
                     x_offset,
                     row: Some(row),
                     element,
                     available_space: size(width, element_size.height.into()),
                     style,
-                    overlaps_gutter: !block.place_near(),
+                    overlaps_gutter: !block.place_near() && style != BlockStyle::Spacer,
                     is_buffer_header: block.is_buffer_header(),
-                });
+                };
+                if style == BlockStyle::Spacer {
+                    spacer_blocks.push(layout);
+                } else {
+                    blocks.push(layout);
+                }
             }
         }
 
@@ -4283,12 +4368,17 @@ impl EditorElement {
             let style = block.style();
             let width = match style {
                 BlockStyle::Fixed => AvailableSpace::MinContent,
-                BlockStyle::Flex => AvailableSpace::Definite(
+                BlockStyle::Flex => {
+                    AvailableSpace::Definite(hitbox.size.width.max(fixed_block_max_width).max(
+                        editor_margins.gutter.width + *scroll_width + editor_margins.extended_right,
+                    ))
+                }
+                BlockStyle::Spacer => AvailableSpace::Definite(
                     hitbox
                         .size
                         .width
                         .max(fixed_block_max_width)
-                        .max(editor_margins.gutter.width + *scroll_width),
+                        .max(*scroll_width + editor_margins.extended_right),
                 ),
                 BlockStyle::Sticky => AvailableSpace::Definite(hitbox.size.width),
             };
@@ -4315,6 +4405,7 @@ impl EditorElement {
                 latest_selection_anchors,
                 is_row_soft_wrapped,
                 sticky_header_excerpt_id,
+                indent_guides,
                 &mut block_resize_offset,
                 window,
                 cx,
@@ -4338,7 +4429,8 @@ impl EditorElement {
         }
 
         RenderBlocksOutput {
-            blocks,
+            non_spacer_blocks: blocks,
+            spacer_blocks,
             row_block_types,
             resized_blocks: (!resized_blocks.is_empty()).then_some(resized_blocks),
         }
@@ -4348,9 +4440,11 @@ impl EditorElement {
         &self,
         blocks: &mut Vec<BlockLayout>,
         hitbox: &Hitbox,
+        gutter_hitbox: &Hitbox,
         line_height: Pixels,
         scroll_position: gpui::Point<ScrollOffset>,
         scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+        editor_margins: &EditorMargins,
         window: &mut Window,
         cx: &mut App,
     ) {
@@ -4369,6 +4463,13 @@ impl EditorElement {
                 hitbox.origin + point(Pixels::ZERO, hitbox.size.height)
             };
 
+            if block.style == BlockStyle::Spacer {
+                origin += point(
+                    gutter_hitbox.size.width + editor_margins.gutter.margin,
+                    Pixels::ZERO,
+                );
+            }
+
             if !matches!(block.style, BlockStyle::Sticky) {
                 origin += point(Pixels::from(-scroll_pixel_position.x), Pixels::ZERO);
             }
@@ -5008,7 +5109,7 @@ impl EditorElement {
                         current_position.y -= size.height;
                     }
                     let position = current_position;
-                    window.defer_draw(element, current_position, 1);
+                    window.defer_draw(element, current_position, 1, None);
                     if !y_flipped {
                         current_position.y += size.height + MENU_GAP;
                     } else {
@@ -5111,7 +5212,7 @@ impl EditorElement {
         // Skip drawing if it doesn't fit anywhere.
         if let Some((aside, position, size)) = positioned_aside {
             let aside_bounds = Bounds::new(position, size);
-            window.defer_draw(aside, position, 2);
+            window.defer_draw(aside, position, 2, None);
             return Some(aside_bounds);
         }
 
@@ -5320,7 +5421,7 @@ impl EditorElement {
                 .on_mouse_move(|_, _, cx| cx.stop_propagation())
                 .into_any_element();
             occlusion.layout_as_root(size(width, HOVER_POPOVER_GAP).into(), window, cx);
-            window.defer_draw(occlusion, origin, 2);
+            window.defer_draw(occlusion, origin, 2, None);
         }
 
         fn place_popovers_above(
@@ -5337,7 +5438,7 @@ impl EditorElement {
                     current_y - size.height,
                 );
 
-                window.defer_draw(popover.element, popover_origin, 2);
+                window.defer_draw(popover.element, popover_origin, 2, None);
                 if position != itertools::Position::Last {
                     let origin = point(popover_origin.x, popover_origin.y - HOVER_POPOVER_GAP);
                     draw_occluder(size.width, origin, window, cx);
@@ -5359,7 +5460,7 @@ impl EditorElement {
                 let size = popover.size;
                 let popover_origin = point(hovered_point.x + popover.horizontal_offset, current_y);
 
-                window.defer_draw(popover.element, popover_origin, 2);
+                window.defer_draw(popover.element, popover_origin, 2, None);
                 if position != itertools::Position::Last {
                     let origin = point(popover_origin.x, popover_origin.y + size.height);
                     draw_occluder(size.width, origin, window, cx);
@@ -5461,7 +5562,7 @@ impl EditorElement {
                     let size = popover.size;
                     let popover_origin = point(origin.x, current_y);
 
-                    window.defer_draw(popover.element, popover_origin, 2);
+                    window.defer_draw(popover.element, popover_origin, 2, None);
                     if position != itertools::Position::Last {
                         let origin = point(popover_origin.x, popover_origin.y + size.height);
                         draw_occluder(size.width, origin, window, cx);
@@ -5580,7 +5681,11 @@ impl EditorElement {
                     continue;
                 }
                 let row_ix = display_row_range.start.0.saturating_sub(row_range.start.0);
-                if row_infos[row_ix as usize].diff_status.is_none() {
+                if row_infos
+                    .get(row_ix as usize)
+                    .and_then(|row_info| row_info.diff_status)
+                    .is_none()
+                {
                     continue;
                 }
                 if highlighted_rows
@@ -5789,7 +5894,7 @@ impl EditorElement {
             })
         };
 
-        window.defer_draw(element, final_origin, 2);
+        window.defer_draw(element, final_origin, 2, None);
     }
 
     fn paint_background(&self, layout: &EditorLayout, window: &mut Window, cx: &mut App) {
@@ -6019,22 +6124,18 @@ impl EditorElement {
                 )),
             };
 
-            let requested_line_width = if indent_guide.active {
-                settings.active_line_width
-            } else {
-                settings.line_width
-            }
-            .clamp(1, 10);
             let mut line_indicator_width = 0.;
-            if let Some(color) = line_color {
-                window.paint_quad(fill(
-                    Bounds {
-                        origin: indent_guide.origin,
-                        size: size(px(requested_line_width as f32), indent_guide.length),
-                    },
-                    color,
-                ));
-                line_indicator_width = requested_line_width as f32;
+            if let Some(requested_line_width) = settings.visible_line_width(indent_guide.active) {
+                if let Some(color) = line_color {
+                    window.paint_quad(fill(
+                        Bounds {
+                            origin: indent_guide.origin,
+                            size: size(px(requested_line_width as f32), indent_guide.length),
+                        },
+                        color,
+                    ));
+                    line_indicator_width = requested_line_width as f32;
+                }
             }
 
             if let Some(color) = background_color {
@@ -7464,7 +7565,27 @@ impl EditorElement {
         }
     }
 
-    fn paint_blocks(&mut self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
+    fn paint_spacer_blocks(
+        &mut self,
+        layout: &mut EditorLayout,
+        window: &mut Window,
+        cx: &mut App,
+    ) {
+        for mut block in layout.spacer_blocks.drain(..) {
+            let mut bounds = layout.hitbox.bounds;
+            bounds.origin.x += layout.gutter_hitbox.bounds.size.width;
+            window.with_content_mask(Some(ContentMask { bounds }), |window| {
+                block.element.paint(window, cx);
+            })
+        }
+    }
+
+    fn paint_non_spacer_blocks(
+        &mut self,
+        layout: &mut EditorLayout,
+        window: &mut Window,
+        cx: &mut App,
+    ) {
         for mut block in layout.blocks.drain(..) {
             if block.overlaps_gutter {
                 block.element.paint(window, cx);
@@ -7950,7 +8071,7 @@ fn apply_dirty_filename_style(
     text_style: &gpui::TextStyle,
     cx: &App,
 ) -> Option<gpui::AnyElement> {
-    let text = segment.text.replace('\n', "⏎");
+    let text = segment.text.replace('\n', " ");
 
     let filename_position = std::path::Path::new(&segment.text)
         .file_name()
@@ -8134,7 +8255,7 @@ pub(crate) fn render_buffer_header(
 
     let header = div()
         .id(("buffer-header", for_excerpt.buffer_id.to_proto()))
-        .p_1()
+        .p(BUFFER_HEADER_PADDING)
         .w_full()
         .h(FILE_HEADER_HEIGHT as f32 * window.line_height())
         .child(
@@ -9587,11 +9708,12 @@ impl Element for EditorElement {
 
                     let right_margin = minimap_width + vertical_scrollbar_width;
 
-                    let editor_width =
-                        text_width - gutter_dimensions.margin - 2 * em_width - right_margin;
+                    let extended_right = 2 * em_width + right_margin;
+                    let editor_width = text_width - gutter_dimensions.margin - extended_right;
                     let editor_margins = EditorMargins {
                         gutter: gutter_dimensions,
                         right: right_margin,
+                        extended_right,
                     };
 
                     snapshot = self.editor.update(cx, |editor, cx| {
@@ -10212,6 +10334,26 @@ impl Element for EditorElement {
                     let sticky_header_excerpt_id =
                         sticky_header_excerpt.as_ref().map(|top| top.excerpt.id);
 
+                    let buffer = snapshot.buffer_snapshot();
+                    let start_buffer_row = MultiBufferRow(start_anchor.to_point(&buffer).row);
+                    let end_buffer_row = MultiBufferRow(end_anchor.to_point(&buffer).row);
+
+                    let preliminary_scroll_pixel_position = point(
+                        scroll_position.x * f64::from(em_layout_width),
+                        scroll_position.y * f64::from(line_height),
+                    );
+                    let indent_guides = self.layout_indent_guides(
+                        content_origin,
+                        text_hitbox.origin,
+                        start_buffer_row..end_buffer_row,
+                        preliminary_scroll_pixel_position,
+                        line_height,
+                        &snapshot,
+                        window,
+                        cx,
+                    );
+                    let indent_guides_for_spacers = indent_guides.clone();
+
                     let blocks = (!is_minimap)
                         .then(|| {
                             window.with_element_namespace("blocks", |window| {
@@ -10232,6 +10374,7 @@ impl Element for EditorElement {
                                     &latest_selection_anchors,
                                     is_row_soft_wrapped,
                                     sticky_header_excerpt_id,
+                                    &indent_guides_for_spacers,
                                     window,
                                     cx,
                                 )
@@ -10239,7 +10382,8 @@ impl Element for EditorElement {
                         })
                         .unwrap_or_default();
                     let RenderBlocksOutput {
-                        mut blocks,
+                        non_spacer_blocks: mut blocks,
+                        mut spacer_blocks,
                         row_block_types,
                         resized_blocks,
                     } = blocks;
@@ -10290,11 +10434,6 @@ impl Element for EditorElement {
                         None
                     };
 
-                    let start_buffer_row =
-                        MultiBufferRow(start_anchor.to_point(&snapshot.buffer_snapshot()).row);
-                    let end_buffer_row =
-                        MultiBufferRow(end_anchor.to_point(&snapshot.buffer_snapshot()).row);
-
                     let scroll_max: gpui::Point<ScrollPixelOffset> = point(
                         ScrollPixelOffset::from(
                             ((scroll_width - editor_width) / em_layout_width).max(0.0),
@@ -10355,16 +10494,21 @@ impl Element for EditorElement {
                             sticky_headers.as_ref().map_or(0, |h| h.lines.len()),
                         );
                     });
-                    let indent_guides = self.layout_indent_guides(
-                        content_origin,
-                        text_hitbox.origin,
-                        start_buffer_row..end_buffer_row,
-                        scroll_pixel_position,
-                        line_height,
-                        &snapshot,
-                        window,
-                        cx,
-                    );
+                    let indent_guides =
+                        if scroll_pixel_position != preliminary_scroll_pixel_position {
+                            self.layout_indent_guides(
+                                content_origin,
+                                text_hitbox.origin,
+                                start_buffer_row..end_buffer_row,
+                                scroll_pixel_position,
+                                line_height,
+                                &snapshot,
+                                window,
+                                cx,
+                            )
+                        } else {
+                            indent_guides
+                        };
 
                     let crease_trailers =
                         window.with_element_namespace("crease_trailers", |window| {
@@ -10503,9 +10647,22 @@ impl Element for EditorElement {
                         self.layout_blocks(
                             &mut blocks,
                             &hitbox,
+                            &gutter_hitbox,
+                            line_height,
+                            scroll_position,
+                            scroll_pixel_position,
+                            &editor_margins,
+                            window,
+                            cx,
+                        );
+                        self.layout_blocks(
+                            &mut spacer_blocks,
+                            &hitbox,
+                            &gutter_hitbox,
                             line_height,
                             scroll_position,
                             scroll_pixel_position,
+                            &editor_margins,
                             window,
                             cx,
                         );
@@ -10803,7 +10960,9 @@ impl Element for EditorElement {
                         .and_then(|headers| headers.lines.last())
                         .map_or(Pixels::ZERO, |last| last.offset + line_height);
 
-                    let sticky_header_height = if sticky_buffer_header.is_some() {
+                    let has_sticky_buffer_header =
+                        sticky_buffer_header.is_some() || sticky_header_excerpt_id.is_some();
+                    let sticky_header_height = if has_sticky_buffer_header {
                         let full_height = FILE_HEADER_HEIGHT as f32 * line_height;
                         let display_row = blocks
                             .iter()
@@ -10822,7 +10981,9 @@ impl Element for EditorElement {
                             }
                             None => full_height,
                         };
-                        sticky_scroll_header_height + offset
+                        let header_bottom_padding =
+                            BUFFER_HEADER_PADDING.to_pixels(window.rem_size());
+                        sticky_scroll_header_height + offset - header_bottom_padding
                     } else {
                         sticky_scroll_header_height
                     };
@@ -10900,6 +11061,7 @@ impl Element for EditorElement {
                         inline_blame_layout,
                         inline_code_actions,
                         blocks,
+                        spacer_blocks,
                         cursors,
                         visible_cursors,
                         selections,
@@ -10961,6 +11123,7 @@ impl Element for EditorElement {
                 window.with_content_mask(Some(ContentMask { bounds }), |window| {
                     self.paint_mouse_listeners(layout, window, cx);
                     self.paint_background(layout, window, cx);
+
                     self.paint_indent_guides(layout, window, cx);
 
                     if layout.gutter_hitbox.size.width > Pixels::ZERO {
@@ -10970,6 +11133,12 @@ impl Element for EditorElement {
 
                     self.paint_text(layout, window, cx);
 
+                    if !layout.spacer_blocks.is_empty() {
+                        window.with_element_namespace("blocks", |window| {
+                            self.paint_spacer_blocks(layout, window, cx);
+                        });
+                    }
+
                     if layout.gutter_hitbox.size.width > Pixels::ZERO {
                         self.paint_gutter_highlights(layout, window, cx);
                         self.paint_gutter_indicators(layout, window, cx);
@@ -10977,7 +11146,7 @@ impl Element for EditorElement {
 
                     if !layout.blocks.is_empty() {
                         window.with_element_namespace("blocks", |window| {
-                            self.paint_blocks(layout, window, cx);
+                            self.paint_non_spacer_blocks(layout, window, cx);
                         });
                     }
 
@@ -11079,6 +11248,7 @@ pub struct EditorLayout {
     inline_blame_layout: Option<InlineBlameLayout>,
     inline_code_actions: Option<AnyElement>,
     blocks: Vec<BlockLayout>,
+    spacer_blocks: Vec<BlockLayout>,
     highlighted_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
     highlighted_gutter_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
     redacted_ranges: Vec<Range<DisplayPoint>>,
@@ -11843,11 +12013,12 @@ pub fn layout_line(
     .unwrap()
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct IndentGuideLayout {
     origin: gpui::Point<Pixels>,
     length: Pixels,
     single_indent_width: Pixels,
+    display_row_range: Range<DisplayRow>,
     depth: u32,
     active: bool,
     settings: IndentGuideSettings,
@@ -13353,26 +13524,26 @@ mod tests {
     }
 
     #[test]
-    fn test_checkerboard_size() {
+    fn test_spacer_pattern_period() {
         // line height is smaller than target height, so we just return half the line height
-        assert_eq!(EditorElement::checkerboard_size(10.0, 20.0), 5.0);
+        assert_eq!(EditorElement::spacer_pattern_period(10.0, 20.0), 5.0);
 
         // line height is exactly half the target height, perfect match
-        assert_eq!(EditorElement::checkerboard_size(20.0, 10.0), 10.0);
+        assert_eq!(EditorElement::spacer_pattern_period(20.0, 10.0), 10.0);
 
         // line height is close to half the target height
-        assert_eq!(EditorElement::checkerboard_size(20.0, 9.0), 10.0);
+        assert_eq!(EditorElement::spacer_pattern_period(20.0, 9.0), 10.0);
 
         // line height is close to 1/4 the target height
-        assert_eq!(EditorElement::checkerboard_size(20.0, 4.8), 5.0);
+        assert_eq!(EditorElement::spacer_pattern_period(20.0, 4.8), 5.0);
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_random_checkerboard_size(mut rng: StdRng) {
+    fn test_random_spacer_pattern_period(mut rng: StdRng) {
         let line_height = rng.next_u32() as f32;
         let target_height = rng.next_u32() as f32;
 
-        let result = EditorElement::checkerboard_size(line_height, target_height);
+        let result = EditorElement::spacer_pattern_period(line_height, target_height);
 
         let k = line_height / result;
         assert!(k - k.round() < 0.0000001); // approximately integer

crates/editor/src/hover_links.rs 🔗

@@ -673,7 +673,7 @@ pub(crate) async fn find_file(
 // (literally, [LinkTitle](link_file.txt)) as a candidate.
 fn link_pattern_file_candidates(candidate: &str) -> Vec<(String, Range<usize>)> {
     static MD_LINK_REGEX: LazyLock<Regex> =
-        LazyLock::new(|| Regex::new(r"\(([^)]*)\)").expect("Failed to create REGEX"));
+        LazyLock::new(|| Regex::new(r"]\(([^)]*)\)").expect("Failed to create REGEX"));
 
     let candidate_len = candidate.len();
 
@@ -1444,14 +1444,26 @@ mod tests {
             candidates,
             vec!["LinkTitle](link\\ _file.txt)", "link\\ _file.txt",]
         );
-        //
-        // Square brackets not strictly necessary
+        // Parentheses without preceding `]` should not extract inner content,
+        // to avoid matching function calls like `do_work(file2)` as file paths.
         let candidates: Vec<String> = link_pattern_file_candidates("(link_file.txt)")
             .into_iter()
             .map(|(c, _)| c)
             .collect();
+        assert_eq!(candidates, vec!["(link_file.txt)"]);
 
-        assert_eq!(candidates, vec!["(link_file.txt)", "link_file.txt",]);
+        let candidates: Vec<String> = link_pattern_file_candidates("do_work(file2);")
+            .into_iter()
+            .map(|(c, _)| c)
+            .collect();
+        assert_eq!(candidates, vec!["do_work(file2);"]);
+
+        // Markdown links should still extract the path
+        let candidates: Vec<String> = link_pattern_file_candidates("](readme.md)")
+            .into_iter()
+            .map(|(c, _)| c)
+            .collect();
+        assert_eq!(candidates, vec!["](readme.md)", "readme.md"]);
 
         // No nesting
         let candidates: Vec<String> =

crates/editor/src/inlays.rs 🔗

@@ -58,10 +58,12 @@ pub enum InlayContent {
 impl Inlay {
     pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self {
         let mut text = hint.text();
-        if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') {
+        let needs_right_padding = hint.padding_right && !text.ends_with(" ");
+        let needs_left_padding = hint.padding_left && !text.starts_with(" ");
+        if needs_right_padding {
             text.push(" ");
         }
-        if hint.padding_left && text.chars_at(0).next() != Some(' ') {
+        if needs_left_padding {
             text.push_front(" ");
         }
         Self {

crates/editor/src/inlays/inlay_hints.rs 🔗

@@ -27,6 +27,7 @@ use util::debug_panic;
 use super::{Inlay, InlayId};
 use crate::{
     Editor, EditorSnapshot, PointForPosition, ToggleInlayHints, ToggleInlineValues, debounce_value,
+    display_map::{DisplayMap, InlayOffset},
     hover_links::{InlayHighlight, TriggerPoint, show_link_definition},
     hover_popover::{self, InlayHover},
     inlays::InlaySplice,
@@ -104,13 +105,34 @@ impl LspInlayHintData {
         self.added_hints.clear();
     }
 
+    /// Like `clear`, but only wipes tracking state for the given buffer IDs.
+    /// Hints belonging to other buffers are left intact so they are neither
+    /// re-fetched nor duplicated on the next `NewLinesShown`.
+    pub fn clear_for_buffers(
+        &mut self,
+        buffer_ids: &HashSet<BufferId>,
+        current_hints: impl IntoIterator<Item = Inlay>,
+    ) {
+        for buffer_id in buffer_ids {
+            self.hint_refresh_tasks.remove(buffer_id);
+            self.hint_chunk_fetching.remove(buffer_id);
+        }
+        for hint in current_hints {
+            if let Some(buffer_id) = hint.position.text_anchor.buffer_id {
+                if buffer_ids.contains(&buffer_id) {
+                    self.added_hints.remove(&hint.id);
+                }
+            }
+        }
+    }
+
     /// Checks inlay hint settings for enabled hint kinds and general enabled state.
     /// Generates corresponding inlay_map splice updates on settings changes.
     /// Does not update inlay hint cache state on disabling or inlay hint kinds change: only reenabling forces new LSP queries.
     fn update_settings(
         &mut self,
         new_hint_settings: InlayHintSettings,
-        visible_hints: Vec<Inlay>,
+        visible_hints: impl IntoIterator<Item = Inlay>,
     ) -> ControlFlow<Option<InlaySplice>, Option<InlaySplice>> {
         let old_enabled = self.enabled;
         // If the setting for inlay hints has changed, update `enabled`. This condition avoids inlay
@@ -140,7 +162,7 @@ impl LspInlayHintData {
                     ControlFlow::Continue(
                         Some(InlaySplice {
                             to_remove: visible_hints
-                                .iter()
+                                .into_iter()
                                 .filter_map(|inlay| {
                                     let inlay_kind = self.added_hints.get(&inlay.id).copied()?;
                                     if !self.allowed_hint_kinds.contains(&inlay_kind) {
@@ -159,12 +181,13 @@ impl LspInlayHintData {
             (true, false) => {
                 self.modifiers_override = false;
                 self.allowed_hint_kinds = new_allowed_hint_kinds;
-                if visible_hints.is_empty() {
+                let mut visible_hints = visible_hints.into_iter().peekable();
+                if visible_hints.peek().is_none() {
                     ControlFlow::Break(None)
                 } else {
                     self.clear();
                     ControlFlow::Break(Some(InlaySplice {
-                        to_remove: visible_hints.iter().map(|inlay| inlay.id).collect(),
+                        to_remove: visible_hints.map(|inlay| inlay.id).collect(),
                         to_insert: Vec::new(),
                     }))
                 }
@@ -175,7 +198,7 @@ impl LspInlayHintData {
                 ControlFlow::Continue(
                     Some(InlaySplice {
                         to_remove: visible_hints
-                            .iter()
+                            .into_iter()
                             .filter_map(|inlay| {
                                 let inlay_kind = self.added_hints.get(&inlay.id).copied()?;
                                 if !self.allowed_hint_kinds.contains(&inlay_kind) {
@@ -338,12 +361,20 @@ impl Editor {
         };
 
         let multi_buffer = self.buffer().clone();
+
         let Some(inlay_hints) = self.inlay_hints.as_mut() else {
             return;
         };
 
         if invalidate_cache.should_invalidate() {
-            inlay_hints.clear();
+            if invalidate_hints_for_buffers.is_empty() {
+                inlay_hints.clear();
+            } else {
+                inlay_hints.clear_for_buffers(
+                    &invalidate_hints_for_buffers,
+                    Self::visible_inlay_hints(self.display_map.read(cx)),
+                );
+            }
         }
         inlay_hints
             .invalidate_hints_for_buffers
@@ -420,16 +451,8 @@ impl Editor {
     }
 
     pub fn clear_inlay_hints(&mut self, cx: &mut Context<Self>) {
-        let to_remove = self
-            .visible_inlay_hints(cx)
-            .into_iter()
-            .map(|inlay| {
-                let inlay_id = inlay.id;
-                if let Some(inlay_hints) = &mut self.inlay_hints {
-                    inlay_hints.added_hints.remove(&inlay_id);
-                }
-                inlay_id
-            })
+        let to_remove = Self::visible_inlay_hints(self.display_map.read(cx))
+            .map(|inlay| inlay.id)
             .collect::<Vec<_>>();
         self.splice_inlays(&to_remove, Vec::new(), cx);
     }
@@ -439,7 +462,6 @@ impl Editor {
         reason: &InlayHintRefreshReason,
         cx: &mut Context<'_, Editor>,
     ) -> Option<InvalidationStrategy> {
-        let visible_inlay_hints = self.visible_inlay_hints(cx);
         let Some(inlay_hints) = self.inlay_hints.as_mut() else {
             return None;
         };
@@ -471,6 +493,8 @@ impl Editor {
                 }
             }
             InlayHintRefreshReason::SettingsChange(new_settings) => {
+                let visible_inlay_hints =
+                    Self::visible_inlay_hints(self.display_map.read(cx)).collect::<Vec<_>>();
                 match inlay_hints.update_settings(*new_settings, visible_inlay_hints) {
                     ControlFlow::Break(Some(InlaySplice {
                         to_remove,
@@ -534,13 +558,11 @@ impl Editor {
         Some(invalidate_cache)
     }
 
-    pub(crate) fn visible_inlay_hints(&self, cx: &Context<Editor>) -> Vec<Inlay> {
-        self.display_map
-            .read(cx)
+    fn visible_inlay_hints(display_map: &DisplayMap) -> impl Iterator<Item = Inlay> + use<'_> {
+        display_map
             .current_inlays()
             .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_)))
             .cloned()
-            .collect()
     }
 
     pub fn update_inlay_link_and_hover_points(
@@ -575,9 +597,8 @@ impl Editor {
                 point_for_position.next_valid.to_point(snapshot),
                 Bias::Right,
             );
-            if let Some(hovered_hint) = self
-                .visible_inlay_hints(cx)
-                .into_iter()
+            if let Some(hovered_hint) = Self::visible_inlay_hints(self.display_map.read(cx))
+                .filter(|hint| snapshot.can_resolve(&hint.position))
                 .skip_while(|hint| {
                     hint.position
                         .cmp(&previous_valid_anchor, &buffer_snapshot)
@@ -602,15 +623,19 @@ impl Editor {
                 {
                     match cached_hint.resolve_state {
                         ResolveState::Resolved => {
-                            let mut extra_shift_left = 0;
-                            let mut extra_shift_right = 0;
-                            if cached_hint.padding_left {
-                                extra_shift_left += 1;
-                                extra_shift_right += 1;
-                            }
-                            if cached_hint.padding_right {
-                                extra_shift_right += 1;
-                            }
+                            let original_text = cached_hint.text();
+                            let actual_left_padding =
+                                if cached_hint.padding_left && !original_text.starts_with(" ") {
+                                    1
+                                } else {
+                                    0
+                                };
+                            let actual_right_padding =
+                                if cached_hint.padding_right && !original_text.ends_with(" ") {
+                                    1
+                                } else {
+                                    0
+                                };
                             match cached_hint.label {
                                 InlayHintLabel::String(_) => {
                                     if let Some(tooltip) = cached_hint.tooltip {
@@ -632,9 +657,9 @@ impl Editor {
                                                 range: InlayHighlight {
                                                     inlay: hovered_hint.id,
                                                     inlay_position: hovered_hint.position,
-                                                    range: extra_shift_left
+                                                    range: actual_left_padding
                                                         ..hovered_hint.text().len()
-                                                            + extra_shift_right,
+                                                            - actual_right_padding,
                                                 },
                                             },
                                             window,
@@ -646,17 +671,17 @@ impl Editor {
                                 InlayHintLabel::LabelParts(label_parts) => {
                                     let hint_start =
                                         snapshot.anchor_to_inlay_offset(hovered_hint.position);
+                                    let content_start =
+                                        InlayOffset(hint_start.0 + actual_left_padding);
                                     if let Some((hovered_hint_part, part_range)) =
                                         hover_popover::find_hovered_hint_part(
                                             label_parts,
-                                            hint_start,
+                                            content_start,
                                             hovered_offset,
                                         )
                                     {
-                                        let highlight_start =
-                                            (part_range.start - hint_start) + extra_shift_left;
-                                        let highlight_end =
-                                            (part_range.end - hint_start) + extra_shift_right;
+                                        let highlight_start = part_range.start - hint_start;
+                                        let highlight_end = part_range.end - hint_start;
                                         let highlight = InlayHighlight {
                                             inlay: hovered_hint.id,
                                             inlay_position: hovered_hint.position,
@@ -763,9 +788,7 @@ impl Editor {
         new_hints: Vec<(Range<BufferRow>, anyhow::Result<CacheInlayHints>)>,
         cx: &mut Context<Self>,
     ) {
-        let visible_inlay_hint_ids = self
-            .visible_inlay_hints(cx)
-            .iter()
+        let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx))
             .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id))
             .map(|inlay| inlay.id)
             .collect::<Vec<_>>();
@@ -794,6 +817,18 @@ impl Editor {
         // from the cache.
         if invalidate_cache.should_invalidate() {
             hints_to_remove.extend(visible_inlay_hint_ids);
+
+            // When invalidating, this task removes ALL visible hints for the buffer
+            // but only adds back hints for its own chunk ranges. Chunks fetched by
+            // other concurrent tasks (e.g., a scroll task that completed before this
+            // edit task) would have their hints removed but remain marked as "already
+            // fetched" in hint_chunk_fetching, preventing re-fetch on the next
+            // NewLinesShown. Fix: retain only chunks that this task has results for.
+            let task_chunk_ranges: HashSet<&Range<BufferRow>> =
+                new_hints.iter().map(|(range, _)| range).collect();
+            if let Some((_, fetched_chunks)) = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id) {
+                fetched_chunks.retain(|chunk| task_chunk_ranges.contains(chunk));
+            }
         }
 
         let mut inserted_hint_text = HashMap::default();
@@ -874,8 +909,7 @@ impl Editor {
             std::mem::take(&mut inlay_hints.invalidate_hints_for_buffers);
         if !invalidate_hints_for_buffers.is_empty() {
             hints_to_remove.extend(
-                self.visible_inlay_hints(cx)
-                    .iter()
+                Self::visible_inlay_hints(self.display_map.read(cx))
                     .filter(|inlay| {
                         inlay
                             .position
@@ -959,9 +993,9 @@ fn spawn_editor_hints_refresh(
 pub mod tests {
     use crate::editor_tests::update_test_language_settings;
     use crate::inlays::inlay_hints::InlayHintRefreshReason;
+    use crate::scroll::Autoscroll;
     use crate::scroll::ScrollAmount;
     use crate::{Editor, SelectionEffects};
-    use crate::{ExcerptRange, scroll::Autoscroll};
     use collections::HashSet;
     use futures::{StreamExt, future};
     use gpui::{AppContext as _, Context, TestAppContext, WindowHandle};
@@ -971,7 +1005,7 @@ pub mod tests {
     use language::{Language, LanguageConfig, LanguageMatcher};
     use languages::rust_lang;
     use lsp::{DEFAULT_LSP_REQUEST_TIMEOUT, FakeLanguageServer};
-    use multi_buffer::{MultiBuffer, MultiBufferOffset};
+    use multi_buffer::{MultiBuffer, MultiBufferOffset, PathKey};
     use parking_lot::Mutex;
     use pretty_assertions::assert_eq;
     use project::{FakeFs, Project};
@@ -2321,28 +2355,32 @@ pub mod tests {
             .unwrap();
         let multibuffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_1.clone(),
                 [
-                    ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0)),
-                    ExcerptRange::new(Point::new(4, 0)..Point::new(11, 0)),
-                    ExcerptRange::new(Point::new(22, 0)..Point::new(33, 0)),
-                    ExcerptRange::new(Point::new(44, 0)..Point::new(55, 0)),
-                    ExcerptRange::new(Point::new(56, 0)..Point::new(66, 0)),
-                    ExcerptRange::new(Point::new(67, 0)..Point::new(77, 0)),
+                    Point::new(0, 0)..Point::new(2, 0),
+                    Point::new(4, 0)..Point::new(11, 0),
+                    Point::new(22, 0)..Point::new(33, 0),
+                    Point::new(44, 0)..Point::new(55, 0),
+                    Point::new(56, 0)..Point::new(66, 0),
+                    Point::new(67, 0)..Point::new(77, 0),
                 ],
+                0,
                 cx,
             );
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_2.clone(),
                 [
-                    ExcerptRange::new(Point::new(0, 1)..Point::new(2, 1)),
-                    ExcerptRange::new(Point::new(4, 1)..Point::new(11, 1)),
-                    ExcerptRange::new(Point::new(22, 1)..Point::new(33, 1)),
-                    ExcerptRange::new(Point::new(44, 1)..Point::new(55, 1)),
-                    ExcerptRange::new(Point::new(56, 1)..Point::new(66, 1)),
-                    ExcerptRange::new(Point::new(67, 1)..Point::new(77, 1)),
+                    Point::new(0, 1)..Point::new(2, 1),
+                    Point::new(4, 1)..Point::new(11, 1),
+                    Point::new(22, 1)..Point::new(33, 1),
+                    Point::new(44, 1)..Point::new(55, 1),
+                    Point::new(56, 1)..Point::new(66, 1),
+                    Point::new(67, 1)..Point::new(77, 1),
                 ],
+                0,
                 cx,
             );
             multibuffer
@@ -2732,19 +2770,21 @@ let c = 3;"#
             .unwrap();
         let multi_buffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_1.clone(),
                 [
-                    // Have first excerpt to spawn over 2 chunks (50 lines each).
-                    ExcerptRange::new(Point::new(49, 0)..Point::new(53, 0)),
-                    // Have 2nd excerpt to be in the 2nd chunk only.
-                    ExcerptRange::new(Point::new(70, 0)..Point::new(73, 0)),
+                    Point::new(49, 0)..Point::new(53, 0),
+                    Point::new(70, 0)..Point::new(73, 0),
                 ],
+                0,
                 cx,
             );
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_2.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(4, 0))],
+                [Point::new(0, 0)..Point::new(4, 0)],
+                0,
                 cx,
             );
             multibuffer
@@ -2930,16 +2970,23 @@ let c = 3;"#
             .unwrap();
         let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
         let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| {
-            let buffer_1_excerpts = multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_1.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
+                [Point::new(0, 0)..Point::new(2, 0)],
+                0,
                 cx,
             );
-            let buffer_2_excerpts = multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_2.clone(),
-                [ExcerptRange::new(Point::new(0, 1)..Point::new(2, 1))],
+                [Point::new(0, 1)..Point::new(2, 1)],
+                0,
                 cx,
             );
+            let excerpt_ids = multibuffer.excerpt_ids();
+            let buffer_1_excerpts = vec![excerpt_ids[0]];
+            let buffer_2_excerpts = vec![excerpt_ids[1]];
             (buffer_1_excerpts, buffer_2_excerpts)
         });
 
@@ -3046,7 +3093,7 @@ let c = 3;"#
         editor
             .update(cx, |editor, _, cx| {
                 editor.buffer().update(cx, |multibuffer, cx| {
-                    multibuffer.remove_excerpts(buffer_2_excerpts, cx)
+                    multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx);
                 })
             })
             .unwrap();
@@ -4000,20 +4047,24 @@ let c = 3;"#
             .unwrap();
         let multi_buffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_2.clone(),
                 [
-                    ExcerptRange::new(Point::new(0, 0)..Point::new(10, 0)),
-                    ExcerptRange::new(Point::new(23, 0)..Point::new(34, 0)),
+                    Point::new(0, 0)..Point::new(10, 0),
+                    Point::new(23, 0)..Point::new(34, 0),
                 ],
+                0,
                 cx,
             );
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_1.clone(),
                 [
-                    ExcerptRange::new(Point::new(0, 0)..Point::new(10, 0)),
-                    ExcerptRange::new(Point::new(13, 0)..Point::new(23, 0)),
+                    Point::new(0, 0)..Point::new(10, 0),
+                    Point::new(13, 0)..Point::new(23, 0),
                 ],
+                0,
                 cx,
             );
             multibuffer
@@ -4137,6 +4188,613 @@ let c = 3;"#
         );
     }
 
+    #[gpui::test]
+    async fn test_edit_then_scroll_race(cx: &mut gpui::TestAppContext) {
+        // Bug 1: An edit fires with a long debounce, and a scroll brings new lines
+        // before that debounce elapses. The edit task's apply_fetched_hints removes
+        // ALL visible hints (including the scroll-added ones) but only adds back
+        // hints for its own chunks. The scroll chunk remains in hint_chunk_fetching,
+        // so it is never re-queried, leaving it permanently empty.
+        init_test(cx, &|settings| {
+            settings.defaults.inlay_hints = Some(InlayHintSettingsContent {
+                enabled: Some(true),
+                edit_debounce_ms: Some(700),
+                scroll_debounce_ms: Some(50),
+                show_type_hints: Some(true),
+                show_parameter_hints: Some(true),
+                show_other_hints: Some(true),
+                ..InlayHintSettingsContent::default()
+            })
+        });
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        let mut file_content = String::from("fn main() {\n");
+        for i in 0..150 {
+            file_content.push_str(&format!("    let v{i} = {i};\n"));
+        }
+        file_content.push_str("}\n");
+        fs.insert_tree(
+            path!("/a"),
+            json!({
+                "main.rs": file_content,
+                "other.rs": "// Test file",
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+        language_registry.add(rust_lang());
+
+        let lsp_request_ranges = Arc::new(Mutex::new(Vec::new()));
+        let mut fake_servers = language_registry.register_fake_lsp(
+            "Rust",
+            FakeLspAdapter {
+                capabilities: lsp::ServerCapabilities {
+                    inlay_hint_provider: Some(lsp::OneOf::Left(true)),
+                    ..lsp::ServerCapabilities::default()
+                },
+                initializer: Some(Box::new({
+                    let lsp_request_ranges = lsp_request_ranges.clone();
+                    move |fake_server| {
+                        let lsp_request_ranges = lsp_request_ranges.clone();
+                        fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>(
+                            move |params, _| {
+                                let lsp_request_ranges = lsp_request_ranges.clone();
+                                async move {
+                                    lsp_request_ranges.lock().push(params.range);
+                                    let start_line = params.range.start.line;
+                                    Ok(Some(vec![lsp::InlayHint {
+                                        position: lsp::Position::new(start_line + 1, 9),
+                                        label: lsp::InlayHintLabel::String(format!(
+                                            "chunk_{start_line}"
+                                        )),
+                                        kind: Some(lsp::InlayHintKind::TYPE),
+                                        text_edits: None,
+                                        tooltip: None,
+                                        padding_left: None,
+                                        padding_right: None,
+                                        data: None,
+                                    }]))
+                                }
+                            },
+                        );
+                    }
+                })),
+                ..FakeLspAdapter::default()
+            },
+        );
+
+        let buffer = project
+            .update(cx, |project, cx| {
+                project.open_local_buffer(path!("/a/main.rs"), cx)
+            })
+            .await
+            .unwrap();
+        let editor =
+            cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx));
+        cx.executor().run_until_parked();
+        let _fake_server = fake_servers.next().await.unwrap();
+
+        editor
+            .update(cx, |editor, window, cx| {
+                editor.set_visible_line_count(50.0, window, cx);
+                editor.set_visible_column_count(120.0);
+                editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                assert!(
+                    visible.iter().any(|h| h.starts_with("chunk_0")),
+                    "Should have chunk_0 hints initially, got: {visible:?}"
+                );
+            })
+            .unwrap();
+
+        lsp_request_ranges.lock().clear();
+
+        // Step 1: Make an edit → fires BufferEdited, spawning a fetch gated by the 700ms edit debounce.
+        editor
+            .update(cx, |editor, window, cx| {
+                editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+                    s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)])
+                });
+                editor.handle_input("x", window, cx);
+            })
+            .unwrap();
+        // Let the BufferEdited event propagate and the edit task get spawned.
+        cx.executor().run_until_parked();
+
+        // Step 2: Scroll down to reveal a new chunk, then trigger NewLinesShown.
+        // This spawns a scroll task with the shorter 50ms debounce.
+        editor
+            .update(cx, |editor, window, cx| {
+                editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx);
+            })
+            .unwrap();
+        // Explicitly trigger NewLinesShown so the newly revealed chunk gets fetched.
+        editor
+            .update(cx, |editor, _window, cx| {
+                editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+            })
+            .unwrap();
+
+        // Step 3: Advance clock past scroll debounce (50ms) but NOT past edit
+        // debounce (700ms). The scroll task completes and adds hints for the
+        // new chunk.
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        // The scroll task's apply_fetched_hints also processes
+        // invalidate_hints_for_buffers (set by the earlier BufferEdited), which
+        // removes the old chunk_0 hint. Only the scroll chunk's hint remains.
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                assert!(
+                    visible.iter().any(|h| h.starts_with("chunk_50")),
+                    "After scroll task completes, the scroll chunk's hints should be \
+                     present, got: {visible:?}"
+                );
+            })
+            .unwrap();
+
+        // Step 4: Advance clock past the edit debounce (700ms). The edit task
+        // completes, calling apply_fetched_hints with should_invalidate()=true,
+        // which removes ALL visible hints (including the scroll chunk's) but only
+        // adds back hints for its own chunks (chunk_0).
+        cx.executor().advance_clock(Duration::from_millis(700));
+        cx.executor().run_until_parked();
+
+        // At this point the edit task has:
+        //   - removed chunk_50's hint (via should_invalidate removing all visible)
+        //   - added chunk_0's hint (from its own fetch)
+        //   - (with fix) cleared chunk_50 from hint_chunk_fetching
+        // Without the fix, chunk_50 is stuck in hint_chunk_fetching and will
+        // never be re-queried by NewLinesShown.
+
+        // Step 5: Trigger NewLinesShown to give the system a chance to re-fetch
+        // any chunks whose hints were lost.
+        editor
+            .update(cx, |editor, _window, cx| {
+                editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                assert!(
+                    visible.iter().any(|h| h.starts_with("chunk_0")),
+                    "chunk_0 hints (from edit task) should be present. Got: {visible:?}"
+                );
+                assert!(
+                    visible.iter().any(|h| h.starts_with("chunk_50")),
+                    "chunk_50 hints should have been re-fetched after NewLinesShown. \
+                     Bug 1: the scroll chunk's hints were removed by the edit task \
+                     and the chunk was stuck in hint_chunk_fetching, preventing \
+                     re-fetch. Got: {visible:?}"
+                );
+            })
+            .unwrap();
+    }
+
+    #[gpui::test]
+    async fn test_refresh_requested_multi_server(cx: &mut gpui::TestAppContext) {
+        // Bug 2: When one LSP server sends workspace/inlayHint/refresh, the editor
+        // wipes all tracking state via clear(), then spawns tasks that call
+        // LspStore::inlay_hints with for_server=Some(requesting_server). The LspStore
+        // filters out other servers' cached hints via the for_server guard, so only
+        // the requesting server's hints are returned. apply_fetched_hints removes ALL
+        // visible hints (should_invalidate()=true) but only adds back the requesting
+        // server's hints. Other servers' hints disappear permanently.
+        init_test(cx, &|settings| {
+            settings.defaults.inlay_hints = Some(InlayHintSettingsContent {
+                enabled: Some(true),
+                edit_debounce_ms: Some(0),
+                scroll_debounce_ms: Some(0),
+                show_type_hints: Some(true),
+                show_parameter_hints: Some(true),
+                show_other_hints: Some(true),
+                ..InlayHintSettingsContent::default()
+            })
+        });
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/a"),
+            json!({
+                "main.rs": "fn main() { let x = 1; } // padding to keep hints from being trimmed",
+                "other.rs": "// Test file",
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+        language_registry.add(rust_lang());
+
+        // Server A ("rust-analyzer") returns one hint labeled "server_a_<count>" at column 9.
+        let server_a_request_count = Arc::new(AtomicU32::new(0));
+        let mut fake_servers_a = language_registry.register_fake_lsp(
+            "Rust",
+            FakeLspAdapter {
+                name: "rust-analyzer",
+                capabilities: lsp::ServerCapabilities {
+                    inlay_hint_provider: Some(lsp::OneOf::Left(true)),
+                    ..lsp::ServerCapabilities::default()
+                },
+                initializer: Some(Box::new({
+                    let server_a_request_count = server_a_request_count.clone();
+                    move |fake_server| {
+                        let server_a_request_count = server_a_request_count.clone();
+                        fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>(
+                            move |_params, _| {
+                                let count =
+                                    server_a_request_count.fetch_add(1, Ordering::Release) + 1;
+                                async move {
+                                    Ok(Some(vec![lsp::InlayHint {
+                                        position: lsp::Position::new(0, 9),
+                                        label: lsp::InlayHintLabel::String(format!(
+                                            "server_a_{count}"
+                                        )),
+                                        kind: Some(lsp::InlayHintKind::TYPE),
+                                        text_edits: None,
+                                        tooltip: None,
+                                        padding_left: None,
+                                        padding_right: None,
+                                        data: None,
+                                    }]))
+                                }
+                            },
+                        );
+                    }
+                })),
+                ..FakeLspAdapter::default()
+            },
+        );
+
+        // Server B ("secondary-ls") returns one hint labeled "server_b_<count>" at column 22,
+        let server_b_request_count = Arc::new(AtomicU32::new(0));
+        let mut fake_servers_b = language_registry.register_fake_lsp(
+            "Rust",
+            FakeLspAdapter {
+                name: "secondary-ls",
+                capabilities: lsp::ServerCapabilities {
+                    inlay_hint_provider: Some(lsp::OneOf::Left(true)),
+                    ..lsp::ServerCapabilities::default()
+                },
+                initializer: Some(Box::new({
+                    let server_b_request_count = server_b_request_count.clone();
+                    move |fake_server| {
+                        let server_b_request_count = server_b_request_count.clone();
+                        fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>(
+                            move |_params, _| {
+                                let count =
+                                    server_b_request_count.fetch_add(1, Ordering::Release) + 1;
+                                async move {
+                                    Ok(Some(vec![lsp::InlayHint {
+                                        position: lsp::Position::new(0, 22),
+                                        label: lsp::InlayHintLabel::String(format!(
+                                            "server_b_{count}"
+                                        )),
+                                        kind: Some(lsp::InlayHintKind::TYPE),
+                                        text_edits: None,
+                                        tooltip: None,
+                                        padding_left: None,
+                                        padding_right: None,
+                                        data: None,
+                                    }]))
+                                }
+                            },
+                        );
+                    }
+                })),
+                ..FakeLspAdapter::default()
+            },
+        );
+
+        let (buffer, _buffer_handle) = project
+            .update(cx, |project, cx| {
+                project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
+            })
+            .await
+            .unwrap();
+        let editor =
+            cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx));
+        cx.executor().run_until_parked();
+
+        let fake_server_a = fake_servers_a.next().await.unwrap();
+        let _fake_server_b = fake_servers_b.next().await.unwrap();
+
+        editor
+            .update(cx, |editor, window, cx| {
+                editor.set_visible_line_count(50.0, window, cx);
+                editor.set_visible_column_count(120.0);
+                editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        // Verify both servers' hints are present initially.
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                let has_a = visible.iter().any(|h| h.starts_with("server_a"));
+                let has_b = visible.iter().any(|h| h.starts_with("server_b"));
+                assert!(
+                    has_a && has_b,
+                    "Both servers should have hints initially. Got: {visible:?}"
+                );
+            })
+            .unwrap();
+
+        // Trigger RefreshRequested from server A. This should re-fetch server A's
+        // hints while keeping server B's hints intact.
+        editor
+            .update(cx, |editor, _window, cx| {
+                editor.refresh_inlay_hints(
+                    InlayHintRefreshReason::RefreshRequested {
+                        server_id: fake_server_a.server.server_id(),
+                        request_id: Some(1),
+                    },
+                    cx,
+                );
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        // Also trigger NewLinesShown to give the system a chance to recover
+        // any chunks that might have been cleared.
+        editor
+            .update(cx, |editor, _window, cx| {
+                editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+            })
+            .unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                let has_a = visible.iter().any(|h| h.starts_with("server_a"));
+                let has_b = visible.iter().any(|h| h.starts_with("server_b"));
+                assert!(
+                    has_a,
+                    "Server A hints should be present after its own refresh. Got: {visible:?}"
+                );
+                assert!(
+                    has_b,
+                    "Server B hints should NOT be lost when server A triggers \
+                     RefreshRequested. Bug 2: clear() wipes all tracking, then \
+                     LspStore filters out server B's cached hints via the for_server \
+                     guard, and apply_fetched_hints removes all visible hints but only \
+                     adds back server A's. Got: {visible:?}"
+                );
+            })
+            .unwrap();
+    }
+
+    #[gpui::test]
+    async fn test_multi_language_multibuffer_no_duplicate_hints(cx: &mut gpui::TestAppContext) {
+        init_test(cx, &|settings| {
+            settings.defaults.inlay_hints = Some(InlayHintSettingsContent {
+                show_value_hints: Some(true),
+                enabled: Some(true),
+                edit_debounce_ms: Some(0),
+                scroll_debounce_ms: Some(0),
+                show_type_hints: Some(true),
+                show_parameter_hints: Some(true),
+                show_other_hints: Some(true),
+                show_background: Some(false),
+                toggle_on_modifiers_press: None,
+            })
+        });
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/a"),
+            json!({
+                "main.rs": "fn main() { let x = 1; } // padding to keep hints from being trimmed",
+                "index.ts": "const y = 2; // padding to keep hints from being trimmed in typescript",
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+
+        let mut rs_fake_servers = None;
+        let mut ts_fake_servers = None;
+        for (name, path_suffix) in [("Rust", "rs"), ("TypeScript", "ts")] {
+            language_registry.add(Arc::new(Language::new(
+                LanguageConfig {
+                    name: name.into(),
+                    matcher: LanguageMatcher {
+                        path_suffixes: vec![path_suffix.to_string()],
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                },
+                Some(tree_sitter_rust::LANGUAGE.into()),
+            )));
+            let fake_servers = language_registry.register_fake_lsp(
+                name,
+                FakeLspAdapter {
+                    name,
+                    capabilities: lsp::ServerCapabilities {
+                        inlay_hint_provider: Some(lsp::OneOf::Left(true)),
+                        ..Default::default()
+                    },
+                    initializer: Some(Box::new({
+                        move |fake_server| {
+                            let request_count = Arc::new(AtomicU32::new(0));
+                            fake_server
+                                .set_request_handler::<lsp::request::InlayHintRequest, _, _>(
+                                    move |params, _| {
+                                        let count =
+                                            request_count.fetch_add(1, Ordering::Release) + 1;
+                                        let prefix = match name {
+                                            "Rust" => "rs_hint",
+                                            "TypeScript" => "ts_hint",
+                                            other => panic!("Unexpected language: {other}"),
+                                        };
+                                        async move {
+                                            Ok(Some(vec![lsp::InlayHint {
+                                                position: params.range.start,
+                                                label: lsp::InlayHintLabel::String(format!(
+                                                    "{prefix}_{count}"
+                                                )),
+                                                kind: None,
+                                                text_edits: None,
+                                                tooltip: None,
+                                                padding_left: None,
+                                                padding_right: None,
+                                                data: None,
+                                            }]))
+                                        }
+                                    },
+                                );
+                        }
+                    })),
+                    ..Default::default()
+                },
+            );
+            match name {
+                "Rust" => rs_fake_servers = Some(fake_servers),
+                "TypeScript" => ts_fake_servers = Some(fake_servers),
+                _ => unreachable!(),
+            }
+        }
+
+        let (rs_buffer, _rs_handle) = project
+            .update(cx, |project, cx| {
+                project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
+            })
+            .await
+            .unwrap();
+        let (ts_buffer, _ts_handle) = project
+            .update(cx, |project, cx| {
+                project.open_local_buffer_with_lsp(path!("/a/index.ts"), cx)
+            })
+            .await
+            .unwrap();
+
+        let multi_buffer = cx.new(|cx| {
+            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
+                rs_buffer.clone(),
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
+                cx,
+            );
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
+                ts_buffer.clone(),
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
+                cx,
+            );
+            multibuffer
+        });
+
+        cx.executor().run_until_parked();
+        let editor = cx.add_window(|window, cx| {
+            Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx)
+        });
+
+        let _rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap();
+        let _ts_fake_server = ts_fake_servers.unwrap().next().await.unwrap();
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        // Verify initial state: both languages have exactly one hint each
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                let rs_hints: Vec<_> = visible
+                    .iter()
+                    .filter(|h| h.starts_with("rs_hint"))
+                    .collect();
+                let ts_hints: Vec<_> = visible
+                    .iter()
+                    .filter(|h| h.starts_with("ts_hint"))
+                    .collect();
+                assert_eq!(
+                    rs_hints.len(),
+                    1,
+                    "Should have exactly 1 Rust hint initially, got: {rs_hints:?}"
+                );
+                assert_eq!(
+                    ts_hints.len(),
+                    1,
+                    "Should have exactly 1 TypeScript hint initially, got: {ts_hints:?}"
+                );
+            })
+            .unwrap();
+
+        // Edit the Rust buffer — triggers BufferEdited(rust_buffer_id).
+        // The language filter in refresh_inlay_hints excludes TypeScript excerpts
+        // from processing, but the global clear() wipes added_hints for ALL buffers.
+        editor
+            .update(cx, |editor, window, cx| {
+                editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+                    s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)])
+                });
+                editor.handle_input("x", window, cx);
+            })
+            .unwrap();
+        cx.executor().run_until_parked();
+
+        // Trigger NewLinesShown — this causes TypeScript chunks to be re-fetched
+        // because hint_chunk_fetching was wiped by clear(). The cached hints pass
+        // the added_hints.insert(...).is_none() filter (also wiped) and get inserted
+        // alongside the still-displayed copies, causing duplicates.
+        editor
+            .update(cx, |editor, _window, cx| {
+                editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+            })
+            .unwrap();
+        cx.executor().run_until_parked();
+
+        // Assert: TypeScript hints must NOT be duplicated
+        editor
+            .update(cx, |editor, _window, cx| {
+                let visible = visible_hint_labels(editor, cx);
+                let ts_hints: Vec<_> = visible
+                    .iter()
+                    .filter(|h| h.starts_with("ts_hint"))
+                    .collect();
+                assert_eq!(
+                    ts_hints.len(),
+                    1,
+                    "TypeScript hints should NOT be duplicated after editing Rust buffer \
+                     and triggering NewLinesShown. Got: {ts_hints:?}"
+                );
+
+                let rs_hints: Vec<_> = visible
+                    .iter()
+                    .filter(|h| h.starts_with("rs_hint"))
+                    .collect();
+                assert_eq!(
+                    rs_hints.len(),
+                    1,
+                    "Rust hints should still be present after editing. Got: {rs_hints:?}"
+                );
+            })
+            .unwrap();
+    }
+
     pub(crate) fn init_test(cx: &mut TestAppContext, f: &dyn Fn(&mut AllLanguageSettingsContent)) {
         cx.update(|cx| {
             let settings_store = SettingsStore::test(cx);

crates/editor/src/items.rs 🔗

@@ -735,10 +735,13 @@ impl Item for Editor {
         h_flex()
             .gap_2()
             .child(
-                Label::new(self.title(cx).to_string())
-                    .color(label_color)
-                    .when(params.preview, |this| this.italic())
-                    .when(was_deleted, |this| this.strikethrough()),
+                Label::new(util::truncate_and_trailoff(
+                    &self.title(cx),
+                    MAX_TAB_TITLE_LEN,
+                ))
+                .color(label_color)
+                .when(params.preview, |this| this.italic())
+                .when(was_deleted, |this| this.strikethrough()),
             )
             .when_some(description, |this, description| {
                 this.child(

crates/editor/src/jsx_tag_auto_close.rs 🔗

@@ -619,7 +619,7 @@ mod jsx_tag_autoclose_tests {
     use super::*;
     use gpui::{AppContext as _, TestAppContext};
     use languages::language;
-    use multi_buffer::{ExcerptRange, MultiBufferOffset};
+    use multi_buffer::{MultiBufferOffset, PathKey};
     use text::Selection;
 
     async fn test_setup(cx: &mut TestAppContext) -> EditorTestContext {
@@ -816,21 +816,12 @@ mod jsx_tag_autoclose_tests {
         let buffer_c = cx.new(|cx| language::Buffer::local("<span", cx));
         let buffer = cx.new(|cx| {
             let mut buf = MultiBuffer::new(language::Capability::ReadWrite);
-            buf.push_excerpts(
-                buffer_a,
-                [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
-                cx,
-            );
-            buf.push_excerpts(
-                buffer_b,
-                [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
-                cx,
-            );
-            buf.push_excerpts(
-                buffer_c,
-                [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
-                cx,
-            );
+            let range_a = language::Point::zero()..buffer_a.read(cx).max_point();
+            let range_b = language::Point::zero()..buffer_b.read(cx).max_point();
+            let range_c = language::Point::zero()..buffer_c.read(cx).max_point();
+            buf.set_excerpts_for_path(PathKey::sorted(0), buffer_a, [range_a], 0, cx);
+            buf.set_excerpts_for_path(PathKey::sorted(1), buffer_b, [range_b], 0, cx);
+            buf.set_excerpts_for_path(PathKey::sorted(2), buffer_c, [range_c], 0, cx);
             buf
         });
         let editor = cx.add_window(|window, cx| build_editor(buffer.clone(), window, cx));

crates/editor/src/movement.rs 🔗

@@ -911,12 +911,13 @@ pub fn split_display_range_by_lines(
 mod tests {
     use super::*;
     use crate::{
-        Buffer, DisplayMap, DisplayRow, ExcerptRange, FoldPlaceholder, MultiBuffer,
+        Buffer, DisplayMap, DisplayRow, FoldPlaceholder, MultiBuffer,
         inlays::Inlay,
         test::{editor_test_context::EditorTestContext, marked_display_snapshot},
     };
     use gpui::{AppContext as _, font, px};
     use language::Capability;
+    use multi_buffer::PathKey;
     use project::project_settings::DiagnosticSeverity;
     use settings::SettingsStore;
     use util::post_inc;
@@ -1229,15 +1230,17 @@ mod tests {
 
             let font = font("Helvetica");
 
-            let buffer = cx.new(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx));
+            let buffer = cx.new(|cx| Buffer::local("abc\ndefg\na\na\na\nhijkl\nmn", cx));
             let multibuffer = cx.new(|cx| {
                 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-                multibuffer.push_excerpts(
+                multibuffer.set_excerpts_for_path(
+                    PathKey::sorted(0),
                     buffer.clone(),
                     [
-                        ExcerptRange::new(Point::new(0, 0)..Point::new(1, 4)),
-                        ExcerptRange::new(Point::new(2, 0)..Point::new(3, 2)),
+                        Point::new(0, 0)..Point::new(1, 4),
+                        Point::new(5, 0)..Point::new(6, 2),
                     ],
+                    0,
                     cx,
                 );
                 multibuffer

crates/editor/src/scroll/actions.rs 🔗

@@ -1,10 +1,12 @@
 use super::Axis;
 use crate::{
-    Autoscroll, Editor, EditorMode, NextScreen, NextScrollCursorCenterTopBottom,
+    Autoscroll, Editor, EditorMode, EditorSettings, NextScreen, NextScrollCursorCenterTopBottom,
     SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT, ScrollCursorBottom, ScrollCursorCenter,
     ScrollCursorCenterTopBottom, ScrollCursorTop, display_map::DisplayRow, scroll::ScrollOffset,
 };
 use gpui::{Context, Point, Window};
+use settings::Settings;
+use text::ToOffset;
 
 impl Editor {
     pub fn next_screen(&mut self, _: &NextScreen, window: &mut Window, cx: &mut Context<Editor>) {
@@ -73,18 +75,37 @@ impl Editor {
     ) {
         let display_snapshot = self.display_snapshot(cx);
         let scroll_margin_rows = self.vertical_scroll_margin() as u32;
-        let new_screen_top = self
-            .selections
-            .newest_display(&display_snapshot)
-            .head()
-            .row()
-            .0;
+        let selection_head = self.selections.newest_display(&display_snapshot).head();
+
+        let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled
+            && let Some((_, _, buffer_snapshot)) = display_snapshot.buffer_snapshot().as_singleton()
+        {
+            let select_head_point =
+                rope::Point::new(selection_head.to_point(&display_snapshot).row, 0);
+            buffer_snapshot
+                .outline_items_containing(select_head_point..select_head_point, false, None)
+                .iter()
+                .filter(|outline| {
+                    outline.range.start.offset
+                        < select_head_point.to_offset(&buffer_snapshot) as u32
+                })
+                .collect::<Vec<_>>()
+                .len()
+        } else {
+            0
+        } as u32;
+
+        let new_screen_top = selection_head.row().0;
         let header_offset = display_snapshot
             .buffer_snapshot()
             .show_headers()
             .then(|| display_snapshot.buffer_header_height())
             .unwrap_or(0);
-        let new_screen_top = new_screen_top.saturating_sub(scroll_margin_rows + header_offset);
+
+        // If the number of sticky headers exceeds the vertical_scroll_margin,
+        // we need to adjust the scroll top a bit further
+        let adjustment = scroll_margin_rows.max(sticky_headers_len) + header_offset;
+        let new_screen_top = new_screen_top.saturating_sub(adjustment);
         self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx);
     }
 

crates/editor/src/semantic_tokens.rs 🔗

@@ -122,7 +122,10 @@ impl Editor {
         if !self.mode().is_full() || !self.semantic_token_state.enabled() {
             self.invalidate_semantic_tokens(None);
             self.display_map.update(cx, |display_map, _| {
-                display_map.semantic_token_highlights.clear();
+                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
+                    Some(highlights) => highlights.clear(),
+                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
+                };
             });
             self.semantic_token_state.update_task = Task::ready(());
             cx.notify();
@@ -171,8 +174,8 @@ impl Editor {
             .display_map
             .read(cx)
             .semantic_token_highlights
-            .iter()
-            .map(|(buffer_id, _)| *buffer_id)
+            .keys()
+            .copied()
             .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
             .filter(|buffer_id| {
                 !self
@@ -214,8 +217,9 @@ impl Editor {
                             }) {
                                 None
                             } else {
-                                let task = sema.semantic_tokens(buffer, for_server, cx);
-                                Some(async move { (buffer_id, query_version, task.await) })
+                                sema.semantic_tokens(buffer, for_server, cx).map(
+                                    |task| async move { (buffer_id, query_version, task.await) },
+                                )
                             }
                         })
                         .collect::<Vec<_>>()
@@ -308,7 +312,7 @@ impl Editor {
                                 token_highlights.sort_by(|a, b| {
                                     a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                 });
-                                display_map.semantic_token_highlights.insert(
+                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                     buffer_id,
                                     (Arc::from(token_highlights), Arc::new(interner)),
                                 );
@@ -464,7 +468,7 @@ mod tests {
     use language::{Language, LanguageConfig, LanguageMatcher};
     use languages::FakeLspAdapter;
     use multi_buffer::{
-        AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
+        AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
     };
     use project::Project;
     use rope::Point;
@@ -1160,14 +1164,18 @@ mod tests {
         });
         let multibuffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 toml_buffer.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+                [Point::new(0, 0)..Point::new(0, 4)],
+                0,
                 cx,
             );
-            multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 rust_buffer.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+                [Point::new(0, 0)..Point::new(0, 4)],
+                0,
                 cx,
             );
             multibuffer
@@ -1234,202 +1242,6 @@ mod tests {
         );
     }
 
-    #[gpui::test]
-    async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) {
-        init_test(cx, |_| {});
-
-        update_test_language_settings(cx, &|language_settings| {
-            language_settings.languages.0.insert(
-                "TOML".into(),
-                LanguageSettingsContent {
-                    semantic_tokens: Some(SemanticTokens::Full),
-                    ..LanguageSettingsContent::default()
-                },
-            );
-        });
-
-        let toml_language = Arc::new(Language::new(
-            LanguageConfig {
-                name: "TOML".into(),
-                matcher: LanguageMatcher {
-                    path_suffixes: vec!["toml".into()],
-                    ..LanguageMatcher::default()
-                },
-                ..LanguageConfig::default()
-            },
-            None,
-        ));
-
-        let toml_legend = lsp::SemanticTokensLegend {
-            token_types: vec!["property".into()],
-            token_modifiers: Vec::new(),
-        };
-
-        let app_state = cx.update(workspace::AppState::test);
-
-        cx.update(|cx| {
-            assets::Assets.load_test_fonts(cx);
-            crate::init(cx);
-            workspace::init(app_state.clone(), cx);
-        });
-
-        let project = Project::test(app_state.fs.clone(), [], cx).await;
-        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
-        let full_counter_toml = Arc::new(AtomicUsize::new(0));
-        let full_counter_toml_clone = full_counter_toml.clone();
-
-        let mut toml_server = language_registry.register_fake_lsp(
-            toml_language.name(),
-            FakeLspAdapter {
-                name: "toml",
-                capabilities: lsp::ServerCapabilities {
-                    semantic_tokens_provider: Some(
-                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
-                            lsp::SemanticTokensOptions {
-                                legend: toml_legend,
-                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
-                                ..lsp::SemanticTokensOptions::default()
-                            },
-                        ),
-                    ),
-                    ..lsp::ServerCapabilities::default()
-                },
-                initializer: Some(Box::new({
-                    let full_counter_toml_clone = full_counter_toml_clone.clone();
-                    move |fake_server| {
-                        let full_counter = full_counter_toml_clone.clone();
-                        fake_server
-                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
-                                move |_, _| {
-                                    full_counter.fetch_add(1, atomic::Ordering::Release);
-                                    async move {
-                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
-                                            lsp::SemanticTokens {
-                                                // highlight 'a' as a property
-                                                data: vec![
-                                                    0, // delta_line
-                                                    0, // delta_start
-                                                    1, // length
-                                                    0, // token_type
-                                                    0, // token_modifiers_bitset
-                                                ],
-                                                result_id: Some("a".into()),
-                                            },
-                                        )))
-                                    }
-                                },
-                            );
-                    }
-                })),
-                ..FakeLspAdapter::default()
-            },
-        );
-        language_registry.add(toml_language.clone());
-
-        app_state
-            .fs
-            .as_fake()
-            .insert_tree(
-                EditorLspTestContext::root_path(),
-                json!({
-                    ".git": {},
-                    "dir": {
-                        "foo.toml": "a = 1\nb = 2\n",
-                    }
-                }),
-            )
-            .await;
-
-        let (multi_workspace, cx) =
-            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
-        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
-        project
-            .update(cx, |project, cx| {
-                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
-            })
-            .await
-            .unwrap();
-        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
-            .await;
-
-        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
-        let toml_item = workspace
-            .update_in(cx, |workspace, window, cx| {
-                workspace.open_path(toml_file, None, true, window, cx)
-            })
-            .await
-            .expect("Could not open test file");
-
-        let toml_editor = cx.update(|_, cx| {
-            toml_item
-                .act_as::<Editor>(cx)
-                .expect("Opened test file wasn't an editor")
-        });
-        let toml_buffer = cx.read(|cx| {
-            toml_editor
-                .read(cx)
-                .buffer()
-                .read(cx)
-                .as_singleton()
-                .unwrap()
-        });
-        let multibuffer = cx.new(|cx| {
-            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-            multibuffer.push_excerpts(
-                toml_buffer.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
-                cx,
-            );
-            multibuffer.push_excerpts(
-                toml_buffer.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
-                cx,
-            );
-            multibuffer
-        });
-
-        let editor = workspace.update_in(cx, |_, window, cx| {
-            cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx))
-        });
-        editor.update_in(cx, |editor, window, cx| {
-            let nav_history = workspace
-                .read(cx)
-                .active_pane()
-                .read(cx)
-                .nav_history_for_item(&cx.entity());
-            editor.set_nav_history(Some(nav_history));
-            window.focus(&editor.focus_handle(cx), cx)
-        });
-
-        let _toml_server = toml_server.next().await.unwrap();
-
-        // Initial request.
-        cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
-        cx.run_until_parked();
-        task.await;
-        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
-
-        // Edit two parts of the multibuffer, which both map to the same buffer.
-        //
-        // Without debouncing, this grabs semantic tokens 4 times (twice for the
-        // toml editor, and twice for the multibuffer).
-        editor.update_in(cx, |editor, _, cx| {
-            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
-            editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
-        });
-        cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
-        cx.run_until_parked();
-        task.await;
-        assert_eq!(
-            extract_semantic_highlights(&editor, &cx),
-            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
-        );
-
-        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2);
-    }
-
     fn extract_semantic_highlights(
         editor: &Entity<Editor>,
         cx: &TestAppContext,

crates/editor/src/split.rs 🔗

@@ -34,7 +34,7 @@ use workspace::{
 };
 
 use crate::{
-    Autoscroll, DisplayMap, Editor, EditorEvent, RenderDiffHunkControlsFn, ToggleSoftWrap,
+    Autoscroll, Editor, EditorEvent, RenderDiffHunkControlsFn, ToggleSoftWrap,
     actions::{DisableBreakpoint, EditLogBreakpoint, EnableBreakpoint, ToggleBreakpoint},
     display_map::Companion,
 };
@@ -667,52 +667,28 @@ impl SplittableEditor {
                 .collect()
         };
 
-        let mut companion = Companion::new(
-            rhs_display_map_id,
-            convert_rhs_rows_to_lhs,
-            convert_lhs_rows_to_rhs,
-        );
-
-        // stream this
-        for (path, diff) in path_diffs {
-            self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-                let sync_result = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
-                    LhsEditor::update_path_excerpts_from_rhs(
-                        path.clone(),
-                        rhs_multibuffer,
-                        lhs_multibuffer,
-                        diff.clone(),
-                        lhs_cx,
-                    )
-                });
-
-                if let Some((lhs_excerpt_ids, rhs_merge_groups)) = sync_result {
-                    let mut final_rhs_ids = Vec::with_capacity(lhs_excerpt_ids.len());
-                    for group in rhs_merge_groups {
-                        if group.len() == 1 {
-                            final_rhs_ids.push(group[0]);
-                        } else {
-                            let merged_id = rhs_multibuffer.merge_excerpts(&group, cx);
-                            final_rhs_ids.push(merged_id);
-                        }
-                    }
+        let companion = cx.new(|_| {
+            Companion::new(
+                rhs_display_map_id,
+                convert_rhs_rows_to_lhs,
+                convert_lhs_rows_to_rhs,
+            )
+        });
 
-                    for (rhs_id, lhs_id) in final_rhs_ids.iter().zip(lhs_excerpt_ids.iter()) {
-                        companion.add_excerpt_mapping(*lhs_id, *rhs_id);
-                    }
-                    let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id();
-                    let rhs_buffer_id = diff.read(cx).buffer_id;
-                    companion.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id);
-                }
-            });
-        }
+        self.lhs = Some(lhs);
 
-        let companion = cx.new(|_| companion);
+        let paths_for_sync: Vec<_> = path_diffs
+            .into_iter()
+            .map(|(path, diff)| (path, vec![], diff))
+            .collect();
+        self.sync_lhs_for_paths(paths_for_sync, &companion, cx);
 
         rhs_display_map.update(cx, |dm, cx| {
             dm.set_companion(Some((lhs_display_map, companion.clone())), cx);
         });
 
+        let lhs = self.lhs.as_ref().unwrap();
+
         let shared_scroll_anchor = self
             .rhs_editor
             .read(cx)
@@ -761,8 +737,6 @@ impl SplittableEditor {
             cx.notify();
         });
 
-        self.lhs = Some(lhs);
-
         cx.notify();
     }
 
@@ -1011,34 +985,52 @@ impl SplittableEditor {
         diff: Entity<BufferDiff>,
         cx: &mut Context<Self>,
     ) -> (Vec<Range<Anchor>>, bool) {
-        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
-        let lhs = self.lhs.as_ref();
-        self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-            mutate_excerpts_for_paths(
-                rhs_multibuffer,
-                lhs,
-                &rhs_display_map,
-                vec![(path.clone(), diff.clone())],
+        let Some(companion) = self.companion(cx) else {
+            return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+                let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
+                    path,
+                    buffer.clone(),
+                    ranges,
+                    context_line_count,
+                    cx,
+                );
+                if !anchors.is_empty()
+                    && rhs_multibuffer
+                        .diff_for(buffer.read(cx).remote_id())
+                        .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
+                {
+                    rhs_multibuffer.add_diff(diff, cx);
+                }
+                (anchors, added_a_new_excerpt)
+            });
+        };
+
+        let old_rhs_ids: Vec<ExcerptId> = self
+            .rhs_multibuffer
+            .read(cx)
+            .excerpts_for_path(&path)
+            .collect();
+
+        let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+            let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
+                path.clone(),
+                buffer.clone(),
+                ranges,
+                context_line_count,
                 cx,
-                |rhs_multibuffer, cx| {
-                    let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
-                        path.clone(),
-                        buffer.clone(),
-                        ranges,
-                        context_line_count,
-                        cx,
-                    );
-                    if !anchors.is_empty()
-                        && rhs_multibuffer
-                            .diff_for(buffer.read(cx).remote_id())
-                            .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
-                    {
-                        rhs_multibuffer.add_diff(diff.clone(), cx);
-                    }
-                    (anchors, added_a_new_excerpt)
-                },
-            )
-        })
+            );
+            if !anchors.is_empty()
+                && rhs_multibuffer
+                    .diff_for(buffer.read(cx).remote_id())
+                    .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
+            {
+                rhs_multibuffer.add_diff(diff.clone(), cx);
+            }
+            (anchors, added_a_new_excerpt)
+        });
+
+        self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx);
+        result
     }
 
     fn expand_excerpts(
@@ -1048,78 +1040,209 @@ impl SplittableEditor {
         direction: ExpandExcerptDirection,
         cx: &mut Context<Self>,
     ) {
-        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
-        let lhs = self.lhs.as_ref();
-        self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-            if lhs.is_some() {
-                let snapshot = rhs_multibuffer.snapshot(cx);
-                let paths_with_diffs: Vec<_> = excerpt_ids
-                    .clone()
-                    .filter_map(|excerpt_id| {
-                        let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?;
-                        let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
-                        let diff = rhs_multibuffer.diff_for(buffer.remote_id())?;
-                        Some((path, diff))
-                    })
-                    .collect::<HashMap<_, _>>()
-                    .into_iter()
-                    .collect();
-
-                mutate_excerpts_for_paths(
-                    rhs_multibuffer,
-                    lhs,
-                    &rhs_display_map,
-                    paths_with_diffs,
-                    cx,
-                    |rhs_multibuffer, cx| {
-                        rhs_multibuffer.expand_excerpts(excerpt_ids.clone(), lines, direction, cx);
-                    },
-                );
-            } else {
+        let Some(companion) = self.companion(cx) else {
+            self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
                 rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx);
-            }
+            });
+            return;
+        };
+
+        let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+            let snapshot = rhs_multibuffer.snapshot(cx);
+            let paths = excerpt_ids
+                .clone()
+                .filter_map(|excerpt_id| {
+                    let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?;
+                    let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
+                    let diff = rhs_multibuffer.diff_for(buffer.remote_id())?;
+                    Some((path, diff))
+                })
+                .collect::<HashMap<_, _>>()
+                .into_iter()
+                .map(|(path, diff)| {
+                    let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect();
+                    (path, old_ids, diff)
+                })
+                .collect();
+            rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx);
+            paths
         });
+
+        self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx);
     }
 
     pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context<Self>) {
-        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
-
-        if let Some(lhs) = &self.lhs {
+        let Some(lhs) = &self.lhs else {
             self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-                let rhs_excerpt_ids: Vec<ExcerptId> =
-                    rhs_multibuffer.excerpts_for_path(&path).collect();
-                let lhs_excerpt_ids: Vec<ExcerptId> =
-                    lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
+                rhs_multibuffer.remove_excerpts_for_path(path, cx);
+            });
+            return;
+        };
 
-                if let Some(companion) = rhs_display_map.read(cx).companion().cloned() {
-                    companion.update(cx, |c, _| {
-                        c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids);
-                    });
-                }
+        let rhs_excerpt_ids: Vec<ExcerptId> = self
+            .rhs_multibuffer
+            .read(cx)
+            .excerpts_for_path(&path)
+            .collect();
+        let lhs_excerpt_ids: Vec<ExcerptId> =
+            lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
 
-                rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx);
-            });
-            lhs.multibuffer.update(cx, |lhs_multibuffer, cx| {
-                lhs_multibuffer.remove_excerpts_for_path(path, cx);
-            });
-        } else {
-            self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-                rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx);
+        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
+        if let Some(companion) = rhs_display_map.read(cx).companion().cloned() {
+            companion.update(cx, |c, _| {
+                c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids);
             });
         }
+
+        self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+            rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx);
+        });
+        lhs.multibuffer.update(cx, |lhs_multibuffer, cx| {
+            lhs_multibuffer.remove_excerpts_for_path(path, cx);
+        });
     }
 
     fn search_token(&self) -> SearchToken {
         SearchToken::new(self.focused_side() as u64)
     }
 
-    fn editor_for_token(&self, token: SearchToken) -> &Entity<Editor> {
+    fn editor_for_token(&self, token: SearchToken) -> Option<&Entity<Editor>> {
         if token.value() == SplitSide::Left as u64 {
-            if let Some(lhs) = &self.lhs {
-                return &lhs.editor;
-            }
+            return self.lhs.as_ref().map(|lhs| &lhs.editor);
         }
-        &self.rhs_editor
+        Some(&self.rhs_editor)
+    }
+
+    fn companion(&self, cx: &App) -> Option<Entity<Companion>> {
+        if self.lhs.is_none() {
+            return None;
+        }
+        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
+        rhs_display_map.read(cx).companion().cloned()
+    }
+
+    fn sync_lhs_for_paths(
+        &self,
+        paths_with_old_rhs_ids: Vec<(PathKey, Vec<ExcerptId>, Entity<BufferDiff>)>,
+        companion: &Entity<Companion>,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(lhs) = &self.lhs else { return };
+
+        self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+            for (path, old_rhs_ids, diff) in paths_with_old_rhs_ids {
+                let old_lhs_ids: Vec<ExcerptId> =
+                    lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
+
+                companion.update(cx, |c, _| {
+                    c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids);
+                });
+
+                let rhs_excerpt_ids: Vec<ExcerptId> =
+                    rhs_multibuffer.excerpts_for_path(&path).collect();
+                let Some(excerpt_id) = rhs_excerpt_ids.first().copied() else {
+                    lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
+                        lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx);
+                    });
+                    continue;
+                };
+                let Some(main_buffer_snapshot) = rhs_multibuffer
+                    .snapshot(cx)
+                    .buffer_for_excerpt(excerpt_id)
+                    .cloned()
+                else {
+                    continue;
+                };
+                let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id())
+                else {
+                    continue;
+                };
+
+                let base_text_buffer = diff.read(cx).base_text_buffer().clone();
+                let diff_snapshot = diff.read(cx).snapshot(cx);
+                let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot();
+
+                let lhs_ranges: Vec<ExcerptRange<Point>> = rhs_multibuffer
+                    .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx)
+                    .into_iter()
+                    .filter(|(id, _)| rhs_excerpt_ids.contains(id))
+                    .map(|(_, excerpt_range)| {
+                        let to_base_text = |range: Range<Point>| {
+                            let start = diff_snapshot
+                                .buffer_point_to_base_text_range(
+                                    Point::new(range.start.row, 0),
+                                    &main_buffer_snapshot,
+                                )
+                                .start;
+                            let end = diff_snapshot
+                                .buffer_point_to_base_text_range(
+                                    Point::new(range.end.row, 0),
+                                    &main_buffer_snapshot,
+                                )
+                                .end;
+                            let end_column = diff_snapshot.base_text().line_len(end.row);
+                            Point::new(start.row, 0)..Point::new(end.row, end_column)
+                        };
+                        let primary = excerpt_range.primary.to_point(&main_buffer_snapshot);
+                        let context = excerpt_range.context.to_point(&main_buffer_snapshot);
+                        ExcerptRange {
+                            primary: to_base_text(primary),
+                            context: to_base_text(context),
+                        }
+                    })
+                    .collect();
+
+                let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
+                    let lhs_result = lhs_multibuffer.update_path_excerpts(
+                        path,
+                        base_text_buffer,
+                        &base_text_buffer_snapshot,
+                        lhs_ranges,
+                        lhs_cx,
+                    );
+                    if !lhs_result.excerpt_ids.is_empty()
+                        && lhs_multibuffer
+                            .diff_for(base_text_buffer_snapshot.remote_id())
+                            .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
+                    {
+                        lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx);
+                    }
+
+                    let mut groups = Vec::new();
+                    for (lhs_id, chunk) in &lhs_result
+                        .excerpt_ids
+                        .iter()
+                        .copied()
+                        .zip(rhs_excerpt_ids)
+                        .chunk_by(|(lhs_id, _)| *lhs_id)
+                    {
+                        groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::<Vec<_>>()));
+                    }
+                    groups
+                });
+
+                let pairs = groups
+                    .into_iter()
+                    .map(|(lhs_id, rhs_group)| {
+                        let rhs_id = if rhs_group.len() == 1 {
+                            rhs_group[0]
+                        } else {
+                            rhs_multibuffer.merge_excerpts(&rhs_group, cx)
+                        };
+                        (lhs_id, rhs_id)
+                    })
+                    .collect::<Vec<_>>();
+
+                let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id();
+                let rhs_buffer_id = diff.read(cx).buffer_id;
+                companion.update(cx, |c, _| {
+                    for (lhs_id, rhs_id) in pairs {
+                        c.add_excerpt_mapping(lhs_id, rhs_id);
+                    }
+                    c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id);
+                });
+            }
+        });
     }
 }
 
@@ -1758,7 +1881,10 @@ impl SearchableItem for SplittableEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.editor_for_token(token).update(cx, |editor, cx| {
+        let Some(target) = self.editor_for_token(token) else {
+            return;
+        };
+        target.update(cx, |editor, cx| {
             editor.update_matches(matches, active_match_index, token, window, cx);
         });
     }
@@ -1804,7 +1930,10 @@ impl SearchableItem for SplittableEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.editor_for_token(token).update(cx, |editor, cx| {
+        let Some(target) = self.editor_for_token(token) else {
+            return;
+        };
+        target.update(cx, |editor, cx| {
             editor.activate_match(index, matches, token, window, cx);
         });
     }
@@ -1816,7 +1945,10 @@ impl SearchableItem for SplittableEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.editor_for_token(token).update(cx, |editor, cx| {
+        let Some(target) = self.editor_for_token(token) else {
+            return;
+        };
+        target.update(cx, |editor, cx| {
             editor.select_matches(matches, token, window, cx);
         });
     }
@@ -1829,7 +1961,10 @@ impl SearchableItem for SplittableEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        self.editor_for_token(token).update(cx, |editor, cx| {
+        let Some(target) = self.editor_for_token(token) else {
+            return;
+        };
+        target.update(cx, |editor, cx| {
             editor.replace(identifier, query, token, window, cx);
         });
     }
@@ -1873,7 +2008,7 @@ impl SearchableItem for SplittableEditor {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Option<usize> {
-        self.editor_for_token(token).update(cx, |editor, cx| {
+        self.editor_for_token(token)?.update(cx, |editor, cx| {
             editor.active_match_index(direction, matches, token, window, cx)
         })
     }
@@ -1927,209 +2062,6 @@ impl Render for SplittableEditor {
     }
 }
 
-fn mutate_excerpts_for_paths<R>(
-    rhs_multibuffer: &mut MultiBuffer,
-    lhs: Option<&LhsEditor>,
-    rhs_display_map: &Entity<DisplayMap>,
-    paths_with_diffs: Vec<(PathKey, Entity<BufferDiff>)>,
-    cx: &mut Context<MultiBuffer>,
-    mutate: impl FnOnce(&mut MultiBuffer, &mut Context<MultiBuffer>) -> R,
-) -> R {
-    let old_rhs_ids: Vec<_> = paths_with_diffs
-        .iter()
-        .map(|(path, _)| {
-            rhs_multibuffer
-                .excerpts_for_path(path)
-                .collect::<Vec<ExcerptId>>()
-        })
-        .collect();
-
-    let result = mutate(rhs_multibuffer, cx);
-
-    if let Some(lhs) = lhs {
-        let mut sync_results = Vec::new();
-        let mut diffs_for_mapping = Vec::new();
-
-        for ((path, diff), old_rhs_ids) in paths_with_diffs.into_iter().zip(old_rhs_ids) {
-            let sync_result = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
-                LhsEditor::sync_path_excerpts(
-                    path,
-                    old_rhs_ids,
-                    rhs_multibuffer,
-                    lhs_multibuffer,
-                    diff.clone(),
-                    rhs_display_map,
-                    lhs_cx,
-                )
-            });
-            if let Some(sync_result) = sync_result {
-                sync_results.push(sync_result);
-                diffs_for_mapping.push(diff);
-            }
-        }
-
-        for ((lhs_excerpt_ids, rhs_merge_groups), diff) in
-            sync_results.into_iter().zip(diffs_for_mapping.into_iter())
-        {
-            let mut final_rhs_ids = Vec::with_capacity(lhs_excerpt_ids.len());
-            for group in rhs_merge_groups {
-                if group.len() == 1 {
-                    final_rhs_ids.push(group[0]);
-                } else {
-                    let merged_id = rhs_multibuffer.merge_excerpts(&group, cx);
-                    final_rhs_ids.push(merged_id);
-                }
-            }
-
-            debug_assert_eq!(final_rhs_ids.len(), lhs_excerpt_ids.len());
-
-            if let Some(companion) = rhs_display_map.read(cx).companion().cloned() {
-                let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id();
-                let rhs_buffer_id = diff.read(cx).buffer_id;
-                companion.update(cx, |c, _| {
-                    for (rhs_id, lhs_id) in final_rhs_ids.iter().zip(lhs_excerpt_ids.iter()) {
-                        c.add_excerpt_mapping(*lhs_id, *rhs_id);
-                    }
-                    c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id);
-                });
-            }
-        }
-    }
-
-    result
-}
-
-impl LhsEditor {
-    fn update_path_excerpts_from_rhs(
-        path_key: PathKey,
-        rhs_multibuffer: &MultiBuffer,
-        lhs_multibuffer: &mut MultiBuffer,
-        diff: Entity<BufferDiff>,
-        lhs_cx: &mut Context<MultiBuffer>,
-    ) -> Option<(Vec<ExcerptId>, Vec<Vec<ExcerptId>>)> {
-        let Some(excerpt_id) = rhs_multibuffer.excerpts_for_path(&path_key).next() else {
-            lhs_multibuffer.remove_excerpts_for_path(path_key, lhs_cx);
-            return None;
-        };
-
-        let rhs_excerpt_ids: Vec<ExcerptId> =
-            rhs_multibuffer.excerpts_for_path(&path_key).collect();
-
-        let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(lhs_cx);
-        let main_buffer = rhs_multibuffer_snapshot
-            .buffer_for_excerpt(excerpt_id)
-            .unwrap();
-        let diff_snapshot;
-        let base_text_buffer_snapshot;
-        let remote_id;
-        {
-            let diff = diff.read(lhs_cx);
-            let base_text_buffer = diff.base_text_buffer().read(lhs_cx);
-            diff_snapshot = diff.snapshot(lhs_cx);
-            base_text_buffer_snapshot = base_text_buffer.snapshot();
-            remote_id = base_text_buffer.remote_id();
-        }
-        let new = rhs_multibuffer
-            .excerpts_for_buffer(main_buffer.remote_id(), lhs_cx)
-            .into_iter()
-            .filter(|(id, _)| rhs_excerpt_ids.contains(&id))
-            .map(|(_, excerpt_range)| {
-                let point_range_to_base_text_point_range = |range: Range<Point>| {
-                    let start = diff_snapshot
-                        .buffer_point_to_base_text_range(
-                            Point::new(range.start.row, 0),
-                            main_buffer,
-                        )
-                        .start;
-                    let end = diff_snapshot
-                        .buffer_point_to_base_text_range(Point::new(range.end.row, 0), main_buffer)
-                        .end;
-                    let end_column = diff_snapshot.base_text().line_len(end.row);
-                    Point::new(start.row, 0)..Point::new(end.row, end_column)
-                };
-                let rhs = excerpt_range.primary.to_point(main_buffer);
-                let context = excerpt_range.context.to_point(main_buffer);
-                ExcerptRange {
-                    primary: point_range_to_base_text_point_range(rhs),
-                    context: point_range_to_base_text_point_range(context),
-                }
-            })
-            .collect();
-
-        let lhs_result = lhs_multibuffer.update_path_excerpts(
-            path_key,
-            diff.read(lhs_cx).base_text_buffer().clone(),
-            &base_text_buffer_snapshot,
-            new,
-            lhs_cx,
-        );
-        if !lhs_result.excerpt_ids.is_empty()
-            && lhs_multibuffer
-                .diff_for(remote_id)
-                .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
-        {
-            let main_buffer_entity = rhs_multibuffer
-                .buffer(main_buffer.remote_id())
-                .expect("main buffer should exist in rhs_multibuffer");
-            lhs_multibuffer.add_inverted_diff(diff, main_buffer_entity, lhs_cx);
-        }
-
-        let rhs_merge_groups: Vec<Vec<ExcerptId>> = {
-            let mut groups = Vec::new();
-            let mut current_group = Vec::new();
-            let mut last_id = None;
-
-            for (lhs_id, rhs_id) in lhs_result.excerpt_ids.iter().zip(rhs_excerpt_ids) {
-                if last_id == Some(lhs_id) {
-                    current_group.push(rhs_id);
-                } else {
-                    if !current_group.is_empty() {
-                        groups.push(current_group);
-                    }
-                    current_group = vec![rhs_id];
-                    last_id = Some(lhs_id);
-                }
-            }
-            if !current_group.is_empty() {
-                groups.push(current_group);
-            }
-            groups
-        };
-
-        let deduplicated_lhs_ids: Vec<ExcerptId> =
-            lhs_result.excerpt_ids.iter().dedup().copied().collect();
-
-        Some((deduplicated_lhs_ids, rhs_merge_groups))
-    }
-
-    fn sync_path_excerpts(
-        path_key: PathKey,
-        old_rhs_excerpt_ids: Vec<ExcerptId>,
-        rhs_multibuffer: &MultiBuffer,
-        lhs_multibuffer: &mut MultiBuffer,
-        diff: Entity<BufferDiff>,
-        rhs_display_map: &Entity<DisplayMap>,
-        lhs_cx: &mut Context<MultiBuffer>,
-    ) -> Option<(Vec<ExcerptId>, Vec<Vec<ExcerptId>>)> {
-        let old_lhs_excerpt_ids: Vec<ExcerptId> =
-            lhs_multibuffer.excerpts_for_path(&path_key).collect();
-
-        if let Some(companion) = rhs_display_map.read(lhs_cx).companion().cloned() {
-            companion.update(lhs_cx, |c, _| {
-                c.remove_excerpt_mappings(old_lhs_excerpt_ids, old_rhs_excerpt_ids);
-            });
-        }
-
-        Self::update_path_excerpts_from_rhs(
-            path_key,
-            rhs_multibuffer,
-            lhs_multibuffer,
-            diff,
-            lhs_cx,
-        )
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use std::sync::Arc;

crates/editor/src/test.rs 🔗

@@ -123,8 +123,6 @@ pub fn assert_text_with_selections(
     assert_eq!(actual, marked_text, "Selections don't match");
 }
 
-// RA thinks this is dead code even though it is used in a whole lot of tests
-#[allow(dead_code)]
 #[cfg(any(test, feature = "test-support"))]
 pub(crate) fn build_editor(
     buffer: Entity<MultiBuffer>,

crates/editor/src/test/editor_test_context.rs 🔗

@@ -13,7 +13,7 @@ use gpui::{
 };
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot, LanguageRegistry};
-use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow};
+use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey};
 use parking_lot::RwLock;
 use project::{FakeFs, Project};
 use std::{
@@ -128,10 +128,26 @@ impl EditorTestContext {
     ) -> EditorTestContext {
         let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
         let buffer = cx.new(|cx| {
-            for excerpt in excerpts.into_iter() {
+            for (index, excerpt) in excerpts.into_iter().enumerate() {
                 let (text, ranges) = marked_text_ranges(excerpt, false);
                 let buffer = cx.new(|cx| Buffer::local(text, cx));
-                multibuffer.push_excerpts(buffer, ranges.into_iter().map(ExcerptRange::new), cx);
+                let point_ranges: Vec<_> = {
+                    let snapshot = buffer.read(cx);
+                    ranges
+                        .into_iter()
+                        .map(|range| {
+                            snapshot.offset_to_point(range.start)
+                                ..snapshot.offset_to_point(range.end)
+                        })
+                        .collect()
+                };
+                multibuffer.set_excerpts_for_path(
+                    PathKey::sorted(index as u64),
+                    buffer,
+                    point_ranges,
+                    0,
+                    cx,
+                );
             }
             multibuffer
         });

crates/etw_tracing/Cargo.toml 🔗

@@ -21,10 +21,4 @@ workspace.workspace = true
 [target.'cfg(target_os = "windows")'.dependencies]
 wprcontrol = { git = "https://github.com/zed-industries/wprcontrol", rev = "cd811f7" }
 windows-core = "0.61"
-windows = { workspace = true, features = [
-    "Win32_Foundation",
-    "Win32_System_Com",
-    "Win32_System_Ole",
-    "Win32_System_Variant",
-    "Win32_UI_Shell",
-] }
+windows.workspace = true

crates/eval/src/instance.rs 🔗

@@ -564,6 +564,7 @@ impl ExampleInstance {
                 stop: Vec::new(),
                 thinking_allowed: true,
                 thinking_effort: None,
+                speed: None,
             };
 
             let model = model.clone();
@@ -682,9 +683,7 @@ impl agent::ThreadEnvironment for EvalThreadEnvironment {
 
     fn create_subagent(
         &self,
-        _parent_thread: Entity<agent::Thread>,
         _label: String,
-        _initial_prompt: String,
         _cx: &mut App,
     ) -> Result<Rc<dyn agent::SubagentHandle>> {
         unimplemented!()

crates/explorer_command_injector/src/explorer_command_injector.rs 🔗

@@ -106,18 +106,17 @@ impl IClassFactory_Impl for ExplorerCommandInjectorFactory_Impl {
         riid: *const windows_core::GUID,
         ppvobject: *mut *mut core::ffi::c_void,
     ) -> Result<()> {
+        if ppvobject.is_null() || riid.is_null() {
+            return Err(windows::Win32::Foundation::E_POINTER.into());
+        }
+
         unsafe {
             *ppvobject = std::ptr::null_mut();
         }
+
         if punkouter.is_none() {
             let factory: IExplorerCommand = ExplorerCommandInjector {}.into();
-            let ret = unsafe { factory.query(riid, ppvobject).ok() };
-            if ret.is_ok() {
-                unsafe {
-                    *ppvobject = factory.into_raw();
-                }
-            }
-            ret
+            unsafe { factory.query(riid, ppvobject).ok() }
         } else {
             Err(E_INVALIDARG.into())
         }
@@ -145,19 +144,17 @@ extern "system" fn DllGetClassObject(
     iid: *const GUID,
     out: *mut *mut std::ffi::c_void,
 ) -> HRESULT {
+    if out.is_null() || class_id.is_null() || iid.is_null() {
+        return E_INVALIDARG;
+    }
+
     unsafe {
         *out = std::ptr::null_mut();
     }
     let class_id = unsafe { *class_id };
     if class_id == MODULE_ID {
         let instance: IClassFactory = ExplorerCommandInjectorFactory {}.into();
-        let ret = unsafe { instance.query(iid, out) };
-        if ret.is_ok() {
-            unsafe {
-                *out = instance.into_raw();
-            }
-        }
-        ret
+        unsafe { instance.query(iid, out) }
     } else {
         CLASS_E_CLASSNOTAVAILABLE
     }

crates/extension_host/src/extension_host.rs 🔗

@@ -32,8 +32,8 @@ use futures::{
     select_biased,
 };
 use gpui::{
-    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, WeakEntity,
-    actions,
+    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, UpdateGlobal as _,
+    WeakEntity, actions,
 };
 use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
 use language::{
@@ -46,7 +46,7 @@ use release_channel::ReleaseChannel;
 use remote::RemoteClient;
 use semver::Version;
 use serde::{Deserialize, Serialize};
-use settings::Settings;
+use settings::{SemanticTokenRules, Settings, SettingsStore};
 use std::ops::RangeInclusive;
 use std::str::FromStr;
 use std::{
@@ -1220,6 +1220,15 @@ impl ExtensionStore {
         self.proxy
             .remove_languages(&languages_to_remove, &grammars_to_remove);
 
+        // Remove semantic token rules for languages being unloaded.
+        if !languages_to_remove.is_empty() {
+            SettingsStore::update_global(cx, |store, cx| {
+                for language in &languages_to_remove {
+                    store.remove_language_semantic_token_rules(language.as_ref(), cx);
+                }
+            });
+        }
+
         let mut grammars_to_add = Vec::new();
         let mut themes_to_add = Vec::new();
         let mut icon_themes_to_add = Vec::new();
@@ -1267,12 +1276,30 @@ impl ExtensionStore {
             .iter()
             .filter(|(_, entry)| extensions_to_load.contains(&entry.extension))
             .collect::<Vec<_>>();
+        let mut semantic_token_rules_to_add: Vec<(LanguageName, SemanticTokenRules)> = Vec::new();
         for (language_name, language) in languages_to_add {
             let mut language_path = self.installed_dir.clone();
             language_path.extend([
                 Path::new(language.extension.as_ref()),
                 language.path.as_path(),
             ]);
+
+            // Load semantic token rules if present in the language directory.
+            let rules_path = language_path.join("semantic_token_rules.json");
+            if let Ok(rules_json) = std::fs::read_to_string(&rules_path) {
+                match serde_json_lenient::from_str::<SemanticTokenRules>(&rules_json) {
+                    Ok(rules) => {
+                        semantic_token_rules_to_add.push((language_name.clone(), rules));
+                    }
+                    Err(err) => {
+                        log::error!(
+                            "Failed to parse semantic token rules from {}: {err:#}",
+                            rules_path.display()
+                        );
+                    }
+                }
+            }
+
             self.proxy.register_language(
                 language_name.clone(),
                 language.grammar.clone(),
@@ -1302,6 +1329,15 @@ impl ExtensionStore {
             );
         }
 
+        // Register semantic token rules for newly loaded extension languages.
+        if !semantic_token_rules_to_add.is_empty() {
+            SettingsStore::update_global(cx, |store, cx| {
+                for (language_name, rules) in semantic_token_rules_to_add {
+                    store.set_language_semantic_token_rules(language_name.0.clone(), rules, cx);
+                }
+            });
+        }
+
         let fs = self.fs.clone();
         let wasm_host = self.wasm_host.clone();
         let root_dir = self.installed_dir.clone();

crates/feature_flags/src/flags.rs 🔗

@@ -37,16 +37,6 @@ impl FeatureFlag for AgentSharingFeatureFlag {
     const NAME: &'static str = "agent-sharing";
 }
 
-pub struct SubagentsFeatureFlag;
-
-impl FeatureFlag for SubagentsFeatureFlag {
-    const NAME: &'static str = "subagents";
-
-    fn enabled_for_staff() -> bool {
-        true
-    }
-}
-
 pub struct DiffReviewFeatureFlag;
 
 impl FeatureFlag for DiffReviewFeatureFlag {
@@ -57,12 +47,18 @@ impl FeatureFlag for DiffReviewFeatureFlag {
     }
 }
 
+pub struct GitGraphFeatureFlag;
+
+impl FeatureFlag for GitGraphFeatureFlag {
+    const NAME: &'static str = "git-graph";
+}
+
 pub struct StreamingEditFileToolFeatureFlag;
 
 impl FeatureFlag for StreamingEditFileToolFeatureFlag {
     const NAME: &'static str = "streaming-edit-file-tool";
 
     fn enabled_for_staff() -> bool {
-        false
+        true
     }
 }

crates/fs/Cargo.toml 🔗

@@ -58,4 +58,4 @@ gpui = { workspace = true, features = ["test-support"] }
 git = { workspace = true, features = ["test-support"] }
 
 [features]
-test-support = ["gpui/test-support", "git/test-support"]
+test-support = ["gpui/test-support", "git/test-support", "util/test-support"]

crates/fs/src/fake_git_repo.rs 🔗

@@ -20,7 +20,7 @@ use ignore::gitignore::GitignoreBuilder;
 use parking_lot::Mutex;
 use rope::Rope;
 use smol::{channel::Sender, future::FutureExt as _};
-use std::{path::PathBuf, sync::Arc};
+use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
 use text::LineEnding;
 use util::{paths::PathStyle, rel_path::RelPath};
 
@@ -32,6 +32,7 @@ pub struct FakeGitRepository {
     pub(crate) dot_git_path: PathBuf,
     pub(crate) repository_dir_path: PathBuf,
     pub(crate) common_dir_path: PathBuf,
+    pub(crate) is_trusted: Arc<AtomicBool>,
 }
 
 #[derive(Debug, Clone)]
@@ -406,7 +407,31 @@ impl GitRepository for FakeGitRepository {
     }
 
     fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
-        self.with_state_async(false, |state| Ok(state.worktrees.clone()))
+        let dot_git_path = self.dot_git_path.clone();
+        self.with_state_async(false, move |state| {
+            let work_dir = dot_git_path
+                .parent()
+                .map(PathBuf::from)
+                .unwrap_or(dot_git_path);
+            let head_sha = state
+                .refs
+                .get("HEAD")
+                .cloned()
+                .unwrap_or_else(|| "0000000".to_string());
+            let branch_ref = state
+                .current_branch_name
+                .as_ref()
+                .map(|name| format!("refs/heads/{name}"))
+                .unwrap_or_else(|| "refs/heads/main".to_string());
+            let main_worktree = Worktree {
+                path: work_dir,
+                ref_name: branch_ref.into(),
+                sha: head_sha.into(),
+            };
+            let mut all = vec![main_worktree];
+            all.extend(state.worktrees.iter().cloned());
+            Ok(all)
+        })
     }
 
     fn create_worktree(
@@ -768,6 +793,109 @@ impl GitRepository for FakeGitRepository {
         unimplemented!()
     }
 
+    fn diff_stat(
+        &self,
+        path_prefixes: &[RepoPath],
+    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
+        fn count_lines(s: &str) -> u32 {
+            if s.is_empty() {
+                0
+            } else {
+                s.lines().count() as u32
+            }
+        }
+
+        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
+            if prefixes.is_empty() {
+                return true;
+            }
+            prefixes.iter().any(|prefix| {
+                let prefix_str = prefix.as_unix_str();
+                if prefix_str == "." {
+                    return true;
+                }
+                path == prefix || path.starts_with(&prefix)
+            })
+        }
+
+        let path_prefixes = path_prefixes.to_vec();
+
+        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
+        let worktree_files: HashMap<RepoPath, String> = self
+            .fs
+            .files()
+            .iter()
+            .filter_map(|path| {
+                let repo_path = path.strip_prefix(&workdir_path).ok()?;
+                if repo_path.starts_with(".git") {
+                    return None;
+                }
+                let content = self
+                    .fs
+                    .read_file_sync(path)
+                    .ok()
+                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
+                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
+                Some((RepoPath::from_rel_path(&repo_path), content))
+            })
+            .collect();
+
+        self.with_state_async(false, move |state| {
+            let mut entries = Vec::new();
+            let all_paths: HashSet<&RepoPath> = state
+                .head_contents
+                .keys()
+                .chain(
+                    worktree_files
+                        .keys()
+                        .filter(|p| state.index_contents.contains_key(*p)),
+                )
+                .collect();
+            for path in all_paths {
+                if !matches_prefixes(path, &path_prefixes) {
+                    continue;
+                }
+                let head = state.head_contents.get(path);
+                let worktree = worktree_files.get(path);
+                match (head, worktree) {
+                    (Some(old), Some(new)) if old != new => {
+                        entries.push((
+                            path.clone(),
+                            git::status::DiffStat {
+                                added: count_lines(new),
+                                deleted: count_lines(old),
+                            },
+                        ));
+                    }
+                    (Some(old), None) => {
+                        entries.push((
+                            path.clone(),
+                            git::status::DiffStat {
+                                added: 0,
+                                deleted: count_lines(old),
+                            },
+                        ));
+                    }
+                    (None, Some(new)) => {
+                        entries.push((
+                            path.clone(),
+                            git::status::DiffStat {
+                                added: count_lines(new),
+                                deleted: 0,
+                            },
+                        ));
+                    }
+                    _ => {}
+                }
+            }
+            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
+            Ok(git::status::GitDiffStat {
+                entries: entries.into(),
+            })
+        })
+        .boxed()
+    }
+
     fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
         let executor = self.executor.clone();
         let fs = self.fs.clone();
@@ -881,146 +1009,13 @@ impl GitRepository for FakeGitRepository {
     fn commit_data_reader(&self) -> Result<CommitDataReader> {
         anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
     }
-}
 
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::{FakeFs, Fs};
-    use gpui::TestAppContext;
-    use serde_json::json;
-    use std::path::Path;
-
-    #[gpui::test]
-    async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
-        let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"];
-
-        for worktree_dir_setting in worktree_dir_settings {
-            let fs = FakeFs::new(cx.executor());
-            fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"}))
-                .await;
-            let repo = fs
-                .open_repo(Path::new("/project/.git"), None)
-                .expect("should open fake repo");
-
-            // Initially no worktrees
-            let worktrees = repo.worktrees().await.unwrap();
-            assert!(worktrees.is_empty());
-
-            let expected_dir = git::repository::resolve_worktree_directory(
-                Path::new("/project"),
-                worktree_dir_setting,
-            );
-
-            // Create a worktree
-            repo.create_worktree(
-                "feature-branch".to_string(),
-                expected_dir.clone(),
-                Some("abc123".to_string()),
-            )
-            .await
-            .unwrap();
-
-            // List worktrees — should have one
-            let worktrees = repo.worktrees().await.unwrap();
-            assert_eq!(worktrees.len(), 1);
-            assert_eq!(
-                worktrees[0].path,
-                expected_dir.join("feature-branch"),
-                "failed for worktree_directory setting: {worktree_dir_setting:?}"
-            );
-            assert_eq!(worktrees[0].ref_name.as_ref(), "refs/heads/feature-branch");
-            assert_eq!(worktrees[0].sha.as_ref(), "abc123");
-
-            // Directory should exist in FakeFs after create
-            assert!(
-                fs.is_dir(&expected_dir.join("feature-branch")).await,
-                "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
-            );
-
-            // Create a second worktree (without explicit commit)
-            repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None)
-                .await
-                .unwrap();
-
-            let worktrees = repo.worktrees().await.unwrap();
-            assert_eq!(worktrees.len(), 2);
-            assert!(
-                fs.is_dir(&expected_dir.join("bugfix-branch")).await,
-                "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
-            );
-
-            // Rename the first worktree
-            repo.rename_worktree(
-                expected_dir.join("feature-branch"),
-                expected_dir.join("renamed-branch"),
-            )
-            .await
-            .unwrap();
-
-            let worktrees = repo.worktrees().await.unwrap();
-            assert_eq!(worktrees.len(), 2);
-            assert!(
-                worktrees
-                    .iter()
-                    .any(|w| w.path == expected_dir.join("renamed-branch")),
-                "renamed worktree should exist at new path for setting {worktree_dir_setting:?}"
-            );
-            assert!(
-                worktrees
-                    .iter()
-                    .all(|w| w.path != expected_dir.join("feature-branch")),
-                "old path should no longer exist for setting {worktree_dir_setting:?}"
-            );
-
-            // Directory should be moved in FakeFs after rename
-            assert!(
-                !fs.is_dir(&expected_dir.join("feature-branch")).await,
-                "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}"
-            );
-            assert!(
-                fs.is_dir(&expected_dir.join("renamed-branch")).await,
-                "new worktree directory should exist after rename for setting {worktree_dir_setting:?}"
-            );
-
-            // Rename a nonexistent worktree should fail
-            let result = repo
-                .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere"))
-                .await;
-            assert!(result.is_err());
-
-            // Remove a worktree
-            repo.remove_worktree(expected_dir.join("renamed-branch"), false)
-                .await
-                .unwrap();
-
-            let worktrees = repo.worktrees().await.unwrap();
-            assert_eq!(worktrees.len(), 1);
-            assert_eq!(worktrees[0].path, expected_dir.join("bugfix-branch"));
-
-            // Directory should be removed from FakeFs after remove
-            assert!(
-                !fs.is_dir(&expected_dir.join("renamed-branch")).await,
-                "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
-            );
-
-            // Remove a nonexistent worktree should fail
-            let result = repo
-                .remove_worktree(PathBuf::from("/nonexistent"), false)
-                .await;
-            assert!(result.is_err());
-
-            // Remove the last worktree
-            repo.remove_worktree(expected_dir.join("bugfix-branch"), false)
-                .await
-                .unwrap();
-
-            let worktrees = repo.worktrees().await.unwrap();
-            assert!(worktrees.is_empty());
-            assert!(
-                !fs.is_dir(&expected_dir.join("bugfix-branch")).await,
-                "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
-            );
-        }
+    fn set_trusted(&self, trusted: bool) {
+        self.is_trusted
+            .store(trusted, std::sync::atomic::Ordering::Release);
+    }
+
+    fn is_trusted(&self) -> bool {
+        self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
     }
 }

crates/fs/src/fs.rs 🔗

@@ -2776,6 +2776,7 @@ impl Fs for FakeFs {
                     repository_dir_path: repository_dir_path.to_owned(),
                     common_dir_path: common_dir_path.to_owned(),
                     checkpoints: Arc::default(),
+                    is_trusted: Arc::default(),
                 }) as _
             },
         )

crates/fs/tests/integration/fake_git_repo.rs 🔗

@@ -1,9 +1,146 @@
 use fs::{FakeFs, Fs};
-use gpui::BackgroundExecutor;
+use gpui::{BackgroundExecutor, TestAppContext};
 use serde_json::json;
-use std::path::Path;
+use std::path::{Path, PathBuf};
 use util::path;
 
+#[gpui::test]
+async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
+    let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"];
+
+    for worktree_dir_setting in worktree_dir_settings {
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"}))
+            .await;
+        let repo = fs
+            .open_repo(Path::new("/project/.git"), None)
+            .expect("should open fake repo");
+
+        // Initially only the main worktree exists
+        let worktrees = repo.worktrees().await.unwrap();
+        assert_eq!(worktrees.len(), 1);
+        assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+
+        let expected_dir = git::repository::resolve_worktree_directory(
+            Path::new("/project"),
+            worktree_dir_setting,
+        );
+
+        // Create a worktree
+        repo.create_worktree(
+            "feature-branch".to_string(),
+            expected_dir.clone(),
+            Some("abc123".to_string()),
+        )
+        .await
+        .unwrap();
+
+        // List worktrees — should have main + one created
+        let worktrees = repo.worktrees().await.unwrap();
+        assert_eq!(worktrees.len(), 2);
+        assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+        assert_eq!(
+            worktrees[1].path,
+            expected_dir.join("feature-branch"),
+            "failed for worktree_directory setting: {worktree_dir_setting:?}"
+        );
+        assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+        assert_eq!(worktrees[1].sha.as_ref(), "abc123");
+
+        // Directory should exist in FakeFs after create
+        assert!(
+            fs.is_dir(&expected_dir.join("feature-branch")).await,
+            "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
+        );
+
+        // Create a second worktree (without explicit commit)
+        repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None)
+            .await
+            .unwrap();
+
+        let worktrees = repo.worktrees().await.unwrap();
+        assert_eq!(worktrees.len(), 3);
+        assert!(
+            fs.is_dir(&expected_dir.join("bugfix-branch")).await,
+            "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
+        );
+
+        // Rename the first worktree
+        repo.rename_worktree(
+            expected_dir.join("feature-branch"),
+            expected_dir.join("renamed-branch"),
+        )
+        .await
+        .unwrap();
+
+        let worktrees = repo.worktrees().await.unwrap();
+        assert_eq!(worktrees.len(), 3);
+        assert!(
+            worktrees
+                .iter()
+                .any(|w| w.path == expected_dir.join("renamed-branch")),
+            "renamed worktree should exist at new path for setting {worktree_dir_setting:?}"
+        );
+        assert!(
+            worktrees
+                .iter()
+                .all(|w| w.path != expected_dir.join("feature-branch")),
+            "old path should no longer exist for setting {worktree_dir_setting:?}"
+        );
+
+        // Directory should be moved in FakeFs after rename
+        assert!(
+            !fs.is_dir(&expected_dir.join("feature-branch")).await,
+            "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}"
+        );
+        assert!(
+            fs.is_dir(&expected_dir.join("renamed-branch")).await,
+            "new worktree directory should exist after rename for setting {worktree_dir_setting:?}"
+        );
+
+        // Rename a nonexistent worktree should fail
+        let result = repo
+            .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere"))
+            .await;
+        assert!(result.is_err());
+
+        // Remove a worktree
+        repo.remove_worktree(expected_dir.join("renamed-branch"), false)
+            .await
+            .unwrap();
+
+        let worktrees = repo.worktrees().await.unwrap();
+        assert_eq!(worktrees.len(), 2);
+        assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+        assert_eq!(worktrees[1].path, expected_dir.join("bugfix-branch"));
+
+        // Directory should be removed from FakeFs after remove
+        assert!(
+            !fs.is_dir(&expected_dir.join("renamed-branch")).await,
+            "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
+        );
+
+        // Remove a nonexistent worktree should fail
+        let result = repo
+            .remove_worktree(PathBuf::from("/nonexistent"), false)
+            .await;
+        assert!(result.is_err());
+
+        // Remove the last worktree
+        repo.remove_worktree(expected_dir.join("bugfix-branch"), false)
+            .await
+            .unwrap();
+
+        let worktrees = repo.worktrees().await.unwrap();
+        assert_eq!(worktrees.len(), 1);
+        assert_eq!(worktrees[0].path, PathBuf::from("/project"));
+        assert!(
+            !fs.is_dir(&expected_dir.join("bugfix-branch")).await,
+            "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
+        );
+    }
+}
+
 #[gpui::test]
 async fn test_checkpoints(executor: BackgroundExecutor) {
     let fs = FakeFs::new(executor);

crates/git/clippy.toml 🔗

@@ -0,0 +1,28 @@
+allow-private-module-inception = true
+avoid-breaking-exported-api = false
+ignore-interior-mutability = [
+    # Suppresses clippy::mutable_key_type, which is a false positive as the Eq
+    # and Hash impls do not use fields with interior mutability.
+    "agent_ui::context::AgentContextKey"
+]
+disallowed-methods = [
+    { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
+    { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
+    { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
+    { path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" },
+    { path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" },
+    { path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" },
+    { path = "smol::Timer::after", reason = "smol::Timer introduces non-determinism in tests", replacement = "gpui::BackgroundExecutor::timer" },
+    { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." },
+    { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json_lenient::from_slice` instead." },
+    { path = "cocoa::foundation::NSString::alloc", reason = "NSString must be autoreleased to avoid memory leaks. Use `ns_string()` helper instead." },
+    { path = "smol::process::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" },
+    { path = "util::command::new_command", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" },
+    { path = "util::command::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" },
+]
+disallowed-types = [
+    # { path = "std::collections::HashMap", replacement = "collections::HashMap" },
+    # { path = "std::collections::HashSet", replacement = "collections::HashSet" },
+    # { path = "indexmap::IndexSet", replacement = "collections::IndexSet" },
+    # { path = "indexmap::IndexMap", replacement = "collections::IndexMap" },
+]

crates/git/src/blame.rs 🔗

@@ -1,11 +1,11 @@
 use crate::Oid;
 use crate::commit::get_messages;
-use crate::repository::RepoPath;
+use crate::repository::{GitBinary, RepoPath};
 use anyhow::{Context as _, Result};
 use collections::{HashMap, HashSet};
 use futures::AsyncWriteExt;
 use serde::{Deserialize, Serialize};
-use std::{ops::Range, path::Path};
+use std::ops::Range;
 use text::{LineEnding, Rope};
 use time::OffsetDateTime;
 use time::UtcOffset;
@@ -21,15 +21,13 @@ pub struct Blame {
 }
 
 impl Blame {
-    pub async fn for_path(
-        git_binary: &Path,
-        working_directory: &Path,
+    pub(crate) async fn for_path(
+        git: &GitBinary,
         path: &RepoPath,
         content: &Rope,
         line_ending: LineEnding,
     ) -> Result<Self> {
-        let output =
-            run_git_blame(git_binary, working_directory, path, content, line_ending).await?;
+        let output = run_git_blame(git, path, content, line_ending).await?;
         let mut entries = parse_git_blame(&output)?;
         entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
 
@@ -40,7 +38,7 @@ impl Blame {
         }
 
         let shas = unique_shas.into_iter().collect::<Vec<_>>();
-        let messages = get_messages(working_directory, &shas)
+        let messages = get_messages(git, &shas)
             .await
             .context("failed to get commit messages")?;
 
@@ -52,8 +50,7 @@ const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD";
 const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
 
 async fn run_git_blame(
-    git_binary: &Path,
-    working_directory: &Path,
+    git: &GitBinary,
     path: &RepoPath,
     contents: &Rope,
     line_ending: LineEnding,
@@ -61,12 +58,7 @@ async fn run_git_blame(
     let mut child = {
         let span = ztracing::debug_span!("spawning git-blame command", path = path.as_unix_str());
         let _enter = span.enter();
-        util::command::new_command(git_binary)
-            .current_dir(working_directory)
-            .arg("blame")
-            .arg("--incremental")
-            .arg("--contents")
-            .arg("-")
+        git.build_command(["blame", "--incremental", "--contents", "-"])
             .arg(path.as_unix_str())
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())

crates/git/src/commit.rs 🔗

@@ -1,11 +1,11 @@
 use crate::{
     BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, parse_git_remote_url,
-    status::StatusCode,
+    repository::GitBinary, status::StatusCode,
 };
 use anyhow::{Context as _, Result};
 use collections::HashMap;
 use gpui::SharedString;
-use std::{path::Path, sync::Arc};
+use std::sync::Arc;
 
 #[derive(Clone, Debug, Default)]
 pub struct ParsedCommitMessage {
@@ -48,7 +48,7 @@ impl ParsedCommitMessage {
     }
 }
 
-pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<HashMap<Oid, String>> {
+pub(crate) async fn get_messages(git: &GitBinary, shas: &[Oid]) -> Result<HashMap<Oid, String>> {
     if shas.is_empty() {
         return Ok(HashMap::default());
     }
@@ -63,12 +63,12 @@ pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<Hash
 
         let mut result = vec![];
         for shas in shas.chunks(MAX_ENTRIES_PER_INVOCATION) {
-            let partial = get_messages_impl(working_directory, shas).await?;
+            let partial = get_messages_impl(git, shas).await?;
             result.extend(partial);
         }
         result
     } else {
-        get_messages_impl(working_directory, shas).await?
+        get_messages_impl(git, shas).await?
     };
 
     Ok(shas
@@ -78,11 +78,10 @@ pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<Hash
         .collect::<HashMap<Oid, String>>())
 }
 
-async fn get_messages_impl(working_directory: &Path, shas: &[Oid]) -> Result<Vec<String>> {
+async fn get_messages_impl(git: &GitBinary, shas: &[Oid]) -> Result<Vec<String>> {
     const MARKER: &str = "<MARKER>";
-    let output = util::command::new_command("git")
-        .current_dir(working_directory)
-        .arg("show")
+    let output = git
+        .build_command(["show"])
         .arg("-s")
         .arg(format!("--format=%B{}", MARKER))
         .args(shas.iter().map(ToString::to_string))

crates/git/src/repository.rs 🔗

@@ -21,6 +21,7 @@ use text::LineEnding;
 
 use std::collections::HashSet;
 use std::ffi::{OsStr, OsString};
+use std::sync::atomic::AtomicBool;
 
 use std::process::ExitStatus;
 use std::str::FromStr;
@@ -55,6 +56,26 @@ pub const GRAPH_CHUNK_SIZE: usize = 1000;
 /// Default value for the `git.worktree_directory` setting.
 pub const DEFAULT_WORKTREE_DIRECTORY: &str = "../worktrees";
 
+/// Given the git common directory (from `commondir()`), derive the original
+/// repository's working directory.
+///
+/// For a standard checkout, `common_dir` is `<work_dir>/.git`, so the parent
+/// is the working directory. For a git worktree, `common_dir` is the **main**
+/// repo's `.git` directory, so the parent is the original repo's working directory.
+///
+/// Falls back to returning `common_dir` itself if it doesn't end with `.git`
+/// (e.g. bare repos or unusual layouts).
+pub fn original_repo_path_from_common_dir(common_dir: &Path) -> PathBuf {
+    if common_dir.file_name() == Some(OsStr::new(".git")) {
+        common_dir
+            .parent()
+            .map(|p| p.to_path_buf())
+            .unwrap_or_else(|| common_dir.to_path_buf())
+    } else {
+        common_dir.to_path_buf()
+    }
+}
+
 /// Resolves the configured worktree directory to an absolute path.
 ///
 /// `worktree_directory_setting` is the raw string from the user setting
@@ -283,6 +304,7 @@ impl Branch {
 pub struct Worktree {
     pub path: PathBuf,
     pub ref_name: SharedString,
+    // todo(git_worktree) This type should be an Oid
     pub sha: SharedString,
 }
 
@@ -320,6 +342,8 @@ pub fn parse_worktrees_from_str<T: AsRef<str>>(raw_worktrees: T) -> Vec<Worktree
             // Ignore other lines: detached, bare, locked, prunable, etc.
         }
 
+        // todo(git_worktree) We should add a test for the detached HEAD state:
+        // a detached HEAD will have ref_name as None, so we would skip it.
         if let (Some(path), Some(sha), Some(ref_name)) = (path, sha, ref_name) {
             worktrees.push(Worktree {
                 path: PathBuf::from(path),
@@ -898,6 +922,11 @@ pub trait GitRepository: Send + Sync {
     /// Run git diff
     fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result<String>>;
 
+    fn diff_stat(
+        &self,
+        path_prefixes: &[RepoPath],
+    ) -> BoxFuture<'_, Result<crate::status::GitDiffStat>>;
+
     /// Creates a checkpoint for the repository.
     fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>>;
 
@@ -933,6 +962,9 @@ pub trait GitRepository: Send + Sync {
     ) -> BoxFuture<'_, Result<()>>;
 
     fn commit_data_reader(&self) -> Result<CommitDataReader>;
+
+    fn set_trusted(&self, trusted: bool);
+    fn is_trusted(&self) -> bool;
 }
 
 pub enum DiffType {
@@ -959,6 +991,7 @@ pub struct RealGitRepository {
     pub any_git_binary_path: PathBuf,
     any_git_binary_help_output: Arc<Mutex<Option<SharedString>>>,
     executor: BackgroundExecutor,
+    is_trusted: Arc<AtomicBool>,
 }
 
 impl RealGitRepository {
@@ -977,6 +1010,7 @@ impl RealGitRepository {
             any_git_binary_path,
             executor,
             any_git_binary_help_output: Arc::new(Mutex::new(None)),
+            is_trusted: Arc::new(AtomicBool::new(false)),
         })
     }
 
@@ -988,20 +1022,24 @@ impl RealGitRepository {
             .map(Path::to_path_buf)
     }
 
+    fn git_binary(&self) -> Result<GitBinary> {
+        Ok(GitBinary::new(
+            self.any_git_binary_path.clone(),
+            self.working_directory()
+                .with_context(|| "Can't run git commands without a working directory")?,
+            self.executor.clone(),
+            self.is_trusted(),
+        ))
+    }
+
     async fn any_git_binary_help_output(&self) -> SharedString {
         if let Some(output) = self.any_git_binary_help_output.lock().clone() {
             return output;
         }
-        let git_binary_path = self.any_git_binary_path.clone();
-        let executor = self.executor.clone();
-        let working_directory = self.working_directory();
+        let git_binary = self.git_binary();
         let output: SharedString = self
             .executor
-            .spawn(async move {
-                GitBinary::new(git_binary_path, working_directory?, executor)
-                    .run(["help", "-a"])
-                    .await
-            })
+            .spawn(async move { git_binary?.run(["help", "-a"]).await })
             .await
             .unwrap_or_default()
             .into();
@@ -1044,6 +1082,7 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter {
         git_binary_path.unwrap_or(PathBuf::from("git")),
         paths::home_dir().clone(),
         cx.background_executor().clone(),
+        true,
     );
 
     cx.background_spawn(async move {
@@ -1075,14 +1114,12 @@ impl GitRepository for RealGitRepository {
     }
 
     fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let working_directory = working_directory?;
-                let output = new_command(git_binary_path)
-                    .current_dir(&working_directory)
-                    .args([
+                let git = git_binary?;
+                let output = git
+                    .build_command([
                         "--no-optional-locks",
                         "show",
                         "--no-patch",
@@ -1113,15 +1150,14 @@ impl GitRepository for RealGitRepository {
     }
 
     fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result<CommitDiff>> {
-        let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned)
-        else {
+        if self.repository.lock().workdir().is_none() {
             return future::ready(Err(anyhow!("no working directory"))).boxed();
-        };
-        let git_binary_path = self.any_git_binary_path.clone();
+        }
+        let git_binary = self.git_binary();
         cx.background_spawn(async move {
-            let show_output = util::command::new_command(&git_binary_path)
-                .current_dir(&working_directory)
-                .args([
+            let git = git_binary?;
+            let show_output = git
+                .build_command([
                     "--no-optional-locks",
                     "show",
                     "--format=",
@@ -1142,9 +1178,8 @@ impl GitRepository for RealGitRepository {
             let changes = parse_git_diff_name_status(&show_stdout);
             let parent_sha = format!("{}^", commit);
 
-            let mut cat_file_process = util::command::new_command(&git_binary_path)
-                .current_dir(&working_directory)
-                .args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"])
+            let mut cat_file_process = git
+                .build_command(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"])
                 .stdin(Stdio::piped())
                 .stdout(Stdio::piped())
                 .stderr(Stdio::piped())
@@ -1251,18 +1286,17 @@ impl GitRepository for RealGitRepository {
         mode: ResetMode,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
+        let git_binary = self.git_binary();
         async move {
-            let working_directory = self.working_directory();
-
             let mode_flag = match mode {
                 ResetMode::Mixed => "--mixed",
                 ResetMode::Soft => "--soft",
             };
 
-            let output = new_command(&self.any_git_binary_path)
+            let git = git_binary?;
+            let output = git
+                .build_command(["reset", mode_flag, &commit])
                 .envs(env.iter())
-                .current_dir(&working_directory?)
-                .args(["reset", mode_flag, &commit])
                 .output()
                 .await?;
             anyhow::ensure!(
@@ -1281,17 +1315,16 @@ impl GitRepository for RealGitRepository {
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         async move {
             if paths.is_empty() {
                 return Ok(());
             }
 
-            let output = new_command(&git_binary_path)
-                .current_dir(&working_directory?)
+            let git = git_binary?;
+            let output = git
+                .build_command(["checkout", &commit, "--"])
                 .envs(env.iter())
-                .args(["checkout", &commit, "--"])
                 .args(paths.iter().map(|path| path.as_unix_str()))
                 .output()
                 .await?;
@@ -1313,33 +1346,29 @@ impl GitRepository for RealGitRepository {
         self.executor
             .spawn(async move {
                 fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
-                    // This check is required because index.get_path() unwraps internally :(
                     let mut index = repo.index()?;
                     index.read(false)?;
 
                     const STAGE_NORMAL: i32 = 0;
-                    let path = path.as_std_path();
-                    // `RepoPath` contains a `RelPath` which normalizes `.` into an empty path
-                    // `get_path` unwraps on empty paths though, so undo that normalization here
-                    let path = if path.components().next().is_none() {
-                        ".".as_ref()
-                    } else {
-                        path
-                    };
-                    let oid = match index.get_path(path, STAGE_NORMAL) {
-                        Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
-                        _ => return Ok(None),
+                    // git2 unwraps internally on empty paths or `.`
+                    if path.is_empty() {
+                        bail!("empty path has no index text");
+                    }
+                    let Some(entry) = index.get_path(path.as_std_path(), STAGE_NORMAL) else {
+                        return Ok(None);
                     };
+                    if entry.mode == GIT_MODE_SYMLINK {
+                        return Ok(None);
+                    }
 
-                    let content = repo.find_blob(oid)?.content().to_owned();
+                    let content = repo.find_blob(entry.id)?.content().to_owned();
                     Ok(String::from_utf8(content).ok())
                 }
 
-                match logic(&repo.lock(), &path) {
-                    Ok(value) => return value,
-                    Err(err) => log::error!("Error loading index text: {:?}", err),
-                }
-                None
+                logic(&repo.lock(), &path)
+                    .context("loading index text")
+                    .log_err()
+                    .flatten()
             })
             .boxed()
     }
@@ -1348,14 +1377,26 @@ impl GitRepository for RealGitRepository {
         let repo = self.repository.clone();
         self.executor
             .spawn(async move {
-                let repo = repo.lock();
-                let head = repo.head().ok()?.peel_to_tree().log_err()?;
-                let entry = head.get_path(path.as_std_path()).ok()?;
-                if entry.filemode() == i32::from(git2::FileMode::Link) {
-                    return None;
+                fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
+                    let head = repo.head()?.peel_to_tree()?;
+                    // git2 unwraps internally on empty paths or `.`
+                    if path.is_empty() {
+                        return Err(anyhow!("empty path has no committed text"));
+                    }
+                    // `Tree::get_path` is fallible: only "not found" means the
+                    // file has no committed text. Propagate other repository
+                    // errors (corruption, I/O) instead of silently mapping them
+                    // to None, so `.context(..).log_err()` below can surface them.
+                    let entry = match head.get_path(path.as_std_path()) {
+                        Ok(entry) => entry,
+                        Err(err) if err.code() == git2::ErrorCode::NotFound => return Ok(None),
+                        Err(err) => return Err(err.into()),
+                    };
+                    // Symlinks carry no buffer text to diff against.
+                    if entry.filemode() == i32::from(git2::FileMode::Link) {
+                        return Ok(None);
+                    }
+                    let content = repo.find_blob(entry.id())?.content().to_owned();
+                    // Binary (non-UTF-8) blobs yield None rather than an error.
+                    Ok(String::from_utf8(content).ok())
                 }
-                let content = repo.find_blob(entry.id()).log_err()?.content().to_owned();
-                String::from_utf8(content).ok()
+
+                logic(&repo.lock(), &path)
+                    .context("loading committed text")
+                    .log_err()
+                    .flatten()
             })
             .boxed()
     }
@@ -1378,18 +1419,16 @@ impl GitRepository for RealGitRepository {
         env: Arc<HashMap<String, String>>,
         is_executable: bool,
     ) -> BoxFuture<'_, anyhow::Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let working_directory = working_directory?;
+                let git = git_binary?;
                 let mode = if is_executable { "100755" } else { "100644" };
 
                 if let Some(content) = content {
-                    let mut child = new_command(&git_binary_path)
-                        .current_dir(&working_directory)
+                    let mut child = git
+                        .build_command(["hash-object", "-w", "--stdin"])
                         .envs(env.iter())
-                        .args(["hash-object", "-w", "--stdin"])
                         .stdin(Stdio::piped())
                         .stdout(Stdio::piped())
                         .spawn()?;
@@ -1402,10 +1441,9 @@ impl GitRepository for RealGitRepository {
 
                     log::debug!("indexing SHA: {sha}, path {path:?}");
 
-                    let output = new_command(&git_binary_path)
-                        .current_dir(&working_directory)
+                    let output = git
+                        .build_command(["update-index", "--add", "--cacheinfo", mode, sha])
                         .envs(env.iter())
-                        .args(["update-index", "--add", "--cacheinfo", mode, sha])
                         .arg(path.as_unix_str())
                         .output()
                         .await?;
@@ -1417,10 +1455,9 @@ impl GitRepository for RealGitRepository {
                     );
                 } else {
                     log::debug!("removing path {path:?} from the index");
-                    let output = new_command(&git_binary_path)
-                        .current_dir(&working_directory)
+                    let output = git
+                        .build_command(["update-index", "--force-remove"])
                         .envs(env.iter())
-                        .args(["update-index", "--force-remove"])
                         .arg(path.as_unix_str())
                         .output()
                         .await?;
@@ -1449,14 +1486,12 @@ impl GitRepository for RealGitRepository {
     }
 
     fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let working_directory = working_directory?;
-                let mut process = new_command(&git_binary_path)
-                    .current_dir(&working_directory)
-                    .args([
+                let git = git_binary?;
+                let mut process = git
+                    .build_command([
                         "--no-optional-locks",
                         "cat-file",
                         "--batch-check=%(objectname)",
@@ -1509,19 +1544,14 @@ impl GitRepository for RealGitRepository {
     }
 
     fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = match self.working_directory() {
-            Ok(working_directory) => working_directory,
+        let git = match self.git_binary() {
+            Ok(git) => git,
             Err(e) => return Task::ready(Err(e)),
         };
         let args = git_status_args(path_prefixes);
         log::debug!("Checking for git status in {path_prefixes:?}");
         self.executor.spawn(async move {
-            let output = new_command(&git_binary_path)
-                .current_dir(working_directory)
-                .args(args)
-                .output()
-                .await?;
+            let output = git.build_command(args).output().await?;
             if output.status.success() {
                 let stdout = String::from_utf8_lossy(&output.stdout);
                 stdout.parse()
@@ -1533,9 +1563,8 @@ impl GitRepository for RealGitRepository {
     }
 
     fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = match self.working_directory() {
-            Ok(working_directory) => working_directory,
+        let git = match self.git_binary() {
+            Ok(git) => git,
             Err(e) => return Task::ready(Err(e)).boxed(),
         };
 
@@ -1560,11 +1589,7 @@ impl GitRepository for RealGitRepository {
 
         self.executor
             .spawn(async move {
-                let output = new_command(&git_binary_path)
-                    .current_dir(working_directory)
-                    .args(args)
-                    .output()
-                    .await?;
+                let output = git.build_command(args).output().await?;
                 if output.status.success() {
                     let stdout = String::from_utf8_lossy(&output.stdout);
                     stdout.parse()
@@ -1577,13 +1602,12 @@ impl GitRepository for RealGitRepository {
     }
 
     fn stash_entries(&self) -> BoxFuture<'_, Result<GitStash>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let output = new_command(&git_binary_path)
-                    .current_dir(working_directory?)
-                    .args(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"])
+                let git = git_binary?;
+                let output = git
+                    .build_command(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"])
                     .output()
                     .await?;
                 if output.status.success() {
@@ -1598,8 +1622,7 @@ impl GitRepository for RealGitRepository {
     }
 
     fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
                 let fields = [
@@ -1621,12 +1644,8 @@ impl GitRepository for RealGitRepository {
                     "--format",
                     &fields,
                 ];
-                let working_directory = working_directory?;
-                let output = new_command(&git_binary_path)
-                    .current_dir(&working_directory)
-                    .args(args)
-                    .output()
-                    .await?;
+                let git = git_binary?;
+                let output = git.build_command(args).output().await?;
 
                 anyhow::ensure!(
                     output.status.success(),
@@ -1640,11 +1659,7 @@ impl GitRepository for RealGitRepository {
                 if branches.is_empty() {
                     let args = vec!["symbolic-ref", "--quiet", "HEAD"];
 
-                    let output = new_command(&git_binary_path)
-                        .current_dir(&working_directory)
-                        .args(args)
-                        .output()
-                        .await?;
+                    let output = git.build_command(args).output().await?;
 
                     // git symbolic-ref returns a non-0 exit code if HEAD points
                     // to something other than a branch
@@ -1666,13 +1681,12 @@ impl GitRepository for RealGitRepository {
     }
 
     fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let output = new_command(&git_binary_path)
-                    .current_dir(working_directory?)
-                    .args(&["--no-optional-locks", "worktree", "list", "--porcelain"])
+                let git = git_binary?;
+                let output = git
+                    .build_command(&["--no-optional-locks", "worktree", "list", "--porcelain"])
                     .output()
                     .await?;
                 if output.status.success() {
@@ -1692,8 +1706,7 @@ impl GitRepository for RealGitRepository {
         directory: PathBuf,
         from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
+        let git_binary = self.git_binary();
         let final_path = directory.join(&name);
         let mut args = vec![
             OsString::from("--no-optional-locks"),
@@ -1713,11 +1726,8 @@ impl GitRepository for RealGitRepository {
         self.executor
             .spawn(async move {
                 std::fs::create_dir_all(final_path.parent().unwrap_or(&final_path))?;
-                let output = new_command(&git_binary_path)
-                    .current_dir(working_directory?)
-                    .args(args)
-                    .output()
-                    .await?;
+                let git = git_binary?;
+                let output = git.build_command(args).output().await?;
                 if output.status.success() {
                     Ok(())
                 } else {
@@ -1729,9 +1739,7 @@ impl GitRepository for RealGitRepository {
     }
 
     fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
-        let executor = self.executor.clone();
+        let git_binary = self.git_binary();
 
         self.executor
             .spawn(async move {
@@ -1745,18 +1753,14 @@ impl GitRepository for RealGitRepository {
                 }
                 args.push("--".into());
                 args.push(path.as_os_str().into());
-                GitBinary::new(git_binary_path, working_directory?, executor)
-                    .run(args)
-                    .await?;
+                git_binary?.run(args).await?;
                 anyhow::Ok(())
             })
             .boxed()
     }
 
     fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
-        let executor = self.executor.clone();
+        let git_binary = self.git_binary();
 
         self.executor
             .spawn(async move {
@@ -1768,9 +1772,7 @@ impl GitRepository for RealGitRepository {
                     old_path.as_os_str().into(),
                     new_path.as_os_str().into(),
                 ];
-                GitBinary::new(git_binary_path, working_directory?, executor)
-                    .run(args)
-                    .await?;
+                git_binary?.run(args).await?;
                 anyhow::Ok(())
             })
             .boxed()
@@ -1778,9 +1780,7 @@ impl GitRepository for RealGitRepository {
 
     fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
         let repo = self.repository.clone();
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
-        let executor = self.executor.clone();
+        let git_binary = self.git_binary();
         let branch = self.executor.spawn(async move {
             let repo = repo.lock();
             let branch = if let Ok(branch) = repo.find_branch(&name, BranchType::Local) {
@@ -1815,9 +1815,7 @@ impl GitRepository for RealGitRepository {
         self.executor
             .spawn(async move {
                 let branch = branch.await?;
-                GitBinary::new(git_binary_path, working_directory?, executor)
-                    .run(&["checkout", &branch])
-                    .await?;
+                git_binary?.run(&["checkout", &branch]).await?;
                 anyhow::Ok(())
             })
             .boxed()
@@ -1828,9 +1826,7 @@ impl GitRepository for RealGitRepository {
         name: String,
         base_branch: Option<String>,
     ) -> BoxFuture<'_, Result<()>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
-        let executor = self.executor.clone();
+        let git_binary = self.git_binary();
 
         self.executor
             .spawn(async move {
@@ -1841,22 +1837,18 @@ impl GitRepository for RealGitRepository {
                     args.push(&base_branch_str);
                 }
 
-                GitBinary::new(git_binary_path, working_directory?, executor)
-                    .run(&args)
-                    .await?;
+                git_binary?.run(&args).await?;
                 anyhow::Ok(())
             })
             .boxed()
     }
 
     fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
-        let executor = self.executor.clone();
+        let git_binary = self.git_binary();
 
         self.executor
             .spawn(async move {
-                GitBinary::new(git_binary_path, working_directory?, executor)
+                git_binary?
                     .run(&["branch", "-m", &branch, &new_name])
                     .await?;
                 anyhow::Ok(())
@@ -1865,15 +1857,11 @@ impl GitRepository for RealGitRepository {
     }
 
     fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
-        let git_binary_path = self.any_git_binary_path.clone();
-        let working_directory = self.working_directory();
-        let executor = self.executor.clone();
+        let git_binary = self.git_binary();
 
         self.executor
             .spawn(async move {
-                GitBinary::new(git_binary_path, working_directory?, executor)
-                    .run(&["branch", "-d", &name])
-                    .await?;
+                git_binary?.run(&["branch", "-d", &name]).await?;
                 anyhow::Ok(())
             })
             .boxed()
@@ -1885,20 +1873,11 @@ impl GitRepository for RealGitRepository {
         content: Rope,
         line_ending: LineEnding,
     ) -> BoxFuture<'_, Result<crate::blame::Blame>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
-        let executor = self.executor.clone();
+        let git = self.git_binary();
 
-        executor
+        self.executor
             .spawn(async move {
-                crate::blame::Blame::for_path(
-                    &git_binary_path,
-                    &working_directory?,
-                    &path,
-                    &content,
-                    line_ending,
-                )
-                .await
+                crate::blame::Blame::for_path(&git?, &path, &content, line_ending).await
             })
             .boxed()
     }
@@ -1913,11 +1892,10 @@ impl GitRepository for RealGitRepository {
         skip: usize,
         limit: Option<usize>,
     ) -> BoxFuture<'_, Result<FileHistory>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let working_directory = working_directory?;
+                let git = git_binary?;
                 // Use a unique delimiter with a hardcoded UUID to separate commits
                 // This essentially eliminates any chance of encountering the delimiter in actual commit data
                 let commit_delimiter =
@@ -1945,9 +1923,8 @@ impl GitRepository for RealGitRepository {
 
                 args.push("--");
 
-                let output = new_command(&git_binary_path)
-                    .current_dir(&working_directory)
-                    .args(&args)
+                let output = git
+                    .build_command(&args)
                     .arg(path.as_unix_str())
                     .output()
                     .await?;
@@ -1992,30 +1969,17 @@ impl GitRepository for RealGitRepository {
     }
 
     fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result<String>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let working_directory = working_directory?;
+                let git = git_binary?;
                 let output = match diff {
                     DiffType::HeadToIndex => {
-                        new_command(&git_binary_path)
-                            .current_dir(&working_directory)
-                            .args(["diff", "--staged"])
-                            .output()
-                            .await?
-                    }
-                    DiffType::HeadToWorktree => {
-                        new_command(&git_binary_path)
-                            .current_dir(&working_directory)
-                            .args(["diff"])
-                            .output()
-                            .await?
+                        git.build_command(["diff", "--staged"]).output().await?
                     }
+                    DiffType::HeadToWorktree => git.build_command(["diff"]).output().await?,
                     DiffType::MergeBase { base_ref } => {
-                        new_command(&git_binary_path)
-                            .current_dir(&working_directory)
-                            .args(["diff", "--merge-base", base_ref.as_ref()])
+                        git.build_command(["diff", "--merge-base", base_ref.as_ref()])
                             .output()
                             .await?
                     }
@@ -2031,20 +1995,49 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }
 
+    /// Computes per-file added/deleted line counts between HEAD and the
+    /// working tree (`git diff --numstat`), optionally limited to
+    /// `path_prefixes`.
+    fn diff_stat(
+        &self,
+        path_prefixes: &[RepoPath],
+    ) -> BoxFuture<'_, Result<crate::status::GitDiffStat>> {
+        let path_prefixes = path_prefixes.to_vec();
+        let git_binary = self.git_binary();
+
+        self.executor
+            .spawn(async move {
+                let git_binary = git_binary?;
+                // `--no-optional-locks` matches every other read-only query in
+                // this impl; `--no-renames` keeps numstat output one line per
+                // path so parse_numstat doesn't have to handle `old => new`.
+                let mut args: Vec<String> = vec![
+                    "--no-optional-locks".into(),
+                    "diff".into(),
+                    "--numstat".into(),
+                    "--no-renames".into(),
+                    "HEAD".into(),
+                ];
+                if !path_prefixes.is_empty() {
+                    args.push("--".into());
+                    // Git pathspecs always use `/` separators; `as_unix_str`
+                    // matches how every other command here passes paths
+                    // (`to_string_lossy` would produce `\`-separated pathspecs
+                    // on Windows, which git would fail to match).
+                    args.extend(
+                        path_prefixes
+                            .iter()
+                            .map(|p| p.as_unix_str().to_owned()),
+                    );
+                }
+                let output = git_binary.run(&args).await?;
+                Ok(crate::status::parse_numstat(&output))
+            })
+            .boxed()
+    }
+
     fn stage_paths(
         &self,
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
                 if !paths.is_empty() {
-                    let output = new_command(&git_binary_path)
-                        .current_dir(&working_directory?)
+                    let git = git_binary?;
+                    let output = git
+                        .build_command(["update-index", "--add", "--remove", "--"])
                         .envs(env.iter())
-                        .args(["update-index", "--add", "--remove", "--"])
                         .args(paths.iter().map(|p| p.as_unix_str()))
                         .output()
                         .await?;
@@ -2064,16 +2057,15 @@ impl GitRepository for RealGitRepository {
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
 
         self.executor
             .spawn(async move {
                 if !paths.is_empty() {
-                    let output = new_command(&git_binary_path)
-                        .current_dir(&working_directory?)
+                    let git = git_binary?;
+                    let output = git
+                        .build_command(["reset", "--quiet", "--"])
                         .envs(env.iter())
-                        .args(["reset", "--quiet", "--"])
                         .args(paths.iter().map(|p| p.as_std_path()))
                         .output()
                         .await?;
@@ -2094,19 +2086,16 @@ impl GitRepository for RealGitRepository {
         paths: Vec<RepoPath>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let mut cmd = new_command(&git_binary_path);
-                cmd.current_dir(&working_directory?)
+                let git = git_binary?;
+                let output = git
+                    .build_command(["stash", "push", "--quiet", "--include-untracked"])
                     .envs(env.iter())
-                    .args(["stash", "push", "--quiet"])
-                    .arg("--include-untracked");
-
-                cmd.args(paths.iter().map(|p| p.as_unix_str()));
-
-                let output = cmd.output().await?;
+                    .args(paths.iter().map(|p| p.as_unix_str()))
+                    .output()
+                    .await?;
 
                 anyhow::ensure!(
                     output.status.success(),
@@ -2123,20 +2112,15 @@ impl GitRepository for RealGitRepository {
         index: Option<usize>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let mut cmd = new_command(git_binary_path);
+                let git = git_binary?;
                 let mut args = vec!["stash".to_string(), "pop".to_string()];
                 if let Some(index) = index {
                     args.push(format!("stash@{{{}}}", index));
                 }
-                cmd.current_dir(&working_directory?)
-                    .envs(env.iter())
-                    .args(args);
-
-                let output = cmd.output().await?;
+                let output = git.build_command(&args).envs(env.iter()).output().await?;
 
                 anyhow::ensure!(
                     output.status.success(),
@@ -2153,20 +2137,15 @@ impl GitRepository for RealGitRepository {
         index: Option<usize>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let mut cmd = new_command(git_binary_path);
+                let git = git_binary?;
                 let mut args = vec!["stash".to_string(), "apply".to_string()];
                 if let Some(index) = index {
                     args.push(format!("stash@{{{}}}", index));
                 }
-                cmd.current_dir(&working_directory?)
-                    .envs(env.iter())
-                    .args(args);
-
-                let output = cmd.output().await?;
+                let output = git.build_command(&args).envs(env.iter()).output().await?;
 
                 anyhow::ensure!(
                     output.status.success(),
@@ -2183,20 +2162,15 @@ impl GitRepository for RealGitRepository {
         index: Option<usize>,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         self.executor
             .spawn(async move {
-                let mut cmd = new_command(git_binary_path);
+                let git = git_binary?;
                 let mut args = vec!["stash".to_string(), "drop".to_string()];
                 if let Some(index) = index {
                     args.push(format!("stash@{{{}}}", index));
                 }
-                cmd.current_dir(&working_directory?)
-                    .envs(env.iter())
-                    .args(args);
-
-                let output = cmd.output().await?;
+                let output = git.build_command(&args).envs(env.iter()).output().await?;
 
                 anyhow::ensure!(
                     output.status.success(),
@@ -2216,16 +2190,14 @@ impl GitRepository for RealGitRepository {
         ask_pass: AskPassDelegate,
         env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        let working_directory = self.working_directory();
-        let git_binary_path = self.any_git_binary_path.clone();
+        let git_binary = self.git_binary();
         let executor = self.executor.clone();
         // Note: Do not spawn this command on the background thread, it might pop open the credential helper
         // which we want to block on.
         async move {
-            let mut cmd = new_command(git_binary_path);
-            cmd.current_dir(&working_directory?)
-                .envs(env.iter())
-                .args(["commit", "--quiet", "-m"])
+            let git = git_binary?;
+            let mut cmd = git.build_command(["commit", "--quiet", "-m"]);
+            cmd.envs(env.iter())
                 .arg(&message.to_string())
                 .arg("--cleanup=strip")
                 .arg("--no-verify")
@@ -2264,16 +2236,21 @@ impl GitRepository for RealGitRepository {
         let working_directory = self.working_directory();
         let executor = cx.background_executor().clone();
         let git_binary_path = self.system_git_binary_path.clone();
+        let is_trusted = self.is_trusted();
         // Note: Do not spawn this command on the background thread, it might pop open the credential helper
         // which we want to block on.
         async move {
             let git_binary_path = git_binary_path.context("git not found on $PATH, can't push")?;
             let working_directory = working_directory?;
-            let mut command = new_command(git_binary_path);
+            let git = GitBinary::new(
+                git_binary_path,
+                working_directory,
+                executor.clone(),
+                is_trusted,
+            );
+            let mut command = git.build_command(["push"]);
             command
                 .envs(env.iter())
-                .current_dir(&working_directory)
-                .args(["push"])
                 .args(options.map(|option| match option {
                     PushOptions::SetUpstream => "--set-upstream",
                     PushOptions::Force => "--force-with-lease",
@@ -2301,15 +2278,20 @@ impl GitRepository for RealGitRepository {
         let working_directory = self.working_directory();
         let executor = cx.background_executor().clone();
         let git_binary_path = self.system_git_binary_path.clone();
+        let is_trusted = self.is_trusted();
         // Note: Do not spawn this command on the background thread, it might pop open the credential helper
         // which we want to block on.
         async move {
             let git_binary_path = git_binary_path.context("git not found on $PATH, can't pull")?;
-            let mut command = new_command(git_binary_path);
-            command
-                .envs(env.iter())
-                .current_dir(&working_directory?)
-                .arg("pull");
+            let working_directory = working_directory?;
+            let git = GitBinary::new(
+                git_binary_path,
+                working_directory,
+                executor.clone(),
+                is_trusted,
+            );
+            let mut command = git.build_command(["pull"]);
+            command.envs(env.iter());
 
             if rebase {
                 command.arg("--rebase");
@@ -2337,15 +2319,21 @@ impl GitRepository for RealGitRepository {
         let remote_name = format!("{}", fetch_options);
         let git_binary_path = self.system_git_binary_path.clone();
         let executor = cx.background_executor().clone();
+        let is_trusted = self.is_trusted();
         // Note: Do not spawn this command on the background thread, it might pop open the credential helper
         // which we want to block on.
         async move {
             let git_binary_path = git_binary_path.context("git not found on $PATH, can't fetch")?;
-            let mut command = new_command(git_binary_path);
+            let working_directory = working_directory?;
+            let git = GitBinary::new(
+                git_binary_path,
+                working_directory,
+                executor.clone(),
+                is_trusted,
+            );
+            let mut command = git.build_command(["fetch", &remote_name]);
             command
                 .envs(env.iter())
-                .current_dir(&working_directory?)
-                .args(["fetch", &remote_name])
                 .stdout(Stdio::piped())
                 .stderr(Stdio::piped());
 

crates/git/src/status.rs 🔗

@@ -475,7 +475,12 @@ impl FromStr for GitStatus {
                     }
                     .into();
                 }
-                _ => panic!("Unexpected duplicated status entries: {a_status:?} and {b_status:?}"),
+                (x, y) if x == y => {}
+                _ => {
+                    log::warn!(
+                        "Unexpected duplicated status entries: {a_status:?} and {b_status:?}"
+                    );
+                }
             }
             true
         });
@@ -575,14 +580,165 @@ impl FromStr for TreeDiff {
     }
 }
 
+/// Line-change counts for a single file, as reported by `git diff --numstat`.
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
+pub struct DiffStat {
+    /// Number of lines added.
+    pub added: u32,
+    /// Number of lines deleted.
+    pub deleted: u32,
+}
+
+/// Per-file stats for a whole `git diff --numstat` run, as produced by
+/// [`parse_numstat`]: entries sorted by path and deduplicated, stored in an
+/// `Arc` slice so clones are cheap.
+#[derive(Clone, Debug)]
+pub struct GitDiffStat {
+    pub entries: Arc<[(RepoPath, DiffStat)]>,
+}
+
+/// Parses the output of `git diff --numstat`, where each line holds three
+/// *tab-separated* fields — lines added, lines deleted, and the file path:
+///
+/// ```text
+/// 24\t12\tdir/file.txt
+/// ```
+///
+/// Unparseable lines are silently skipped rather than treated as errors; in
+/// particular, binary files (reported by git as `-\t-\tpath`) are dropped
+/// because their counts do not parse as numbers. The resulting entries are
+/// sorted by path and deduplicated.
+pub fn parse_numstat(output: &str) -> GitDiffStat {
+    let mut entries = Vec::new();
+    for line in output.lines() {
+        let line = line.trim();
+        if line.is_empty() {
+            continue;
+        }
+        // Split into at most three fields so any tab inside the path stays
+        // part of the path component.
+        let mut parts = line.splitn(3, '\t');
+        let (Some(added_str), Some(deleted_str), Some(path_str)) =
+            (parts.next(), parts.next(), parts.next())
+        else {
+            continue;
+        };
+        // Non-numeric counts (e.g. "-" for binary files) skip the whole line.
+        let Ok(added) = added_str.parse::<u32>() else {
+            continue;
+        };
+        let Ok(deleted) = deleted_str.parse::<u32>() else {
+            continue;
+        };
+        let Ok(path) = RepoPath::new(path_str) else {
+            continue;
+        };
+        entries.push((path, DiffStat { added, deleted }));
+    }
+    // Sort first: `dedup_by` only removes *consecutive* duplicates, so sorting
+    // is what makes the dedup collapse every repeated path.
+    entries.sort_by(|(a, _), (b, _)| a.cmp(b));
+    entries.dedup_by(|(a, _), (b, _)| a == b);
+
+    GitDiffStat {
+        entries: entries.into(),
+    }
+}
+
 #[cfg(test)]
 mod tests {
 
     use crate::{
         repository::RepoPath,
-        status::{TreeDiff, TreeDiffStatus},
+        status::{FileStatus, GitStatus, TreeDiff, TreeDiffStatus},
     };
 
+    use super::{DiffStat, parse_numstat};
+
+    /// Test helper: finds the `DiffStat` recorded for `path`, if any.
+    fn lookup<'a>(entries: &'a [(RepoPath, DiffStat)], path: &str) -> Option<&'a DiffStat> {
+        let path = RepoPath::new(path).unwrap();
+        entries.iter().find(|(p, _)| p == &path).map(|(_, s)| s)
+    }
+
+    // Well-formed tab-separated lines produce one entry per file with the
+    // exact added/deleted counts.
+    #[test]
+    fn test_parse_numstat_normal() {
+        let input = "10\t5\tsrc/main.rs\n3\t1\tREADME.md\n";
+        let result = parse_numstat(input);
+        assert_eq!(result.entries.len(), 2);
+        assert_eq!(
+            lookup(&result.entries, "src/main.rs"),
+            Some(&DiffStat {
+                added: 10,
+                deleted: 5
+            })
+        );
+        assert_eq!(
+            lookup(&result.entries, "README.md"),
+            Some(&DiffStat {
+                added: 3,
+                deleted: 1
+            })
+        );
+    }
+
+    // Binary entries must be dropped while text entries on other lines still
+    // parse normally.
+    #[test]
+    fn test_parse_numstat_binary_files_skipped() {
+        // git diff --numstat outputs "-\t-\tpath" for binary files
+        let input = "-\t-\timage.png\n5\t2\tsrc/lib.rs\n";
+        let result = parse_numstat(input);
+        assert_eq!(result.entries.len(), 1);
+        assert!(lookup(&result.entries, "image.png").is_none());
+        assert_eq!(
+            lookup(&result.entries, "src/lib.rs"),
+            Some(&DiffStat {
+                added: 5,
+                deleted: 2
+            })
+        );
+    }
+
+    // Empty and whitespace-only inputs yield no entries (and no errors).
+    #[test]
+    fn test_parse_numstat_empty_input() {
+        assert!(parse_numstat("").entries.is_empty());
+        assert!(parse_numstat("\n\n").entries.is_empty());
+        assert!(parse_numstat("   \n  \n").entries.is_empty());
+    }
+
+    // A line whose count field is not numeric is skipped, not treated as an
+    // error; later valid lines still parse.
+    #[test]
+    fn test_parse_numstat_malformed_lines_skipped() {
+        let input = "not_a_number\t5\tfile.rs\n10\t5\tvalid.rs\n";
+        let result = parse_numstat(input);
+        assert_eq!(result.entries.len(), 1);
+        assert_eq!(
+            lookup(&result.entries, "valid.rs"),
+            Some(&DiffStat {
+                added: 10,
+                deleted: 5
+            })
+        );
+    }
+
+    // A line missing the path field is skipped; the complete line survives.
+    #[test]
+    fn test_parse_numstat_incomplete_lines_skipped() {
+        // Lines with fewer than 3 tab-separated fields are skipped
+        let input = "10\t5\n7\t3\tok.rs\n";
+        let result = parse_numstat(input);
+        assert_eq!(result.entries.len(), 1);
+        assert_eq!(
+            lookup(&result.entries, "ok.rs"),
+            Some(&DiffStat {
+                added: 7,
+                deleted: 3
+            })
+        );
+    }
+
+    // Files reported with zero added and zero deleted lines are still
+    // recorded rather than filtered out.
+    #[test]
+    fn test_parse_numstat_zero_stats() {
+        let input = "0\t0\tunchanged_but_present.rs\n";
+        let result = parse_numstat(input);
+        assert_eq!(
+            lookup(&result.entries, "unchanged_but_present.rs"),
+            Some(&DiffStat {
+                added: 0,
+                deleted: 0
+            })
+        );
+    }
+
+    #[test]
+    fn test_duplicate_untracked_entries() {
+        // Regression test for ZED-2XA: git can produce duplicate untracked entries
+        // for the same path. This should deduplicate them instead of panicking.
+        // Input is in the NUL-separated porcelain format (entries split on '\0').
+        let input = "?? file.txt\0?? file.txt";
+        let status: GitStatus = input.parse().unwrap();
+        assert_eq!(status.entries.len(), 1);
+        assert_eq!(status.entries[0].1, FileStatus::Untracked);
+    }
+
     #[test]
     fn test_tree_diff_parsing() {
         let input = ":000000 100644 0000000000000000000000000000000000000000 0062c311b8727c3a2e3cd7a41bc9904feacf8f98 A\x00.zed/settings.json\x00".to_owned() +

crates/git_graph/src/git_graph.rs 🔗

@@ -1,5 +1,5 @@
 use collections::{BTreeMap, HashMap};
-use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
+use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag};
 use git::{
     BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote,
     parse_git_remote_url,
@@ -18,7 +18,10 @@ use language::line_diff;
 use menu::{Cancel, SelectNext, SelectPrevious};
 use project::{
     Project,
-    git_store::{CommitDataState, GitStoreEvent, Repository, RepositoryEvent, RepositoryId},
+    git_store::{
+        CommitDataState, GitGraphEvent, GitStoreEvent, GraphDataResponse, Repository,
+        RepositoryEvent, RepositoryId,
+    },
 };
 use settings::Settings;
 use smallvec::{SmallVec, smallvec};
@@ -39,7 +42,7 @@ use ui::{
 };
 use workspace::{
     Workspace,
-    item::{Item, ItemEvent, SerializableItem},
+    item::{Item, ItemEvent, SerializableItem, TabTooltipContent},
 };
 
 const COMMIT_CIRCLE_RADIUS: Pixels = px(3.5);
@@ -246,12 +249,6 @@ actions!(
     ]
 );
 
-pub struct GitGraphFeatureFlag;
-
-impl FeatureFlag for GitGraphFeatureFlag {
-    const NAME: &'static str = "git-graph";
-}
-
 fn timestamp_format() -> &'static [BorrowedFormatItem<'static>] {
     static FORMAT: OnceLock<Vec<BorrowedFormatItem<'static>>> = OnceLock::new();
     FORMAT.get_or_init(|| {
@@ -710,29 +707,66 @@ pub fn init(cx: &mut App) {
                 |div| {
                     let workspace = workspace.weak_handle();
 
-                    div.on_action(move |_: &git_ui::git_panel::Open, window, cx| {
-                        workspace
-                            .update(cx, |workspace, cx| {
-                                let existing = workspace.items_of_type::<GitGraph>(cx).next();
-                                if let Some(existing) = existing {
-                                    workspace.activate_item(&existing, true, true, window, cx);
-                                    return;
-                                }
+                    div.on_action({
+                        let workspace = workspace.clone();
+                        move |_: &git_ui::git_panel::Open, window, cx| {
+                            workspace
+                                .update(cx, |workspace, cx| {
+                                    let existing = workspace.items_of_type::<GitGraph>(cx).next();
+                                    if let Some(existing) = existing {
+                                        workspace.activate_item(&existing, true, true, window, cx);
+                                        return;
+                                    }
 
-                                let project = workspace.project().clone();
-                                let workspace_handle = workspace.weak_handle();
-                                let git_graph = cx
-                                    .new(|cx| GitGraph::new(project, workspace_handle, window, cx));
-                                workspace.add_item_to_active_pane(
-                                    Box::new(git_graph),
-                                    None,
-                                    true,
-                                    window,
-                                    cx,
-                                );
-                            })
-                            .ok();
+                                    let project = workspace.project().clone();
+                                    let workspace_handle = workspace.weak_handle();
+                                    let git_graph = cx.new(|cx| {
+                                        GitGraph::new(project, workspace_handle, window, cx)
+                                    });
+                                    workspace.add_item_to_active_pane(
+                                        Box::new(git_graph),
+                                        None,
+                                        true,
+                                        window,
+                                        cx,
+                                    );
+                                })
+                                .ok();
+                        }
                     })
+                    .on_action(
+                        move |action: &git_ui::git_panel::OpenAtCommit, window, cx| {
+                            let sha = action.sha.clone();
+                            workspace
+                                .update(cx, |workspace, cx| {
+                                    let existing = workspace.items_of_type::<GitGraph>(cx).next();
+                                    if let Some(existing) = existing {
+                                        existing.update(cx, |graph, cx| {
+                                            graph.select_commit_by_sha(&sha, cx);
+                                        });
+                                        workspace.activate_item(&existing, true, true, window, cx);
+                                        return;
+                                    }
+
+                                    let project = workspace.project().clone();
+                                    let workspace_handle = workspace.weak_handle();
+                                    let git_graph = cx.new(|cx| {
+                                        let mut graph =
+                                            GitGraph::new(project, workspace_handle, window, cx);
+                                        graph.select_commit_by_sha(&sha, cx);
+                                        graph
+                                    });
+                                    workspace.add_item_to_active_pane(
+                                        Box::new(git_graph),
+                                        None,
+                                        true,
+                                        window,
+                                        cx,
+                                    );
+                                })
+                                .ok();
+                        },
+                    )
                 },
             )
         });
@@ -821,6 +855,7 @@ pub struct GitGraph {
     commit_details_split_state: Entity<SplitState>,
     selected_repo_id: Option<RepositoryId>,
     changed_files_scroll_handle: UniformListScrollHandle,
+    pending_select_sha: Option<Oid>,
 }
 
 impl GitGraph {
@@ -918,6 +953,7 @@ impl GitGraph {
             commit_details_split_state: cx.new(|_cx| SplitState::new()),
             selected_repo_id: active_repository,
             changed_files_scroll_handle: UniformListScrollHandle::new(),
+            pending_select_sha: None,
         };
 
         this.fetch_initial_graph_data(cx);
@@ -931,21 +967,65 @@ impl GitGraph {
         cx: &mut Context<Self>,
     ) {
         match event {
-            RepositoryEvent::GitGraphCountUpdated((order, source), commit_count) => {
-                if order != &self.log_order || source != &self.log_source {
-                    return;
-                }
+            RepositoryEvent::GraphEvent((source, order), event)
+                if source == &self.log_source && order == &self.log_order =>
+            {
+                match event {
+                    GitGraphEvent::FullyLoaded => {
+                        if let Some(pending_sha_index) =
+                            self.pending_select_sha.take().and_then(|oid| {
+                                repository
+                                    .read(cx)
+                                    .get_graph_data(source.clone(), *order)
+                                    .and_then(|data| data.commit_oid_to_index.get(&oid).copied())
+                            })
+                        {
+                            self.select_entry(pending_sha_index, cx);
+                        }
+                    }
+                    GitGraphEvent::LoadingError => {
+                        // todo(git_graph): Wire this up with the UI
+                    }
+                    GitGraphEvent::CountUpdated(commit_count) => {
+                        let old_count = self.graph_data.commits.len();
+
+                        if let Some(pending_selection_index) =
+                            repository.update(cx, |repository, cx| {
+                                let GraphDataResponse {
+                                    commits,
+                                    is_loading,
+                                    error: _,
+                                } = repository.graph_data(
+                                    source.clone(),
+                                    *order,
+                                    old_count..*commit_count,
+                                    cx,
+                                );
+                                self.graph_data.add_commits(commits);
 
-                let old_count = self.graph_data.commits.len();
+                                let pending_sha_index = self.pending_select_sha.and_then(|oid| {
+                                    repository.get_graph_data(source.clone(), *order).and_then(
+                                        |data| data.commit_oid_to_index.get(&oid).copied(),
+                                    )
+                                });
 
-                repository.update(cx, |repository, cx| {
-                    let (commits, _) =
-                        repository.graph_data(source.clone(), *order, old_count..*commit_count, cx);
-                    self.graph_data.add_commits(commits);
-                });
-                cx.notify();
+                                if !is_loading && pending_sha_index.is_none() {
+                                    self.pending_select_sha.take();
+                                }
+
+                                pending_sha_index
+                            })
+                        {
+                            self.select_entry(pending_selection_index, cx);
+                            self.pending_select_sha.take();
+                        }
+
+                        cx.notify();
+                    }
+                }
             }
-            RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => {
+            RepositoryEvent::BranchChanged => {
+                self.pending_select_sha = None;
                 // Only invalidate if we scanned at least once,
                 // meaning we are not inside the initial repo loading state
                 // NOTE: this fixes a loading performance regression
@@ -954,6 +1034,7 @@ impl GitGraph {
                     cx.notify();
                 }
             }
+            RepositoryEvent::GraphEvent(_, _) => {}
             _ => {}
         }
     }
@@ -961,12 +1042,9 @@ impl GitGraph {
     fn fetch_initial_graph_data(&mut self, cx: &mut App) {
         if let Some(repository) = self.get_selected_repository(cx) {
             repository.update(cx, |repository, cx| {
-                let (commits, _) = repository.graph_data(
-                    self.log_source.clone(),
-                    self.log_order,
-                    0..usize::MAX,
-                    cx,
-                );
+                let commits = repository
+                    .graph_data(self.log_source.clone(), self.log_order, 0..usize::MAX, cx)
+                    .commits;
                 self.graph_data.add_commits(commits);
             });
         }
@@ -1109,6 +1187,10 @@ impl GitGraph {
         }
     }
 
+    /// `menu::Confirm` action handler: opens a commit view for the currently
+    /// selected graph entry (a no-op when nothing is selected — see
+    /// `open_selected_commit_view`).
+    fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
+        self.open_selected_commit_view(window, cx);
+    }
+
     fn select_entry(&mut self, idx: usize, cx: &mut Context<Self>) {
         if self.selected_entry_idx == Some(idx) {
             return;
@@ -1153,6 +1235,27 @@ impl GitGraph {
         cx.notify();
     }
 
+    /// Selects the graph entry for `sha`, if that commit is already present in
+    /// the loaded graph data for the current (source, order).
+    ///
+    /// Silently does nothing when `sha` is not a valid object id, when no
+    /// repository is selected, or when the commit has not been loaded yet.
+    pub fn select_commit_by_sha(&mut self, sha: &str, cx: &mut Context<Self>) {
+        let Ok(oid) = sha.parse::<Oid>() else {
+            return;
+        };
+
+        let Some(selected_repository) = self.get_selected_repository(cx) else {
+            return;
+        };
+
+        // Map the oid to its index in the commit list; absent means the commit
+        // isn't in the currently loaded graph data.
+        let Some(index) = selected_repository
+            .read(cx)
+            .get_graph_data(self.log_source.clone(), self.log_order)
+            .and_then(|data| data.commit_oid_to_index.get(&oid))
+            .copied()
+        else {
+            return;
+        };
+
+        self.select_entry(index, cx);
+    }
+
     fn open_selected_commit_view(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         let Some(selected_entry_index) = self.selected_entry_idx else {
             return;
@@ -1966,7 +2069,11 @@ impl Render for GitGraph {
                     if let Some(repository) = self.get_selected_repository(cx) {
                         repository.update(cx, |repository, cx| {
                             // Start loading the graph data if we haven't started already
-                            let (commits, is_loading) = repository.graph_data(
+                            let GraphDataResponse {
+                                commits,
+                                is_loading,
+                                error: _,
+                            } = repository.graph_data(
                                 self.log_source.clone(),
                                 self.log_order,
                                 0..usize::MAX,
@@ -2145,16 +2252,17 @@ impl Render for GitGraph {
         };
 
         div()
-            .size_full()
-            .bg(cx.theme().colors().editor_background)
             .key_context("GitGraph")
             .track_focus(&self.focus_handle)
+            .size_full()
+            .bg(cx.theme().colors().editor_background)
             .on_action(cx.listener(|this, _: &OpenCommitView, window, cx| {
                 this.open_selected_commit_view(window, cx);
             }))
             .on_action(cx.listener(Self::cancel))
             .on_action(cx.listener(Self::select_prev))
             .on_action(cx.listener(Self::select_next))
+            .on_action(cx.listener(Self::confirm))
             .child(content)
             .children(self.context_menu.as_ref().map(|(menu, position, _)| {
                 deferred(
@@ -2179,8 +2287,39 @@ impl Focusable for GitGraph {
 impl Item for GitGraph {
     type Event = ItemEvent;
 
-    fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
-        "Git Graph".into()
+    /// Shows the Git Graph icon on the item's tab.
+    fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
+        Some(Icon::new(IconName::GitGraph))
+    }
+
+    /// Tab tooltip: "Git Graph" plus, when a repository is selected, its
+    /// directory name as a muted subtitle.
+    fn tab_tooltip_content(&self, cx: &App) -> Option<TabTooltipContent> {
+        // Resolve the repository directory name up front; the tooltip closure
+        // captures an owned clone rather than borrowing `self`.
+        let repo_name = self.get_selected_repository(cx).and_then(|repo| {
+            repo.read(cx)
+                .work_directory_abs_path
+                .file_name()
+                .map(|name| name.to_string_lossy().to_string())
+        });
+
+        Some(TabTooltipContent::Custom(Box::new(Tooltip::element({
+            move |_, _| {
+                v_flex()
+                    .child(Label::new("Git Graph"))
+                    .when_some(repo_name.clone(), |this, name| {
+                        this.child(Label::new(name).color(Color::Muted).size(LabelSize::Small))
+                    })
+                    .into_any_element()
+            }
+        }))))
+    }
+
+    /// Tab title: the selected repository's directory name, falling back to
+    /// "Git Graph" when no repository is selected.
+    fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString {
+        self.get_selected_repository(cx)
+            .and_then(|repo| {
+                repo.read(cx)
+                    .work_directory_abs_path
+                    .file_name()
+                    .map(|name| name.to_string_lossy().to_string())
+            })
+            .map_or_else(|| "Git Graph".into(), |name| SharedString::from(name))
     }
 
     fn show_toolbar(&self) -> bool {
@@ -2958,7 +3097,7 @@ mod tests {
                 0..usize::MAX,
                 cx,
             )
-            .0
+            .commits
             .to_vec()
         });
 
@@ -3035,19 +3174,10 @@ mod tests {
                 .any(|event| matches!(event, RepositoryEvent::BranchChanged)),
             "initial repository scan should emit BranchChanged"
         );
-        assert!(
-            observed_repository_events
-                .iter()
-                .any(|event| matches!(event, RepositoryEvent::MergeHeadsChanged)),
-            "initial repository scan should emit MergeHeadsChanged"
-        );
-
-        let graph_data_key = (crate::LogOrder::default(), crate::LogSource::default());
         let commit_count_after = repository.read_with(cx, |repo, _| {
-            repo.initial_graph_data
-                .get(&graph_data_key)
-                .map(|(_, data)| data.len())
-                .unwrap_or(0)
+            repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default())
+                .map(|data| data.commit_data.len())
+                .unwrap()
         });
         assert_eq!(
             commits.len(),

crates/git_ui/Cargo.toml 🔗

@@ -27,6 +27,7 @@ component.workspace = true
 db.workspace = true
 editor.workspace = true
 futures.workspace = true
+feature_flags.workspace = true
 fuzzy.workspace = true
 git.workspace = true
 gpui.workspace = true
@@ -43,6 +44,7 @@ panel.workspace = true
 picker.workspace = true
 project.workspace = true
 prompt_store.workspace = true
+proto.workspace = true
 remote_connection.workspace = true
 remote.workspace = true
 schemars.workspace = true

crates/git_ui/src/branch_picker.rs 🔗

@@ -1390,7 +1390,9 @@ mod tests {
         (branch_list, cx)
     }
 
-    async fn init_fake_repository(cx: &mut TestAppContext) -> Entity<Repository> {
+    async fn init_fake_repository(
+        cx: &mut TestAppContext,
+    ) -> (Entity<Project>, Entity<Repository>) {
         let fs = FakeFs::new(cx.executor());
         fs.insert_tree(
             path!("/dir"),
@@ -1413,7 +1415,7 @@ mod tests {
         let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
         let repository = cx.read(|cx| project.read(cx).active_repository(cx));
 
-        repository.unwrap()
+        (project, repository.unwrap())
     }
 
     #[gpui::test]
@@ -1476,7 +1478,7 @@ mod tests {
     #[gpui::test]
     async fn test_delete_branch(cx: &mut TestAppContext) {
         init_test(cx);
-        let repository = init_fake_repository(cx).await;
+        let (_project, repository) = init_fake_repository(cx).await;
 
         let branches = create_test_branches();
 
@@ -1534,7 +1536,7 @@ mod tests {
     #[gpui::test]
     async fn test_delete_remote(cx: &mut TestAppContext) {
         init_test(cx);
-        let repository = init_fake_repository(cx).await;
+        let (_project, repository) = init_fake_repository(cx).await;
         let branches = vec![
             create_test_branch("main", true, Some("origin"), Some(1000)),
             create_test_branch("feature-auth", false, Some("origin"), Some(900)),
@@ -1721,7 +1723,7 @@ mod tests {
         const NEW_BRANCH: &str = "new-feature-branch";
 
         init_test(test_cx);
-        let repository = init_fake_repository(test_cx).await;
+        let (_project, repository) = init_fake_repository(test_cx).await;
 
         let branches = vec![
             create_test_branch(MAIN_BRANCH, true, None, Some(1000)),
@@ -1785,7 +1787,7 @@ mod tests {
     #[gpui::test]
     async fn test_remote_url_detection_https(cx: &mut TestAppContext) {
         init_test(cx);
-        let repository = init_fake_repository(cx).await;
+        let (_project, repository) = init_fake_repository(cx).await;
         let branches = vec![create_test_branch("main", true, None, Some(1000))];
 
         let (branch_list, mut ctx) = init_branch_list_test(repository.into(), branches, cx).await;

crates/git_ui/src/commit_view.rs 🔗

@@ -3,6 +3,7 @@ use buffer_diff::BufferDiff;
 use collections::HashMap;
 use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle};
 use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines};
+use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag};
 use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content};
 use git::status::{FileStatus, StatusCode, TrackedStatus};
 use git::{
@@ -27,7 +28,7 @@ use std::{
     sync::Arc,
 };
 use theme::ActiveTheme;
-use ui::{ButtonLike, DiffStat, Tooltip, prelude::*};
+use ui::{DiffStat, Divider, Tooltip, prelude::*};
 use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff};
 use workspace::item::TabTooltipContent;
 use workspace::{
@@ -450,6 +451,7 @@ impl CommitView {
     fn render_header(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let commit = &self.commit;
         let author_name = commit.author_name.clone();
+        let author_email = commit.author_email.clone();
         let commit_sha = commit.sha.clone();
         let commit_date = time::OffsetDateTime::from_unix_timestamp(commit.commit_timestamp)
             .unwrap_or_else(|_| time::OffsetDateTime::now_utc());
@@ -461,36 +463,6 @@ impl CommitView {
             time_format::TimestampFormat::MediumAbsolute,
         );
 
-        let remote_info = self
-            .remote
-            .as_ref()
-            .filter(|_| self.stash.is_none())
-            .map(|remote| {
-                let provider = remote.host.name();
-                let parsed_remote = ParsedGitRemote {
-                    owner: remote.owner.as_ref().into(),
-                    repo: remote.repo.as_ref().into(),
-                };
-                let params = BuildCommitPermalinkParams { sha: &commit.sha };
-                let url = remote
-                    .host
-                    .build_commit_permalink(&parsed_remote, params)
-                    .to_string();
-                (provider, url)
-            });
-
-        let (additions, deletions) = self.calculate_changed_lines(cx);
-
-        let commit_diff_stat = if additions > 0 || deletions > 0 {
-            Some(DiffStat::new(
-                "commit-diff-stat",
-                additions as usize,
-                deletions as usize,
-            ))
-        } else {
-            None
-        };
-
         let gutter_width = self.editor.update(cx, |editor, cx| {
             let snapshot = editor.snapshot(window, cx);
             let style = editor.style(cx);
@@ -501,116 +473,75 @@ impl CommitView {
                 .full_width()
         });
 
-        let clipboard_has_link = cx
+        let clipboard_has_sha = cx
             .read_from_clipboard()
             .and_then(|entry| entry.text())
             .map_or(false, |clipboard_text| {
                 clipboard_text.trim() == commit_sha.as_ref()
             });
 
-        let (copy_icon, copy_icon_color) = if clipboard_has_link {
+        let (copy_icon, copy_icon_color) = if clipboard_has_sha {
             (IconName::Check, Color::Success)
         } else {
             (IconName::Copy, Color::Muted)
         };
 
         h_flex()
+            .py_2()
+            .pr_2p5()
+            .w_full()
+            .justify_between()
             .border_b_1()
             .border_color(cx.theme().colors().border_variant)
-            .w_full()
-            .child(
-                h_flex()
-                    .w(gutter_width)
-                    .justify_center()
-                    .child(self.render_commit_avatar(&commit.sha, rems_from_px(48.), window, cx)),
-            )
             .child(
                 h_flex()
-                    .py_4()
-                    .pl_1()
-                    .pr_4()
-                    .w_full()
-                    .items_start()
-                    .justify_between()
-                    .flex_wrap()
+                    .child(h_flex().w(gutter_width).justify_center().child(
+                        self.render_commit_avatar(&commit.sha, rems_from_px(40.), window, cx),
+                    ))
                     .child(
-                        v_flex()
-                            .child(
-                                h_flex()
-                                    .gap_1()
-                                    .child(Label::new(author_name).color(Color::Default))
-                                    .child({
-                                        ButtonLike::new("sha")
-                                            .child(
-                                                h_flex()
-                                                    .group("sha_btn")
-                                                    .size_full()
-                                                    .max_w_32()
-                                                    .gap_0p5()
-                                                    .child(
-                                                        Label::new(commit_sha.clone())
-                                                            .color(Color::Muted)
-                                                            .size(LabelSize::Small)
-                                                            .truncate()
-                                                            .buffer_font(cx),
-                                                    )
-                                                    .child(
-                                                        div().visible_on_hover("sha_btn").child(
-                                                            Icon::new(copy_icon)
-                                                                .color(copy_icon_color)
-                                                                .size(IconSize::Small),
-                                                        ),
-                                                    ),
-                                            )
-                                            .tooltip({
-                                                let commit_sha = commit_sha.clone();
-                                                move |_, cx| {
-                                                    Tooltip::with_meta(
-                                                        "Copy Commit SHA",
-                                                        None,
-                                                        commit_sha.clone(),
-                                                        cx,
-                                                    )
-                                                }
-                                            })
-                                            .on_click(move |_, _, cx| {
-                                                cx.stop_propagation();
-                                                cx.write_to_clipboard(ClipboardItem::new_string(
-                                                    commit_sha.to_string(),
-                                                ));
-                                            })
-                                    }),
-                            )
-                            .child(
-                                h_flex()
-                                    .gap_1p5()
-                                    .child(
-                                        Label::new(date_string)
-                                            .color(Color::Muted)
-                                            .size(LabelSize::Small),
-                                    )
-                                    .child(
-                                        Label::new("•")
-                                            .color(Color::Ignored)
-                                            .size(LabelSize::Small),
-                                    )
-                                    .children(commit_diff_stat),
-                            ),
-                    )
-                    .children(remote_info.map(|(provider_name, url)| {
-                        let icon = match provider_name.as_str() {
-                            "GitHub" => IconName::Github,
-                            _ => IconName::Link,
-                        };
-
-                        Button::new("view_on_provider", format!("View on {}", provider_name))
-                            .icon(icon)
-                            .icon_color(Color::Muted)
-                            .icon_size(IconSize::Small)
-                            .icon_position(IconPosition::Start)
-                            .on_click(move |_, _, cx| cx.open_url(&url))
-                    })),
+                        v_flex().child(Label::new(author_name)).child(
+                            h_flex()
+                                .gap_1p5()
+                                .child(
+                                    Label::new(date_string)
+                                        .color(Color::Muted)
+                                        .size(LabelSize::Small),
+                                )
+                                .child(
+                                    Label::new("•")
+                                        .size(LabelSize::Small)
+                                        .color(Color::Muted)
+                                        .alpha(0.5),
+                                )
+                                .child(
+                                    Label::new(author_email)
+                                        .color(Color::Muted)
+                                        .size(LabelSize::Small),
+                                ),
+                        ),
+                    ),
             )
+            .when(self.stash.is_none(), |this| {
+                this.child(
+                    Button::new("sha", "Commit SHA")
+                        .icon(copy_icon)
+                        .icon_color(copy_icon_color)
+                        .icon_position(IconPosition::Start)
+                        .icon_size(IconSize::Small)
+                        .tooltip({
+                            let commit_sha = commit_sha.clone();
+                            move |_, cx| {
+                                Tooltip::with_meta("Copy Commit SHA", None, commit_sha.clone(), cx)
+                            }
+                        })
+                        .on_click(move |_, _, cx| {
+                            cx.stop_propagation();
+                            cx.write_to_clipboard(ClipboardItem::new_string(
+                                commit_sha.to_string(),
+                            ));
+                        }),
+                )
+            })
     }
 
     fn apply_stash(workspace: &mut Workspace, window: &mut Window, cx: &mut App) {
@@ -898,7 +829,7 @@ impl Item for CommitView {
     type Event = EditorEvent;
 
     fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
-        Some(Icon::new(IconName::GitBranch).color(Color::Muted))
+        Some(Icon::new(IconName::GitCommit).color(Color::Muted))
     }
 
     fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
@@ -1081,8 +1012,93 @@ impl CommitViewToolbar {
 impl EventEmitter<ToolbarItemEvent> for CommitViewToolbar {}
 
 impl Render for CommitViewToolbar {
-    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
-        div().hidden()
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        let Some(commit_view) = self.commit_view.as_ref().and_then(|w| w.upgrade()) else {
+            return div();
+        };
+
+        let commit_view_ref = commit_view.read(cx);
+        let is_stash = commit_view_ref.stash.is_some();
+
+        let (additions, deletions) = commit_view_ref.calculate_changed_lines(cx);
+
+        let commit_sha = commit_view_ref.commit.sha.clone();
+
+        let remote_info = commit_view_ref.remote.as_ref().map(|remote| {
+            let provider = remote.host.name();
+            let parsed_remote = ParsedGitRemote {
+                owner: remote.owner.as_ref().into(),
+                repo: remote.repo.as_ref().into(),
+            };
+            let params = BuildCommitPermalinkParams { sha: &commit_sha };
+            let url = remote
+                .host
+                .build_commit_permalink(&parsed_remote, params)
+                .to_string();
+            (provider, url)
+        });
+
+        let sha_for_graph = commit_sha.to_string();
+
+        h_flex()
+            .gap_1()
+            .when(additions > 0 || deletions > 0, |this| {
+                this.child(
+                    h_flex()
+                        .gap_2()
+                        .child(DiffStat::new(
+                            "toolbar-diff-stat",
+                            additions as usize,
+                            deletions as usize,
+                        ))
+                        .child(Divider::vertical()),
+                )
+            })
+            .child(
+                IconButton::new("buffer-search", IconName::MagnifyingGlass)
+                    .icon_size(IconSize::Small)
+                    .tooltip(move |_, cx| {
+                        Tooltip::for_action(
+                            "Buffer Search",
+                            &zed_actions::buffer_search::Deploy::find(),
+                            cx,
+                        )
+                    })
+                    .on_click(|_, window, cx| {
+                        window.dispatch_action(
+                            Box::new(zed_actions::buffer_search::Deploy::find()),
+                            cx,
+                        );
+                    }),
+            )
+            .when(!is_stash, |this| {
+                this.when(cx.has_flag::<GitGraphFeatureFlag>(), |this| {
+                    this.child(
+                        IconButton::new("show-in-git-graph", IconName::GitGraph)
+                            .icon_size(IconSize::Small)
+                            .tooltip(Tooltip::text("Show in Git Graph"))
+                            .on_click(move |_, window, cx| {
+                                window.dispatch_action(
+                                    Box::new(crate::git_panel::OpenAtCommit {
+                                        sha: sha_for_graph.clone(),
+                                    }),
+                                    cx,
+                                );
+                            }),
+                    )
+                })
+                .children(remote_info.map(|(provider_name, url)| {
+                    let icon = match provider_name.as_str() {
+                        "GitHub" => IconName::Github,
+                        _ => IconName::Link,
+                    };
+
+                    IconButton::new("view_on_provider", icon)
+                        .icon_size(IconSize::Small)
+                        .tooltip(Tooltip::text(format!("View on {}", provider_name)))
+                        .on_click(move |_, _, cx| cx.open_url(&url))
+                }))
+            })
     }
 }
 
@@ -1093,12 +1109,11 @@ impl ToolbarItemView for CommitViewToolbar {
         _: &mut Window,
         cx: &mut Context<Self>,
     ) -> ToolbarItemLocation {
-        if let Some(entity) = active_pane_item.and_then(|i| i.act_as::<CommitView>(cx))
-            && entity.read(cx).stash.is_some()
-        {
+        if let Some(entity) = active_pane_item.and_then(|i| i.act_as::<CommitView>(cx)) {
             self.commit_view = Some(entity.downgrade());
             return ToolbarItemLocation::PrimaryRight;
         }
+        self.commit_view = None;
         ToolbarItemLocation::Hidden
     }
 

crates/git_ui/src/conflict_view.rs 🔗

@@ -290,7 +290,7 @@ fn conflicts_updated(
         blocks.push(BlockProperties {
             placement: BlockPlacement::Above(anchor),
             height: Some(1),
-            style: BlockStyle::Fixed,
+            style: BlockStyle::Sticky,
             render: Arc::new({
                 let conflict = conflict.clone();
                 move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx)

crates/git_ui/src/git_panel.rs 🔗

@@ -28,7 +28,7 @@ use git::repository::{
     UpstreamTrackingStatus, get_git_committer,
 };
 use git::stash::GitStash;
-use git::status::StageStatus;
+use git::status::{DiffStat, StageStatus};
 use git::{Amend, Signoff, ToggleStaged, repository::RepoPath, status::FileStatus};
 use git::{
     ExpandCommitEditor, GitHostingProviderRegistry, RestoreTrackedFiles, StageAll, StashAll,
@@ -55,6 +55,7 @@ use project::{
     project_settings::{GitPathStyle, ProjectSettings},
 };
 use prompt_store::{BuiltInPrompt, PromptId, PromptStore, RULES_FILE_NAMES};
+use proto::RpcError;
 use serde::{Deserialize, Serialize};
 use settings::{Settings, SettingsStore, StatusStyle};
 use smallvec::SmallVec;
@@ -123,6 +124,13 @@ actions!(
     ]
 );
 
+/// Opens the Git Graph Tab at a specific commit.
+#[derive(Clone, PartialEq, serde::Deserialize, schemars::JsonSchema, gpui::Action)]
+#[action(namespace = git_graph)]
+pub struct OpenAtCommit {
+    pub sha: String,
+}
+
 fn prompt<T>(
     msg: &str,
     detail: Option<&str>,
@@ -524,6 +532,7 @@ pub struct GitStatusEntry {
     pub(crate) repo_path: RepoPath,
     pub(crate) status: FileStatus,
     pub(crate) staging: StageStatus,
+    pub(crate) diff_stat: Option<DiffStat>,
 }
 
 impl GitStatusEntry {
@@ -644,6 +653,7 @@ pub struct GitPanel {
     local_committer_task: Option<Task<()>>,
     bulk_staging: Option<BulkStaging>,
     stash_entries: GitStash,
+
     _settings_subscription: Subscription,
 }
 
@@ -704,18 +714,26 @@ impl GitPanel {
 
             let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
             let mut was_tree_view = GitPanelSettings::get_global(cx).tree_view;
+            let mut was_diff_stats = GitPanelSettings::get_global(cx).diff_stats;
             cx.observe_global_in::<SettingsStore>(window, move |this, window, cx| {
                 let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
                 let tree_view = GitPanelSettings::get_global(cx).tree_view;
+                let diff_stats = GitPanelSettings::get_global(cx).diff_stats;
                 if tree_view != was_tree_view {
                     this.view_mode = GitPanelViewMode::from_settings(cx);
                 }
+
+                let mut update_entries = false;
                 if sort_by_path != was_sort_by_path || tree_view != was_tree_view {
                     this.bulk_staging.take();
+                    update_entries = true;
+                }
+                if (diff_stats != was_diff_stats) || update_entries {
                     this.update_visible_entries(window, cx);
                 }
                 was_sort_by_path = sort_by_path;
                 was_tree_view = tree_view;
+                was_diff_stats = diff_stats;
             })
             .detach();
 
@@ -747,9 +765,7 @@ impl GitPanel {
                 move |this, _git_store, event, window, cx| match event {
                     GitStoreEvent::RepositoryUpdated(
                         _,
-                        RepositoryEvent::StatusesChanged
-                        | RepositoryEvent::BranchChanged
-                        | RepositoryEvent::MergeHeadsChanged,
+                        RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged,
                         true,
                     )
                     | GitStoreEvent::RepositoryAdded
@@ -2735,6 +2751,7 @@ impl GitPanel {
                     temperature,
                     thinking_allowed: false,
                     thinking_effort: None,
+                    speed: None,
                 };
 
                 let stream = model.stream_completion_text(request, cx);
@@ -3171,18 +3188,16 @@ impl GitPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> AskPassDelegate {
-        let this = cx.weak_entity();
+        let workspace = self.workspace.clone();
         let operation = operation.into();
         let window = window.window_handle();
         AskPassDelegate::new(&mut cx.to_async(), move |prompt, tx, cx| {
             window
                 .update(cx, |_, window, cx| {
-                    this.update(cx, |this, cx| {
-                        this.workspace.update(cx, |workspace, cx| {
-                            workspace.toggle_modal(window, cx, |window, cx| {
-                                AskPassModal::new(operation.clone(), prompt.into(), tx, window, cx)
-                            });
-                        })
+                    workspace.update(cx, |workspace, cx| {
+                        workspace.toggle_modal(window, cx, |window, cx| {
+                            AskPassModal::new(operation.clone(), prompt.into(), tx, window, cx)
+                        });
                     })
                 })
                 .ok();
@@ -3526,6 +3541,7 @@ impl GitPanel {
                 repo_path: entry.repo_path.clone(),
                 status: entry.status,
                 staging,
+                diff_stat: entry.diff_stat,
             };
 
             if staging.has_staged() {
@@ -3562,6 +3578,7 @@ impl GitPanel {
                             repo_path: ops.repo_path.clone(),
                             status: status.status,
                             staging: StageStatus::Staged,
+                            diff_stat: status.diff_stat,
                         });
             }
         }
@@ -5108,6 +5125,8 @@ impl GitPanel {
                 }
             });
 
+        let id_for_diff_stat = id.clone();
+
         h_flex()
             .id(id)
             .h(self.list_item_height())
@@ -5124,6 +5143,16 @@ impl GitPanel {
             .hover(|s| s.bg(hover_bg))
             .active(|s| s.bg(active_bg))
             .child(name_row)
+            .when(GitPanelSettings::get_global(cx).diff_stats, |el| {
+                el.when_some(entry.diff_stat, move |this, stat| {
+                    let id = format!("diff-stat-{}", id_for_diff_stat);
+                    this.child(ui::DiffStat::new(
+                        id,
+                        stat.added as usize,
+                        stat.deleted as usize,
+                    ))
+                })
+            })
             .child(
                 div()
                     .id(checkbox_wrapper_id)
@@ -5514,6 +5543,21 @@ impl GitPanel {
     }
 }
 
+#[cfg(any(test, feature = "test-support"))]
+impl GitPanel {
+    pub fn new_test(
+        workspace: &mut Workspace,
+        window: &mut Window,
+        cx: &mut Context<Workspace>,
+    ) -> Entity<Self> {
+        Self::new(workspace, window, cx)
+    }
+
+    pub fn active_repository(&self) -> Option<&Entity<Repository>> {
+        self.active_repository.as_ref()
+    }
+}
+
 impl Render for GitPanel {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let project = self.project.read(cx);
@@ -6306,7 +6350,7 @@ pub(crate) fn show_error_toast(
     cx: &mut App,
 ) {
     let action = action.into();
-    let message = e.to_string().trim().to_string();
+    let message = format_git_error_toast_message(&e);
     if message
         .matches(git::repository::REMOTE_CANCELLED_BY_USER)
         .next()
@@ -6332,6 +6376,20 @@ pub(crate) fn show_error_toast(
     }
 }
 
+fn rpc_error_raw_message_from_chain(error: &anyhow::Error) -> Option<&str> {
+    error
+        .chain()
+        .find_map(|cause| cause.downcast_ref::<RpcError>().map(RpcError::raw_message))
+}
+
+fn format_git_error_toast_message(error: &anyhow::Error) -> String {
+    if let Some(message) = rpc_error_raw_message_from_chain(error) {
+        message.trim().to_string()
+    } else {
+        error.to_string().trim().to_string()
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use git::{
@@ -6363,6 +6421,47 @@ mod tests {
         });
     }
 
+    #[test]
+    fn test_format_git_error_toast_message_prefers_raw_rpc_message() {
+        let rpc_error = RpcError::from_proto(
+            &proto::Error {
+                message:
+                    "Your local changes to the following files would be overwritten by merge\n"
+                        .to_string(),
+                code: proto::ErrorCode::Internal as i32,
+                tags: Default::default(),
+            },
+            "Pull",
+        );
+
+        let message = format_git_error_toast_message(&rpc_error);
+        assert_eq!(
+            message,
+            "Your local changes to the following files would be overwritten by merge"
+        );
+    }
+
+    #[test]
+    fn test_format_git_error_toast_message_prefers_raw_rpc_message_when_wrapped() {
+        let rpc_error = RpcError::from_proto(
+            &proto::Error {
+                message:
+                    "Your local changes to the following files would be overwritten by merge\n"
+                        .to_string(),
+                code: proto::ErrorCode::Internal as i32,
+                tags: Default::default(),
+            },
+            "Pull",
+        );
+        let wrapped = rpc_error.context("sending pull request");
+
+        let message = format_git_error_toast_message(&wrapped);
+        assert_eq!(
+            message,
+            "Your local changes to the following files would be overwritten by merge"
+        );
+    }
+
     #[gpui::test]
     async fn test_entry_worktree_paths(cx: &mut TestAppContext) {
         init_test(cx);
@@ -6436,11 +6535,19 @@ mod tests {
                     repo_path: repo_path("crates/gpui/gpui.rs"),
                     status: StatusCode::Modified.worktree(),
                     staging: StageStatus::Unstaged,
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 }),
                 GitListEntry::Status(GitStatusEntry {
                     repo_path: repo_path("crates/util/util.rs"),
                     status: StatusCode::Modified.worktree(),
                     staging: StageStatus::Unstaged,
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 },),
             ],
         );
@@ -6461,11 +6568,19 @@ mod tests {
                     repo_path: repo_path("crates/gpui/gpui.rs"),
                     status: StatusCode::Modified.worktree(),
                     staging: StageStatus::Unstaged,
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 }),
                 GitListEntry::Status(GitStatusEntry {
                     repo_path: repo_path("crates/util/util.rs"),
                     status: StatusCode::Modified.worktree(),
                     staging: StageStatus::Unstaged,
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 },),
             ],
         );

crates/git_ui/src/git_panel_settings.rs 🔗

@@ -25,6 +25,7 @@ pub struct GitPanelSettings {
     pub sort_by_path: bool,
     pub collapse_untracked_diff: bool,
     pub tree_view: bool,
+    pub diff_stats: bool,
 }
 
 impl ScrollbarVisibility for GitPanelSettings {
@@ -58,6 +59,7 @@ impl Settings for GitPanelSettings {
             sort_by_path: git_panel.sort_by_path.unwrap(),
             collapse_untracked_diff: git_panel.collapse_untracked_diff.unwrap(),
             tree_view: git_panel.tree_view.unwrap(),
+            diff_stats: git_panel.diff_stats.unwrap(),
         }
     }
 }

crates/git_ui/src/git_picker.rs 🔗

@@ -15,7 +15,7 @@ use workspace::{ModalView, Workspace, pane};
 use crate::branch_picker::{self, BranchList, DeleteBranch, FilterRemotes};
 use crate::stash_picker::{self, DropStashItem, ShowStashItem, StashList};
 use crate::worktree_picker::{
-    self, WorktreeFromDefault, WorktreeFromDefaultOnWindow, WorktreeList,
+    self, DeleteWorktree, WorktreeFromDefault, WorktreeFromDefaultOnWindow, WorktreeList,
 };
 
 actions!(
@@ -408,6 +408,19 @@ impl GitPicker {
         }
     }
 
+    fn handle_worktree_delete(
+        &mut self,
+        _: &DeleteWorktree,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(worktree_list) = &self.worktree_list {
+            worktree_list.update(cx, |list, cx| {
+                list.handle_delete(&DeleteWorktree, window, cx);
+            });
+        }
+    }
+
     fn handle_drop_stash(
         &mut self,
         _: &DropStashItem,
@@ -524,6 +537,7 @@ impl Render for GitPicker {
             .when(self.tab == GitPickerTab::Worktrees, |el| {
                 el.on_action(cx.listener(Self::handle_worktree_from_default))
                     .on_action(cx.listener(Self::handle_worktree_from_default_on_window))
+                    .on_action(cx.listener(Self::handle_worktree_delete))
             })
             .when(self.tab == GitPickerTab::Stash, |el| {
                 el.on_action(cx.listener(Self::handle_drop_stash))

crates/git_ui/src/text_diff_view.rs 🔗

@@ -145,11 +145,7 @@ impl TextDiffView {
         let multibuffer = cx.new(|cx| {
             let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
 
-            multibuffer.push_excerpts(
-                source_buffer.clone(),
-                [editor::ExcerptRange::new(source_range)],
-                cx,
-            );
+            multibuffer.set_excerpts_for_buffer(source_buffer.clone(), [source_range], 0, cx);
 
             multibuffer.add_diff(diff_buffer.clone(), cx);
             multibuffer

crates/git_ui/src/worktree_picker.rs 🔗

@@ -22,7 +22,16 @@ use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, prelude::*};
 use util::ResultExt;
 use workspace::{ModalView, MultiWorkspace, Workspace, notifications::DetachAndPromptErr};
 
-actions!(git, [WorktreeFromDefault, WorktreeFromDefaultOnWindow]);
+use crate::git_panel::show_error_toast;
+
+actions!(
+    git,
+    [
+        WorktreeFromDefault,
+        WorktreeFromDefaultOnWindow,
+        DeleteWorktree
+    ]
+);
 
 pub fn open(
     workspace: &mut Workspace,
@@ -181,6 +190,19 @@ impl WorktreeList {
             );
         })
     }
+
+    pub fn handle_delete(
+        &mut self,
+        _: &DeleteWorktree,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        self.picker.update(cx, |picker, cx| {
+            picker
+                .delegate
+                .delete_at(picker.delegate.selected_index, window, cx)
+        })
+    }
 }
 impl ModalView for WorktreeList {}
 impl EventEmitter<DismissEvent> for WorktreeList {}
@@ -203,6 +225,9 @@ impl Render for WorktreeList {
             .on_action(cx.listener(|this, _: &WorktreeFromDefaultOnWindow, w, cx| {
                 this.handle_new_worktree(true, w, cx)
             }))
+            .on_action(cx.listener(|this, _: &DeleteWorktree, window, cx| {
+                this.handle_delete(&DeleteWorktree, window, cx)
+            }))
             .child(self.picker.clone())
             .when(!self.embedded, |el| {
                 el.on_mouse_down_out({
@@ -275,9 +300,9 @@ impl WorktreeListDelegate {
                     .git
                     .worktree_directory
                     .clone();
-                let work_dir = repo.work_directory_abs_path.clone();
+                let original_repo = repo.original_repo_abs_path.clone();
                 let directory =
-                    validate_worktree_directory(&work_dir, &worktree_directory_setting)?;
+                    validate_worktree_directory(&original_repo, &worktree_directory_setting)?;
                 let new_worktree_path = directory.join(&branch);
                 let receiver = repo.create_worktree(branch.clone(), directory, commit);
                 anyhow::Ok((receiver, new_worktree_path))
@@ -420,6 +445,57 @@ impl WorktreeListDelegate {
             .as_ref()
             .and_then(|repo| repo.read(cx).branch.as_ref().map(|b| b.name()))
     }
+
+    fn delete_at(&self, idx: usize, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+        let Some(entry) = self.matches.get(idx).cloned() else {
+            return;
+        };
+        if entry.is_new {
+            return;
+        }
+        let Some(repo) = self.repo.clone() else {
+            return;
+        };
+        let workspace = self.workspace.clone();
+        let path = entry.worktree.path;
+
+        cx.spawn_in(window, async move |picker, cx| {
+            let result = repo
+                .update(cx, |repo, _| repo.remove_worktree(path.clone(), false))
+                .await?;
+
+            if let Err(e) = result {
+                log::error!("Failed to remove worktree: {}", e);
+                if let Some(workspace) = workspace.upgrade() {
+                    cx.update(|_window, cx| {
+                        show_error_toast(
+                            workspace,
+                            format!("worktree remove {}", path.display()),
+                            e,
+                            cx,
+                        )
+                    })?;
+                }
+                return Ok(());
+            }
+
+            picker.update_in(cx, |picker, _, cx| {
+                picker.delegate.matches.retain(|e| e.worktree.path != path);
+                if let Some(all_worktrees) = &mut picker.delegate.all_worktrees {
+                    all_worktrees.retain(|w| w.path != path);
+                }
+                if picker.delegate.matches.is_empty() {
+                    picker.delegate.selected_index = 0;
+                } else if picker.delegate.selected_index >= picker.delegate.matches.len() {
+                    picker.delegate.selected_index = picker.delegate.matches.len() - 1;
+                }
+                cx.notify();
+            })?;
+
+            anyhow::Ok(())
+        })
+        .detach();
+    }
 }
 
 async fn open_remote_worktree(
@@ -778,6 +854,16 @@ impl PickerDelegate for WorktreeListDelegate {
         } else {
             Some(
                 footer_container
+                    .child(
+                        Button::new("delete-worktree", "Delete")
+                            .key_binding(
+                                KeyBinding::for_action_in(&DeleteWorktree, &focus_handle, cx)
+                                    .map(|kb| kb.size(rems_from_px(12.))),
+                            )
+                            .on_click(|_, window, cx| {
+                                window.dispatch_action(DeleteWorktree.boxed_clone(), cx)
+                            }),
+                    )
                     .child(
                         Button::new("open-in-new-window", "Open in New Window")
                             .key_binding(

crates/google_ai/src/google_ai.rs 🔗

@@ -510,11 +510,9 @@ pub enum Model {
         alias = "gemini-2.5-pro-preview-06-05"
     )]
     Gemini25Pro,
-    #[serde(rename = "gemini-3-pro-preview")]
-    Gemini3Pro,
     #[serde(rename = "gemini-3-flash-preview")]
     Gemini3Flash,
-    #[serde(rename = "gemini-3.1-pro-preview")]
+    #[serde(rename = "gemini-3.1-pro-preview", alias = "gemini-3-pro-preview")]
     Gemini31Pro,
     #[serde(rename = "custom")]
     Custom {
@@ -537,7 +535,6 @@ impl Model {
             Self::Gemini25FlashLite => "gemini-2.5-flash-lite",
             Self::Gemini25Flash => "gemini-2.5-flash",
             Self::Gemini25Pro => "gemini-2.5-pro",
-            Self::Gemini3Pro => "gemini-3-pro-preview",
             Self::Gemini3Flash => "gemini-3-flash-preview",
             Self::Gemini31Pro => "gemini-3.1-pro-preview",
             Self::Custom { name, .. } => name,
@@ -548,7 +545,6 @@ impl Model {
             Self::Gemini25FlashLite => "gemini-2.5-flash-lite",
             Self::Gemini25Flash => "gemini-2.5-flash",
             Self::Gemini25Pro => "gemini-2.5-pro",
-            Self::Gemini3Pro => "gemini-3-pro-preview",
             Self::Gemini3Flash => "gemini-3-flash-preview",
             Self::Gemini31Pro => "gemini-3.1-pro-preview",
             Self::Custom { name, .. } => name,
@@ -560,7 +556,6 @@ impl Model {
             Self::Gemini25FlashLite => "Gemini 2.5 Flash-Lite",
             Self::Gemini25Flash => "Gemini 2.5 Flash",
             Self::Gemini25Pro => "Gemini 2.5 Pro",
-            Self::Gemini3Pro => "Gemini 3 Pro",
             Self::Gemini3Flash => "Gemini 3 Flash",
             Self::Gemini31Pro => "Gemini 3.1 Pro",
             Self::Custom {
@@ -574,7 +569,6 @@ impl Model {
             Self::Gemini25FlashLite
             | Self::Gemini25Flash
             | Self::Gemini25Pro
-            | Self::Gemini3Pro
             | Self::Gemini3Flash
             | Self::Gemini31Pro => 1_048_576,
             Self::Custom { max_tokens, .. } => *max_tokens,
@@ -586,7 +580,6 @@ impl Model {
             Model::Gemini25FlashLite
             | Model::Gemini25Flash
             | Model::Gemini25Pro
-            | Model::Gemini3Pro
             | Model::Gemini3Flash
             | Model::Gemini31Pro => Some(65_536),
             Model::Custom { .. } => None,
@@ -603,10 +596,7 @@ impl Model {
 
     pub fn mode(&self) -> GoogleModelMode {
         match self {
-            Self::Gemini25FlashLite
-            | Self::Gemini25Flash
-            | Self::Gemini25Pro
-            | Self::Gemini3Pro => {
+            Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro => {
                 GoogleModelMode::Thinking {
                     // By default these models are set to "auto", so we preserve that behavior
                     // but indicate they are capable of thinking mode

crates/gpui/Cargo.toml 🔗

@@ -21,7 +21,6 @@ default = ["font-kit", "wayland", "x11", "windows-manifest"]
 test-support = [
     "leak-detection",
     "collections/test-support",
-    "util/test-support",
     "http_client/test-support",
     "wayland",
     "x11",
@@ -37,7 +36,7 @@ x11 = [
 screen-capture = [
     "scap",
 ]
-windows-manifest = []
+windows-manifest = ["dep:embed-resource"]
 
 [lib]
 path = "src/gpui.rs"
@@ -54,6 +53,7 @@ ctor.workspace = true
 derive_more.workspace = true
 etagere = "0.2"
 futures.workspace = true
+futures-concurrency.workspace = true
 gpui_macros.workspace = true
 http_client.workspace = true
 image.workspace = true
@@ -83,19 +83,27 @@ serde.workspace = true
 serde_json.workspace = true
 slotmap.workspace = true
 smallvec.workspace = true
-smol.workspace = true
+async-channel.workspace = true
 stacksafe.workspace = true
 strum.workspace = true
 sum_tree.workspace = true
 taffy = "=0.9.0"
 thiserror.workspace = true
-util.workspace = true
-uuid.workspace = true
+gpui_util.workspace = true
 waker-fn = "1.2.0"
 lyon = "1.0"
 pin-project = "1.1.10"
 circular-buffer.workspace = true
 spin = "0.10.0"
+pollster.workspace = true
+url.workspace = true
+uuid.workspace = true
+web-time.workspace = true
+
+[target.'cfg(target_family = "wasm")'.dependencies]
+getrandom = { version = "0.3.4", features = ["wasm_js"] }
+uuid = { workspace = true, features = ["js"] }
+
 
 [target.'cfg(target_os = "macos")'.dependencies]
 block = "0.1"
@@ -135,19 +143,23 @@ backtrace.workspace = true
 collections = { workspace = true, features = ["test-support"] }
 env_logger.workspace = true
 gpui_platform.workspace = true
-http_client = { workspace = true, features = ["test-support"] }
 lyon = { version = "1.0", features = ["extra"] }
 pretty_assertions.workspace = true
 rand.workspace = true
-reqwest_client = { workspace = true, features = ["test-support"] }
 scheduler = { workspace = true, features = ["test-support"] }
 unicode-segmentation.workspace = true
-util = { workspace = true, features = ["test-support"] }
+gpui_util = { workspace = true }
 
+[target.'cfg(not(target_family = "wasm"))'.dev-dependencies]
+http_client = { workspace = true, features = ["test-support"] }
+reqwest_client = { workspace = true, features = ["test-support"] }
 
+[target.'cfg(target_family = "wasm")'.dev-dependencies]
+wasm-bindgen = { workspace = true }
+gpui_web.workspace = true
 
-[target.'cfg(target_os = "windows")'.build-dependencies]
-embed-resource = "3.0"
+[build-dependencies]
+embed-resource = { version = "3.0", optional = true }
 
 [target.'cfg(target_os = "macos")'.build-dependencies]
 bindgen = "0.71"

crates/gpui/build.rs 🔗

@@ -1,14 +1,17 @@
 #![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
-#![cfg_attr(not(target_os = "macos"), allow(unused))]
 
 fn main() {
     println!("cargo::rustc-check-cfg=cfg(gles)");
 
-    #[cfg(all(target_os = "windows", feature = "windows-manifest"))]
-    embed_resource();
+    let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap_or_default();
+
+    if target_os == "windows" {
+        #[cfg(feature = "windows-manifest")]
+        embed_resource();
+    }
 }
 
-#[cfg(all(target_os = "windows", feature = "windows-manifest"))]
+#[cfg(feature = "windows-manifest")]
 fn embed_resource() {
     let manifest = std::path::Path::new("resources/windows/gpui.manifest.xml");
     let rc_file = std::path::Path::new("resources/windows/gpui.rc");

crates/gpui/examples/animation.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::time::Duration;
 
 use anyhow::Result;
@@ -101,7 +103,7 @@ impl Render for AnimationExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().with_assets(Assets {}).run(|cx: &mut App| {
         let options = WindowOptions {
             window_bounds: Some(WindowBounds::Windowed(Bounds::centered(
@@ -118,3 +120,15 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/data_table.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::{ops::Range, rc::Rc, time::Duration};
 
 use gpui::{
@@ -447,7 +449,7 @@ impl Render for DataTable {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.open_window(
             WindowOptions {
@@ -472,3 +474,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/drag_drop.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, Half, Hsla, Pixels, Point, Window, WindowBounds, WindowOptions, div,
     prelude::*, px, rgb, size,
@@ -121,7 +123,7 @@ impl Render for DragDrop {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(800.), px(600.0)), cx);
         cx.open_window(
@@ -136,3 +138,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/focus_visible.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, Div, ElementId, FocusHandle, KeyBinding, SharedString, Stateful, Window,
     WindowBounds, WindowOptions, actions, div, prelude::*, px, size,
@@ -192,7 +194,7 @@ impl Render for Example {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.bind_keys([
             KeyBinding::new("tab", Tab, None),
@@ -213,3 +215,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/gif_viewer.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{App, Context, Render, Window, WindowOptions, div, img, prelude::*};
 use gpui_platform::application;
 use std::path::PathBuf;
@@ -23,8 +25,7 @@ impl Render for GifViewer {
     }
 }
 
-fn main() {
-    env_logger::init();
+fn run_example() {
     application().run(|cx: &mut App| {
         let gif_path =
             PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("examples/image/black-cat-typing.gif");
@@ -40,3 +41,16 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    env_logger::init();
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/gradient.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, ColorSpace, Context, Half, Render, Window, WindowOptions, canvas, div,
     linear_color_stop, linear_gradient, point, prelude::*, px, size,
@@ -243,7 +245,7 @@ impl Render for GradientViewer {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.open_window(
             WindowOptions {
@@ -256,3 +258,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/grid_layout.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, Hsla, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size,
 };
@@ -64,7 +66,7 @@ impl Render for HolyGrailExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx);
         cx.open_window(
@@ -78,3 +80,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/hello_world.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, SharedString, Window, WindowBounds, WindowOptions, div, prelude::*, px,
     rgb, size,
@@ -87,7 +89,7 @@ impl Render for HelloWorld {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx);
         cx.open_window(
@@ -105,3 +107,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/image/image.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::fs;
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -8,7 +10,7 @@ use gpui::{
     SharedString, SharedUri, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div,
     img, prelude::*, px, rgb, size,
 };
-use gpui_platform::application;
+#[cfg(not(target_family = "wasm"))]
 use reqwest_client::ReqwestClient;
 
 struct Assets {
@@ -146,50 +148,77 @@ impl Render for ImageShowcase {
 
 actions!(image, [Quit]);
 
-fn main() {
-    env_logger::init();
-
+fn run_example() {
     let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
 
-    application()
-        .with_assets(Assets {
-            base: manifest_dir.join("examples"),
-        })
-        .run(move |cx: &mut App| {
+    #[cfg(not(target_family = "wasm"))]
+    let app = gpui_platform::application();
+    #[cfg(target_family = "wasm")]
+    let app = gpui_platform::single_threaded_web();
+    app.with_assets(Assets {
+        base: manifest_dir.join("examples"),
+    })
+    .run(move |cx: &mut App| {
+        #[cfg(not(target_family = "wasm"))]
+        {
             let http_client = ReqwestClient::user_agent("gpui example").unwrap();
             cx.set_http_client(Arc::new(http_client));
-
-            cx.activate(true);
-            cx.on_action(|_: &Quit, cx| cx.quit());
-            cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
-            cx.set_menus(vec![Menu {
-                name: "Image".into(),
-                items: vec![MenuItem::action("Quit", Quit)],
-            }]);
-
-            let window_options = WindowOptions {
-                titlebar: Some(TitlebarOptions {
-                    title: Some(SharedString::from("Image Example")),
-                    appears_transparent: false,
-                    ..Default::default()
-                }),
-
-                window_bounds: Some(WindowBounds::Windowed(Bounds {
-                    size: size(px(1100.), px(600.)),
-                    origin: Point::new(px(200.), px(200.)),
-                })),
-
-                ..Default::default()
+        }
+        #[cfg(target_family = "wasm")]
+        {
+            // Safety: the web examples run single-threaded; the client is
+            // created and used exclusively on the main thread.
+            let http_client = unsafe {
+                gpui_web::FetchHttpClient::with_user_agent("gpui example")
+                    .expect("failed to create FetchHttpClient")
             };
+            cx.set_http_client(Arc::new(http_client));
+        }
 
-            cx.open_window(window_options, |_, cx| {
-                cx.new(|_| ImageShowcase {
-                    // Relative path to your root project path
-                    local_resource: manifest_dir.join("examples/image/app-icon.png").into(),
-                    remote_resource: "https://picsum.photos/800/400".into(),
-                    asset_resource: "image/color.svg".into(),
-                })
+        cx.activate(true);
+        cx.on_action(|_: &Quit, cx| cx.quit());
+        cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
+        cx.set_menus(vec![Menu {
+            name: "Image".into(),
+            items: vec![MenuItem::action("Quit", Quit)],
+        }]);
+
+        let window_options = WindowOptions {
+            titlebar: Some(TitlebarOptions {
+                title: Some(SharedString::from("Image Example")),
+                appears_transparent: false,
+                ..Default::default()
+            }),
+
+            window_bounds: Some(WindowBounds::Windowed(Bounds {
+                size: size(px(1100.), px(600.)),
+                origin: Point::new(px(200.), px(200.)),
+            })),
+
+            ..Default::default()
+        };
+
+        cx.open_window(window_options, |_, cx| {
+            cx.new(|_| ImageShowcase {
+                // Relative path to your root project path
+                local_resource: manifest_dir.join("examples/image/app-icon.png").into(),
+                remote_resource: "https://picsum.photos/800/400".into(),
+                asset_resource: "image/color.svg".into(),
             })
-            .unwrap();
-        });
+        })
+        .unwrap();
+    });
+}
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    env_logger::init();
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
 }

crates/gpui/examples/image_gallery.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use futures::FutureExt;
 use gpui::{
     App, AppContext, Asset as _, AssetLogger, Bounds, ClickEvent, Context, ElementId, Entity,
@@ -5,7 +7,7 @@ use gpui::{
     RetainAllImageCache, SharedString, TitlebarOptions, Window, WindowBounds, WindowOptions,
     actions, div, hash, image_cache, img, prelude::*, px, rgb, size,
 };
-use gpui_platform::application;
+#[cfg(not(target_family = "wasm"))]
 use reqwest_client::ReqwestClient;
 use std::{collections::HashMap, sync::Arc};
 
@@ -245,12 +247,28 @@ impl ImageCache for SimpleLruCache {
 
 actions!(image, [Quit]);
 
-fn main() {
-    env_logger::init();
+fn run_example() {
+    #[cfg(not(target_family = "wasm"))]
+    let app = gpui_platform::application();
+    #[cfg(target_family = "wasm")]
+    let app = gpui_platform::single_threaded_web();
 
-    application().run(move |cx: &mut App| {
-        let http_client = ReqwestClient::user_agent("gpui example").unwrap();
-        cx.set_http_client(Arc::new(http_client));
+    app.run(move |cx: &mut App| {
+        #[cfg(not(target_family = "wasm"))]
+        {
+            let http_client = ReqwestClient::user_agent("gpui example").unwrap();
+            cx.set_http_client(Arc::new(http_client));
+        }
+        #[cfg(target_family = "wasm")]
+        {
+            // Safety: the web examples run single-threaded; the client is
+            // created and used exclusively on the main thread.
+            let http_client = unsafe {
+                gpui_web::FetchHttpClient::with_user_agent("gpui example")
+                    .expect("failed to create FetchHttpClient")
+            };
+            cx.set_http_client(Arc::new(http_client));
+        }
 
         cx.activate(true);
         cx.on_action(|_: &Quit, cx| cx.quit());
@@ -287,3 +305,16 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    env_logger::init();
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/image_loading.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::{path::Path, sync::Arc, time::Duration};
 
 use gpui::{
@@ -192,8 +194,7 @@ impl Render for ImageLoadingExample {
     }
 }
 
-fn main() {
-    env_logger::init();
+fn run_example() {
     application().with_assets(Assets {}).run(|cx: &mut App| {
         let options = WindowOptions {
             window_bounds: Some(WindowBounds::Windowed(Bounds::centered(
@@ -210,3 +211,16 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    env_logger::init();
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/input.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::ops::Range;
 
 use gpui::{
@@ -682,7 +684,7 @@ impl Render for InputExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx);
         cx.bind_keys([
@@ -752,3 +754,15 @@ fn main() {
         cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/layer_shell.rs 🔗

@@ -1,4 +1,6 @@
-fn main() {
+#![cfg_attr(target_family = "wasm", no_main)]
+
+fn run_example() {
     #[cfg(all(target_os = "linux", feature = "wayland"))]
     example::main();
 
@@ -6,6 +8,18 @@ fn main() {
     panic!("This example requires the `wayland` feature and a linux system.");
 }
 
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}
+
 #[cfg(all(target_os = "linux", feature = "wayland"))]
 mod example {
     use std::time::{Duration, SystemTime, UNIX_EPOCH};

crates/gpui/examples/mouse_pressure.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, MousePressureEvent, PressureStage, Window, WindowBounds, WindowOptions,
     div, prelude::*, px, rgb, size,
@@ -44,7 +46,7 @@ impl MousePressureExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx);
 
@@ -65,3 +67,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/on_window_close_quit.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, FocusHandle, KeyBinding, Window, WindowBounds, WindowOptions, actions,
     div, prelude::*, px, rgb, size,
@@ -35,7 +37,7 @@ impl Render for ExampleWindow {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let mut bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx);
 
@@ -81,3 +83,15 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/opacity.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::{fs, path::PathBuf};
 
 use anyhow::Result;
@@ -156,7 +158,7 @@ impl Render for HelloWorld {
     }
 }
 
-fn main() {
+fn run_example() {
     application()
         .with_assets(Assets {
             base: PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("examples"),
@@ -174,3 +176,15 @@ fn main() {
             cx.activate(true);
         });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/ownership_post.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{App, Context, Entity, EventEmitter, prelude::*};
 use gpui_platform::application;
 
@@ -11,7 +13,7 @@ struct Change {
 
 impl EventEmitter<Change> for Counter {}
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let counter: Entity<Counter> = cx.new(|_cx| Counter { count: 0 });
         let subscriber = cx.new(|cx: &mut Context<Counter>| {
@@ -34,3 +36,15 @@ fn main() {
         assert_eq!(subscriber.read(cx).count, 4);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/painting.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     Background, Bounds, ColorSpace, Context, MouseDownEvent, Path, PathBuilder, PathStyle, Pixels,
     Point, Render, StrokeOptions, Window, WindowOptions, canvas, div, linear_color_stop,
@@ -445,7 +447,7 @@ impl Render for PaintingViewer {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx| {
         cx.open_window(
             WindowOptions {
@@ -462,3 +464,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/paths_bench.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     Background, Bounds, ColorSpace, Context, Path, PathBuilder, Pixels, Render, TitlebarOptions,
     Window, WindowBounds, WindowOptions, canvas, div, linear_color_stop, linear_gradient, point,
@@ -69,7 +71,7 @@ impl Render for PaintingViewer {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx| {
         cx.open_window(
             WindowOptions {
@@ -91,3 +93,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/pattern.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, AppContext, Bounds, Context, Window, WindowBounds, WindowOptions, div, linear_color_stop,
     linear_gradient, pattern_slash, prelude::*, px, rgb, size,
@@ -99,7 +101,7 @@ impl Render for PatternExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(600.0), px(600.0)), cx);
         cx.open_window(
@@ -114,3 +116,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/popover.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Context, Corner, Div, Hsla, Stateful, Window, WindowOptions, anchored, deferred, div,
     prelude::*, px,
@@ -161,7 +163,7 @@ impl Render for HelloWorld {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.open_window(WindowOptions::default(), |_, cx| {
             cx.new(|_| HelloWorld {
@@ -173,3 +175,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/scrollable.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{App, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px, size};
 use gpui_platform::application;
 
@@ -42,7 +44,7 @@ impl Render for Scrollable {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx);
         cx.open_window(
@@ -56,3 +58,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/set_menus.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Context, Global, Menu, MenuItem, SharedString, SystemMenuType, Window, WindowOptions,
     actions, div, prelude::*, rgb,
@@ -20,7 +22,7 @@ impl Render for SetMenus {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.set_global(AppState::new());
 
@@ -36,6 +38,18 @@ fn main() {
     });
 }
 
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}
+
 #[derive(PartialEq)]
 enum ViewMode {
     List,

crates/gpui/examples/shadow.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, BoxShadow, Context, Div, SharedString, Window, WindowBounds, WindowOptions, div,
     hsla, point, prelude::*, px, relative, rgb, size,
@@ -569,7 +571,7 @@ impl Render for Shadow {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(1000.0), px(800.0)), cx);
         cx.open_window(
@@ -584,3 +586,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/svg/svg.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::fs;
 use std::path::PathBuf;
 
@@ -68,7 +70,7 @@ impl Render for SvgExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application()
         .with_assets(Assets {
             base: PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("examples"),
@@ -86,3 +88,15 @@ fn main() {
             cx.activate(true);
         });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/tab_stop.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, Div, ElementId, FocusHandle, KeyBinding, SharedString, Stateful, Window,
     WindowBounds, WindowOptions, actions, div, prelude::*, px, size,
@@ -178,7 +180,7 @@ impl Render for Example {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.bind_keys([
             KeyBinding::new("tab", Tab, None),
@@ -198,3 +200,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/testing.rs 🔗

@@ -1,3 +1,4 @@
+#![cfg_attr(target_family = "wasm", no_main)]
 //! Example demonstrating GPUI's testing infrastructure.
 //!
 //! When run normally, this displays an interactive counter window.
@@ -176,7 +177,7 @@ impl Render for Counter {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.bind_keys([
             gpui::KeyBinding::new("up", Increment, Some("Counter")),
@@ -199,6 +200,18 @@ fn main() {
     });
 }
 
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

crates/gpui/examples/text.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use std::{
     ops::{Deref, DerefMut},
     sync::Arc,
@@ -298,7 +300,7 @@ impl Render for TextExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         cx.set_menus(vec![Menu {
             name: "GPUI Typography".into(),
@@ -332,3 +334,15 @@ fn main() {
             .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/text_layout.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, FontStyle, FontWeight, StyledText, Window, WindowBounds, WindowOptions,
     div, prelude::*, px, size,
@@ -81,7 +83,7 @@ impl Render for HelloWorld {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx);
         cx.open_window(
@@ -95,3 +97,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/text_wrapper.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, TextOverflow, Window, WindowBounds, WindowOptions, div, prelude::*, px,
     size,
@@ -108,7 +110,7 @@ impl Render for HelloWorld {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx);
         cx.open_window(
@@ -122,3 +124,15 @@ fn main() {
         cx.activate(true);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/tree.rs 🔗

@@ -1,3 +1,4 @@
+#![cfg_attr(target_family = "wasm", no_main)]
 //! Renders a div with deep children hierarchy. This example is useful to exemplify that Zed can
 //! handle deep hierarchies (even though it cannot just yet!).
 use std::sync::LazyLock;
@@ -29,7 +30,7 @@ impl Render for Tree {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx);
         cx.open_window(
@@ -42,3 +43,15 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/uniform_list.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size,
     uniform_list,
@@ -36,7 +38,7 @@ impl Render for UniformListExample {
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx);
         cx.open_window(
@@ -49,3 +51,15 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/window.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, KeyBinding, PromptButton, PromptLevel, Window, WindowBounds, WindowKind,
     WindowOptions, actions, div, prelude::*, px, rgb, size,
@@ -306,7 +308,7 @@ impl Render for WindowDemo {
 
 actions!(window, [Quit]);
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx);
 
@@ -333,3 +335,15 @@ fn main() {
         cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/window_positioning.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, DisplayId, Hsla, Pixels, SharedString, Size, Window,
     WindowBackgroundAppearance, WindowBounds, WindowKind, WindowOptions, div, point, prelude::*,
@@ -68,7 +70,7 @@ fn build_window_options(display_id: DisplayId, bounds: Bounds<Pixels>) -> Window
     }
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         // Create several new windows, positioned in the top right corner of each screen
         let size = Size {
@@ -218,3 +220,15 @@ fn main() {
         }
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/examples/window_shadow.rs 🔗

@@ -1,3 +1,5 @@
+#![cfg_attr(target_family = "wasm", no_main)]
+
 use gpui::{
     App, Bounds, Context, CursorStyle, Decorations, HitboxBehavior, Hsla, MouseButton, Pixels,
     Point, ResizeEdge, Size, Window, WindowBackgroundAppearance, WindowBounds, WindowDecorations,
@@ -203,7 +205,7 @@ fn resize_edge(pos: Point<Pixels>, shadow_size: Pixels, size: Size<Pixels>) -> O
     Some(edge)
 }
 
-fn main() {
+fn run_example() {
     application().run(|cx: &mut App| {
         let bounds = Bounds::centered(None, size(px(600.0), px(600.0)), cx);
         cx.open_window(
@@ -226,3 +228,15 @@ fn main() {
         .unwrap();
     });
 }
+
+#[cfg(not(target_family = "wasm"))]
+fn main() {
+    run_example();
+}
+
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen::prelude::wasm_bindgen(start)]
+pub fn start() {
+    gpui_platform::web_init();
+    run_example();
+}

crates/gpui/src/app.rs 🔗

@@ -1,3 +1,4 @@
+use scheduler::Instant;
 use std::{
     any::{TypeId, type_name},
     cell::{BorrowMutError, Cell, Ref, RefCell, RefMut},
@@ -7,7 +8,7 @@ use std::{
     path::{Path, PathBuf},
     rc::{Rc, Weak},
     sync::{Arc, atomic::Ordering::SeqCst},
-    time::{Duration, Instant},
+    time::Duration,
 };
 
 use anyhow::{Context as _, Result, anyhow};
@@ -25,11 +26,11 @@ pub use async_context::*;
 use collections::{FxHashMap, FxHashSet, HashMap, VecDeque};
 pub use context::*;
 pub use entity_map::*;
+use gpui_util::{ResultExt, debug_panic};
 use http_client::{HttpClient, Url};
 use smallvec::SmallVec;
 #[cfg(any(test, feature = "test-support"))]
 pub use test_context::*;
-use util::{ResultExt, debug_panic};
 #[cfg(all(target_os = "macos", any(test, feature = "test-support")))]
 pub use visual_test_context::*;
 
@@ -752,6 +753,37 @@ impl App {
         app
     }
 
+    #[doc(hidden)]
+    pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> {
+        self.entities.ref_counts_drop_handle()
+    }
+
+    /// Captures a snapshot of all entities that currently have alive handles.
+    ///
+    /// The returned [`LeakDetectorSnapshot`] can later be passed to
+    /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no
+    /// entities created after the snapshot are still alive.
+    #[cfg(any(test, feature = "leak-detection"))]
+    pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot {
+        self.entities.leak_detector_snapshot()
+    }
+
+    /// Asserts that no entities created after `snapshot` still have alive handles.
+    ///
+    /// Entities that were already tracked at the time of the snapshot are ignored,
+    /// even if they still have handles. Only *new* entities (those whose
+    /// `EntityId` was not present in the snapshot) are considered leaks.
+    ///
+    /// # Panics
+    ///
+    /// Panics if any new entity handles exist. The panic message lists every
+    /// leaked entity with its type name, and includes allocation-site backtraces
+    /// when `LEAK_BACKTRACE` is set.
+    #[cfg(any(test, feature = "leak-detection"))]
+    pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) {
+        self.entities.assert_no_new_leaks(snapshot)
+    }
+
     /// Quit the application gracefully. Handlers registered with [`Context::on_app_quit`]
     /// will be given 100ms to complete before exiting.
     pub fn shutdown(&mut self) {

crates/gpui/src/app/async_context.rs 🔗

@@ -4,10 +4,10 @@ use crate::{
     PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window,
     WindowHandle,
 };
-use anyhow::Context as _;
+use anyhow::{Context as _, bail};
 use derive_more::{Deref, DerefMut};
 use futures::channel::oneshot;
-use smol::future::FutureExt;
+use futures::future::FutureExt;
 use std::{future::Future, rc::Weak};
 
 use super::{Context, WeakEntity};
@@ -88,6 +88,9 @@ impl AppContext for AsyncApp {
     {
         let app = self.app.upgrade().context("app was released")?;
         let mut lock = app.try_borrow_mut()?;
+        if lock.quitting {
+            bail!("app is quitting");
+        }
         lock.update_window(window, f)
     }
 
@@ -101,6 +104,9 @@ impl AppContext for AsyncApp {
     {
         let app = self.app.upgrade().context("app was released")?;
         let lock = app.borrow();
+        if lock.quitting {
+            bail!("app is quitting");
+        }
         lock.read_window(window, read)
     }
 
@@ -174,6 +180,9 @@ impl AsyncApp {
     {
         let app = self.app();
         let mut lock = app.borrow_mut();
+        if lock.quitting {
+            bail!("app is quitting");
+        }
         lock.open_window(options, build_root_view)
     }
 
@@ -211,6 +220,9 @@ impl AsyncApp {
     pub fn try_read_global<G: Global, R>(&self, read: impl FnOnce(&G, &App) -> R) -> Option<R> {
         let app = self.app();
         let app = app.borrow_mut();
+        if app.quitting {
+            return None;
+        }
         Some(read(app.try_global()?, &app))
     }
 
@@ -241,10 +253,10 @@ impl AsyncApp {
         &self,
         entity: &WeakEntity<T>,
         f: Callback,
-    ) -> util::Deferred<impl FnOnce() + use<T, Callback>> {
+    ) -> gpui_util::Deferred<impl FnOnce() + use<T, Callback>> {
         let entity = entity.clone();
         let mut cx = self.clone();
-        util::defer(move || {
+        gpui_util::defer(move || {
             entity.update(&mut cx, f).ok();
         })
     }

crates/gpui/src/app/context.rs 🔗

@@ -5,6 +5,7 @@ use crate::{
 };
 use anyhow::Result;
 use futures::FutureExt;
+use gpui_util::Deferred;
 use std::{
     any::{Any, TypeId},
     borrow::{Borrow, BorrowMut},
@@ -12,7 +13,6 @@ use std::{
     ops,
     sync::Arc,
 };
-use util::Deferred;
 
 use super::{App, AsyncWindowContext, Entity, KeystrokeEvent};
 
@@ -278,7 +278,7 @@ impl<'a, T: 'static> Context<'a, T> {
     ) -> Deferred<impl FnOnce()> {
         let this = self.weak_entity();
         let mut cx = self.to_async();
-        util::defer(move || {
+        gpui_util::defer(move || {
             this.update(&mut cx, f).ok();
         })
     }

crates/gpui/src/app/entity_map.rs 🔗

@@ -83,6 +83,32 @@ impl EntityMap {
         }
     }
 
+    #[doc(hidden)]
+    pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> {
+        self.ref_counts.clone()
+    }
+
+    /// Captures a snapshot of all entities that currently have alive handles.
+    ///
+    /// The returned [`LeakDetectorSnapshot`] can later be passed to
+    /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no
+    /// entities created after the snapshot are still alive.
+    #[cfg(any(test, feature = "leak-detection"))]
+    pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot {
+        self.ref_counts.read().leak_detector.snapshot()
+    }
+
+    /// Asserts that no entities created after `snapshot` still have alive handles.
+    ///
+    /// See [`LeakDetector::assert_no_new_leaks`] for details.
+    #[cfg(any(test, feature = "leak-detection"))]
+    pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) {
+        self.ref_counts
+            .read()
+            .leak_detector
+            .assert_no_new_leaks(snapshot)
+    }
+
     /// Reserve a slot for an entity, which you can subsequently use with `insert`.
     pub fn reserve<T: 'static>(&self) -> Slot<T> {
         let id = self.ref_counts.write().counts.insert(1.into());
@@ -225,7 +251,12 @@ pub struct AnyEntity {
 }
 
 impl AnyEntity {
-    fn new(id: EntityId, entity_type: TypeId, entity_map: Weak<RwLock<EntityRefCounts>>) -> Self {
+    fn new(
+        id: EntityId,
+        entity_type: TypeId,
+        entity_map: Weak<RwLock<EntityRefCounts>>,
+        #[cfg(any(test, feature = "leak-detection"))] type_name: &'static str,
+    ) -> Self {
         Self {
             entity_id: id,
             entity_type,
@@ -236,7 +267,7 @@ impl AnyEntity {
                 .unwrap()
                 .write()
                 .leak_detector
-                .handle_created(id),
+                .handle_created(id, Some(type_name)),
             entity_map,
         }
     }
@@ -299,7 +330,7 @@ impl Clone for AnyEntity {
                 .unwrap()
                 .write()
                 .leak_detector
-                .handle_created(self.entity_id),
+                .handle_created(self.entity_id, None),
         }
     }
 }
@@ -395,7 +426,13 @@ impl<T: 'static> Entity<T> {
         T: 'static,
     {
         Self {
-            any_entity: AnyEntity::new(id, TypeId::of::<T>(), entity_map),
+            any_entity: AnyEntity::new(
+                id,
+                TypeId::of::<T>(),
+                entity_map,
+                #[cfg(any(test, feature = "leak-detection"))]
+                std::any::type_name::<T>(),
+            ),
             entity_type: PhantomData,
         }
     }
@@ -574,7 +611,7 @@ impl AnyWeakEntity {
                 .unwrap()
                 .write()
                 .leak_detector
-                .handle_created(self.entity_id),
+                .handle_created(self.entity_id, None),
         })
     }
 
@@ -892,7 +929,23 @@ pub(crate) struct HandleId {
 #[cfg(any(test, feature = "leak-detection"))]
 pub(crate) struct LeakDetector {
     next_handle_id: u64,
-    entity_handles: HashMap<EntityId, HashMap<HandleId, Option<backtrace::Backtrace>>>,
+    entity_handles: HashMap<EntityId, EntityLeakData>,
+}
+
+/// A snapshot of the set of alive entities at a point in time.
+///
+/// Created by [`LeakDetector::snapshot`]. Can later be passed to
+/// [`LeakDetector::assert_no_new_leaks`] to verify that no new entity
+/// handles remain between the snapshot and the current state.
+#[cfg(any(test, feature = "leak-detection"))]
+pub struct LeakDetectorSnapshot {
+    entity_ids: collections::HashSet<EntityId>,
+}
+
+#[cfg(any(test, feature = "leak-detection"))]
+struct EntityLeakData {
+    handles: HashMap<HandleId, Option<backtrace::Backtrace>>,
+    type_name: &'static str,
 }
 
 #[cfg(any(test, feature = "leak-detection"))]
@@ -903,11 +956,21 @@ impl LeakDetector {
     /// the handle is dropped. If `LEAK_BACKTRACE` is set, captures a backtrace
     /// at the allocation site.
     #[track_caller]
-    pub fn handle_created(&mut self, entity_id: EntityId) -> HandleId {
-        let id = util::post_inc(&mut self.next_handle_id);
+    pub fn handle_created(
+        &mut self,
+        entity_id: EntityId,
+        type_name: Option<&'static str>,
+    ) -> HandleId {
+        let id = gpui_util::post_inc(&mut self.next_handle_id);
         let handle_id = HandleId { id };
-        let handles = self.entity_handles.entry(entity_id).or_default();
-        handles.insert(
+        let handles = self
+            .entity_handles
+            .entry(entity_id)
+            .or_insert_with(|| EntityLeakData {
+                handles: HashMap::default(),
+                type_name: type_name.unwrap_or("<unknown>"),
+            });
+        handles.handles.insert(
             handle_id,
             LEAK_BACKTRACE.then(backtrace::Backtrace::new_unresolved),
         );
@@ -919,8 +982,14 @@ impl LeakDetector {
     /// This removes the handle from tracking. The `handle_id` should be the same
     /// one returned by `handle_created` when the handle was allocated.
     pub fn handle_released(&mut self, entity_id: EntityId, handle_id: HandleId) {
-        let handles = self.entity_handles.entry(entity_id).or_default();
-        handles.remove(&handle_id);
+        if let std::collections::hash_map::Entry::Occupied(mut data) =
+            self.entity_handles.entry(entity_id)
+        {
+            data.get_mut().handles.remove(&handle_id);
+            if data.get().handles.is_empty() {
+                data.remove();
+            }
+        }
     }
 
     /// Asserts that all handles to the given entity have been released.
@@ -932,11 +1001,10 @@ impl LeakDetector {
     /// otherwise it suggests setting the environment variable to get more info.
     pub fn assert_released(&mut self, entity_id: EntityId) {
         use std::fmt::Write as _;
-        let handles = self.entity_handles.entry(entity_id).or_default();
-        if !handles.is_empty() {
+        if let Some(data) = self.entity_handles.remove(&entity_id) {
             let mut out = String::new();
-            for backtrace in handles.values_mut() {
-                if let Some(mut backtrace) = backtrace.take() {
+            for (_, backtrace) in data.handles {
+                if let Some(mut backtrace) = backtrace {
                     backtrace.resolve();
                     writeln!(out, "Leaked handle:\n{:?}", backtrace).unwrap();
                 } else {
@@ -950,6 +1018,96 @@ impl LeakDetector {
             panic!("{out}");
         }
     }
+
+    /// Captures a snapshot of all entity IDs that currently have alive handles.
+    ///
+    /// The returned [`LeakDetectorSnapshot`] can later be passed to
+    /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no
+    /// entities created after the snapshot are still alive.
+    pub fn snapshot(&self) -> LeakDetectorSnapshot {
+        LeakDetectorSnapshot {
+            entity_ids: self.entity_handles.keys().copied().collect(),
+        }
+    }
+
+    /// Asserts that no entities created after `snapshot` still have alive handles.
+    ///
+    /// Entities that were already tracked at the time of the snapshot are ignored,
+    /// even if they still have handles. Only *new* entities (those whose
+    /// `EntityId` was not present in the snapshot) are considered leaks.
+    ///
+    /// # Panics
+    ///
+    /// Panics if any new entity handles exist. The panic message lists every
+    /// leaked entity with its type name, and includes allocation-site backtraces
+    /// when `LEAK_BACKTRACE` is set.
+    pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) {
+        use std::fmt::Write as _;
+
+        let mut out = String::new();
+        for (entity_id, data) in &self.entity_handles {
+            if snapshot.entity_ids.contains(entity_id) {
+                continue;
+            }
+            for (_, backtrace) in &data.handles {
+                if let Some(backtrace) = backtrace {
+                    let mut backtrace = backtrace.clone();
+                    backtrace.resolve();
+                    writeln!(
+                        out,
+                        "Leaked handle for entity {} ({entity_id:?}):\n{:?}",
+                        data.type_name, backtrace
+                    )
+                    .unwrap();
+                } else {
+                    writeln!(
+                        out,
+                        "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)",
+                        data.type_name
+                    )
+                    .unwrap();
+                }
+            }
+        }
+
+        if !out.is_empty() {
+            panic!("New entity leaks detected since snapshot:\n{out}");
+        }
+    }
+}
+
+#[cfg(any(test, feature = "leak-detection"))]
+impl Drop for LeakDetector {
+    fn drop(&mut self) {
+        use std::fmt::Write;
+
+        if self.entity_handles.is_empty() || std::thread::panicking() {
+            return;
+        }
+
+        let mut out = String::new();
+        for (entity_id, data) in self.entity_handles.drain() {
+            for (_handle, backtrace) in data.handles {
+                if let Some(mut backtrace) = backtrace {
+                    backtrace.resolve();
+                    writeln!(
+                        out,
+                        "Leaked handle for entity {} ({entity_id:?}):\n{:?}",
+                        data.type_name, backtrace
+                    )
+                    .unwrap();
+                } else {
+                    writeln!(
+                        out,
+                        "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)",
+                        data.type_name
+                    )
+                    .unwrap();
+                }
+            }
+        }
+        panic!("Exited with leaked handles:\n{out}");
+    }
 }
 
 #[cfg(test)]
@@ -1007,4 +1165,42 @@ mod test {
             vec![1],
         );
     }
+
+    #[test]
+    fn test_leak_detector_snapshot_no_leaks() {
+        let mut entity_map = EntityMap::new();
+
+        let slot = entity_map.reserve::<TestEntity>();
+        let pre_existing = entity_map.insert(slot, TestEntity { i: 1 });
+
+        let snapshot = entity_map.leak_detector_snapshot();
+
+        let slot = entity_map.reserve::<TestEntity>();
+        let temporary = entity_map.insert(slot, TestEntity { i: 2 });
+        drop(temporary);
+
+        entity_map.assert_no_new_leaks(&snapshot);
+
+        drop(pre_existing);
+    }
+
+    #[test]
+    #[should_panic(expected = "New entity leaks detected since snapshot")]
+    fn test_leak_detector_snapshot_detects_new_leak() {
+        let mut entity_map = EntityMap::new();
+
+        let slot = entity_map.reserve::<TestEntity>();
+        let pre_existing = entity_map.insert(slot, TestEntity { i: 1 });
+
+        let snapshot = entity_map.leak_detector_snapshot();
+
+        let slot = entity_map.reserve::<TestEntity>();
+        let leaked = entity_map.insert(slot, TestEntity { i: 2 });
+
+        // `leaked` is still alive, so this should panic.
+        entity_map.assert_no_new_leaks(&snapshot);
+
+        drop(pre_existing);
+        drop(leaked);
+    }
 }

crates/gpui/src/app/test_context.rs 🔗

@@ -5,7 +5,7 @@ use crate::{
     ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels,
     Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform,
     TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds,
-    WindowHandle, WindowOptions, app::GpuiMode,
+    WindowHandle, WindowOptions, app::GpuiMode, window::ElementArenaScope,
 };
 use anyhow::{anyhow, bail};
 use futures::{Stream, StreamExt, channel::oneshot};
@@ -18,18 +18,17 @@ use std::{
 /// an implementation of `Context` with additional methods that are useful in tests.
 #[derive(Clone)]
 pub struct TestAppContext {
-    #[doc(hidden)]
-    pub app: Rc<AppCell>,
     #[doc(hidden)]
     pub background_executor: BackgroundExecutor,
     #[doc(hidden)]
     pub foreground_executor: ForegroundExecutor,
-    #[doc(hidden)]
-    pub dispatcher: TestDispatcher,
+    dispatcher: TestDispatcher,
     test_platform: Rc<TestPlatform>,
     text_system: Arc<TextSystem>,
     fn_name: Option<&'static str>,
     on_quit: Rc<RefCell<Vec<Box<dyn FnOnce() + 'static>>>>,
+    #[doc(hidden)]
+    pub app: Rc<AppCell>,
 }
 
 impl AppContext for TestAppContext {
@@ -402,8 +401,8 @@ impl TestAppContext {
     }
 
     /// Wait until there are no more pending tasks.
-    pub fn run_until_parked(&mut self) {
-        self.background_executor.run_until_parked()
+    pub fn run_until_parked(&self) {
+        self.dispatcher.run_until_parked();
     }
 
     /// Simulate dispatching an action to the currently focused node in the window.
@@ -521,22 +520,25 @@ impl TestAppContext {
         let mut notifications = self.notifications(entity);
 
         use futures::FutureExt as _;
-        use smol::future::FutureExt as _;
+        use futures_concurrency::future::Race as _;
 
-        async {
-            loop {
-                if entity.update(self, &mut predicate) {
-                    return Ok(());
-                }
+        (
+            async {
+                loop {
+                    if entity.update(self, &mut predicate) {
+                        return Ok(());
+                    }
 
-                if notifications.next().await.is_none() {
-                    bail!("entity dropped")
+                    if notifications.next().await.is_none() {
+                        bail!("entity dropped")
+                    }
                 }
-            }
-        }
-        .race(timer.map(|_| Err(anyhow!("condition timed out"))))
-        .await
-        .unwrap();
+            },
+            timer.map(|_| Err(anyhow!("condition timed out"))),
+        )
+            .race()
+            .await
+            .unwrap();
     }
 
     /// Set a name for this App.
@@ -816,6 +818,8 @@ impl VisualTestContext {
         E: Element,
     {
         self.update(|window, cx| {
+            let _arena_scope = ElementArenaScope::enter(&cx.element_arena);
+
             window.invalidator.set_phase(DrawPhase::Prepaint);
             let mut element = Drawable::new(f(window, cx));
             element.layout_as_root(space.into(), window, cx);
@@ -827,6 +831,9 @@ impl VisualTestContext {
             window.invalidator.set_phase(DrawPhase::None);
             window.refresh();
 
+            drop(element);
+            cx.element_arena.borrow_mut().clear();
+
             (request_layout_state, prepaint_state)
         })
     }

crates/gpui/src/app/visual_test_context.rs 🔗

@@ -356,7 +356,7 @@ impl VisualTestAppContext {
         predicate: impl Fn(&T) -> bool,
         timeout: Duration,
     ) -> Result<()> {
-        let start = std::time::Instant::now();
+        let start = web_time::Instant::now();
         loop {
             {
                 let app = self.app.borrow();

crates/gpui/src/elements/animation.rs 🔗

@@ -1,7 +1,5 @@
-use std::{
-    rc::Rc,
-    time::{Duration, Instant},
-};
+use scheduler::Instant;
+use std::{rc::Rc, time::Duration};
 
 use crate::{
     AnyElement, App, Element, ElementId, GlobalElementId, InspectorElementId, IntoElement, Window,

crates/gpui/src/elements/deferred.rs 🔗

@@ -62,7 +62,7 @@ impl Element for Deferred {
     ) {
         let child = self.child.take().unwrap();
         let element_offset = window.element_offset();
-        window.defer_draw(child, element_offset, self.priority)
+        window.defer_draw(child, element_offset, self.priority, None)
     }
 
     fn paint(

crates/gpui/src/elements/div.rs 🔗

@@ -26,6 +26,7 @@ use crate::{
     size,
 };
 use collections::HashMap;
+use gpui_util::ResultExt;
 use refineable::Refineable;
 use smallvec::SmallVec;
 use stacksafe::{StackSafe, stacksafe};
@@ -40,7 +41,6 @@ use std::{
     sync::Arc,
     time::Duration,
 };
-use util::ResultExt;
 
 use super::ImageCacheProvider;
 
@@ -1886,18 +1886,18 @@ impl Interactivity {
             // high for the maximum scroll, we round the scroll max to 2 decimal
             // places here.
             let padded_content_size = self.content_size + padding_size;
-            let scroll_max = (padded_content_size - bounds.size)
+            let scroll_max = Point::from(padded_content_size - bounds.size)
                 .map(round_to_two_decimals)
                 .max(&Default::default());
             // Clamp scroll offset in case scroll max is smaller now (e.g., if children
             // were removed or the bounds became larger).
             let mut scroll_offset = scroll_offset.borrow_mut();
 
-            scroll_offset.x = scroll_offset.x.clamp(-scroll_max.width, px(0.));
+            scroll_offset.x = scroll_offset.x.clamp(-scroll_max.x, px(0.));
             if scroll_to_bottom {
-                scroll_offset.y = -scroll_max.height;
+                scroll_offset.y = -scroll_max.y;
             } else {
-                scroll_offset.y = scroll_offset.y.clamp(-scroll_max.height, px(0.));
+                scroll_offset.y = scroll_offset.y.clamp(-scroll_max.y, px(0.));
             }
 
             if let Some(mut scroll_handle_state) = tracked_scroll_handle {
@@ -3285,7 +3285,7 @@ impl ScrollAnchor {
 struct ScrollHandleState {
     offset: Rc<RefCell<Point<Pixels>>>,
     bounds: Bounds<Pixels>,
-    max_offset: Size<Pixels>,
+    max_offset: Point<Pixels>,
     child_bounds: Vec<Bounds<Pixels>>,
     scroll_to_bottom: bool,
     overflow: Point<Overflow>,
@@ -3329,7 +3329,7 @@ impl ScrollHandle {
     }
 
     /// Get the maximum scroll offset.
-    pub fn max_offset(&self) -> Size<Pixels> {
+    pub fn max_offset(&self) -> Point<Pixels> {
         self.0.borrow().max_offset
     }
 

crates/gpui/src/elements/img.rs 🔗

@@ -4,13 +4,15 @@ use crate::{
     Interactivity, IntoElement, LayoutId, Length, ObjectFit, Pixels, RenderImage, Resource,
     SharedString, SharedUri, StyleRefinement, Styled, Task, Window, px,
 };
-use anyhow::{Context as _, Result};
+use anyhow::Result;
 
-use futures::{AsyncReadExt, Future};
+use futures::Future;
+use gpui_util::ResultExt;
 use image::{
     AnimationDecoder, DynamicImage, Frame, ImageError, ImageFormat, Rgba,
     codecs::{gif::GifDecoder, webp::WebPDecoder},
 };
+use scheduler::Instant;
 use smallvec::SmallVec;
 use std::{
     fs,
@@ -19,10 +21,9 @@ use std::{
     path::{Path, PathBuf},
     str::FromStr,
     sync::Arc,
-    time::{Duration, Instant},
+    time::Duration,
 };
 use thiserror::Error;
-use util::ResultExt;
 
 use super::{Stateful, StatefulInteractiveElement};
 
@@ -49,7 +50,7 @@ pub enum ImageSource {
 }
 
 fn is_uri(uri: &str) -> bool {
-    http_client::Uri::from_str(uri).is_ok()
+    url::Url::from_str(uri).is_ok()
 }
 
 impl From<SharedUri> for ImageSource {
@@ -602,6 +603,9 @@ impl Asset for ImageAssetLoader {
             let bytes = match source.clone() {
                 Resource::Path(uri) => fs::read(uri.as_ref())?,
                 Resource::Uri(uri) => {
+                    use anyhow::Context as _;
+                    use futures::AsyncReadExt as _;
+
                     let mut response = client
                         .get(uri.as_ref(), ().into(), true)
                         .await

crates/gpui/src/elements/list.rs 🔗

@@ -491,7 +491,7 @@ impl ListState {
 
     /// Returns the maximum scroll offset according to the items we have measured.
     /// This value remains constant while dragging to prevent the scrollbar from moving away unexpectedly.
-    pub fn max_offset_for_scrollbar(&self) -> Size<Pixels> {
+    pub fn max_offset_for_scrollbar(&self) -> Point<Pixels> {
         let state = self.0.borrow();
         let bounds = state.last_layout_bounds.unwrap_or_default();
 
@@ -499,7 +499,7 @@ impl ListState {
             .scrollbar_drag_start_height
             .unwrap_or_else(|| state.items.summary().height);
 
-        Size::new(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height))
+        point(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height))
     }
 
     /// Returns the current scroll offset adjusted for the scrollbar

crates/gpui/src/elements/svg.rs 🔗

@@ -3,10 +3,9 @@ use std::{fs, path::Path, sync::Arc};
 use crate::{
     App, Asset, Bounds, Element, GlobalElementId, Hitbox, InspectorElementId, InteractiveElement,
     Interactivity, IntoElement, LayoutId, Pixels, Point, Radians, SharedString, Size,
-    StyleRefinement, Styled, TransformationMatrix, Window, geometry::Negate as _, point, px,
-    radians, size,
+    StyleRefinement, Styled, TransformationMatrix, Window, point, px, radians, size,
 };
-use util::ResultExt;
+use gpui_util::ResultExt;
 
 /// An SVG element.
 pub struct Svg {
@@ -254,7 +253,7 @@ impl Transformation {
             .translate(center.scale(scale_factor) + self.translate.scale(scale_factor))
             .rotate(self.rotate)
             .scale(self.scale)
-            .translate(center.scale(scale_factor).negate())
+            .translate(center.scale(-scale_factor))
     }
 }
 

crates/gpui/src/elements/text.rs 🔗

@@ -6,6 +6,7 @@ use crate::{
     WrappedLineLayout, register_tooltip_mouse_handlers, set_tooltip_on_window,
 };
 use anyhow::Context as _;
+use gpui_util::ResultExt;
 use itertools::Itertools;
 use smallvec::SmallVec;
 use std::{
@@ -16,7 +17,6 @@ use std::{
     rc::Rc,
     sync::Arc,
 };
-use util::ResultExt;
 
 impl Element for &'static str {
     type RequestLayoutState = TextLayout;

crates/gpui/src/executor.rs 🔗

@@ -1,18 +1,13 @@
 use crate::{App, PlatformDispatcher, PlatformScheduler};
 use futures::channel::mpsc;
+use futures::prelude::*;
+use gpui_util::TryFutureExt;
+use scheduler::Instant;
 use scheduler::Scheduler;
-use smol::prelude::*;
 use std::{
-    fmt::Debug,
-    future::Future,
-    marker::PhantomData,
-    mem,
-    pin::Pin,
-    rc::Rc,
-    sync::Arc,
-    time::{Duration, Instant},
+    fmt::Debug, future::Future, marker::PhantomData, mem, pin::Pin, rc::Rc, sync::Arc,
+    time::Duration,
 };
-use util::TryFutureExt;
 
 pub use scheduler::{FallibleTask, ForegroundExecutor as SchedulerForegroundExecutor, Priority};
 

crates/gpui/src/geometry.rs 🔗

@@ -78,6 +78,7 @@ pub trait Along {
     Deserialize,
     JsonSchema,
     Hash,
+    Neg,
 )]
 #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 #[repr(C)]
@@ -182,12 +183,6 @@ impl<T: Clone + Debug + Default + PartialEq> Along for Point<T> {
     }
 }
 
-impl<T: Clone + Debug + Default + PartialEq + Negate> Negate for Point<T> {
-    fn negate(self) -> Self {
-        self.map(Negate::negate)
-    }
-}
-
 impl Point<Pixels> {
     /// Scales the point by a given factor, which is typically derived from the resolution
     /// of a target display to ensure proper sizing of UI elements.
@@ -393,7 +388,9 @@ impl<T: Clone + Debug + Default + PartialEq + Display> Display for Point<T> {
 ///
 /// This struct is generic over the type `T`, which can be any type that implements `Clone`, `Default`, and `Debug`.
 /// It is commonly used to specify dimensions for elements in a UI, such as a window or element.
-#[derive(Refineable, Default, Clone, Copy, PartialEq, Div, Hash, Serialize, Deserialize)]
+#[derive(
+    Add, Clone, Copy, Default, Deserialize, Div, Hash, Neg, PartialEq, Refineable, Serialize, Sub,
+)]
 #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 #[repr(C)]
 pub struct Size<T: Clone + Debug + Default + PartialEq> {
@@ -598,34 +595,6 @@ where
     }
 }
 
-impl<T> Sub for Size<T>
-where
-    T: Sub<Output = T> + Clone + Debug + Default + PartialEq,
-{
-    type Output = Size<T>;
-
-    fn sub(self, rhs: Self) -> Self::Output {
-        Size {
-            width: self.width - rhs.width,
-            height: self.height - rhs.height,
-        }
-    }
-}
-
-impl<T> Add for Size<T>
-where
-    T: Add<Output = T> + Clone + Debug + Default + PartialEq,
-{
-    type Output = Size<T>;
-
-    fn add(self, rhs: Self) -> Self::Output {
-        Size {
-            width: self.width + rhs.width,
-            height: self.height + rhs.height,
-        }
-    }
-}
-
 impl<T, Rhs> Mul<Rhs> for Size<T>
 where
     T: Mul<Rhs, Output = Rhs> + Clone + Debug + Default + PartialEq,
@@ -1245,6 +1214,15 @@ where
     }
 }
 
+impl<T: Clone + Debug + Default + PartialEq> From<Size<T>> for Point<T> {
+    fn from(size: Size<T>) -> Self {
+        Self {
+            x: size.width,
+            y: size.height,
+        }
+    }
+}
+
 impl<T> Bounds<T>
 where
     T: Add<T, Output = T> + Clone + Debug + Default + PartialEq,
@@ -3754,48 +3732,6 @@ impl Half for Rems {
     }
 }
 
-/// Provides a trait for types that can negate their values.
-pub trait Negate {
-    /// Returns the negation of the given value
-    fn negate(self) -> Self;
-}
-
-impl Negate for i32 {
-    fn negate(self) -> Self {
-        -self
-    }
-}
-
-impl Negate for f32 {
-    fn negate(self) -> Self {
-        -self
-    }
-}
-
-impl Negate for DevicePixels {
-    fn negate(self) -> Self {
-        Self(-self.0)
-    }
-}
-
-impl Negate for ScaledPixels {
-    fn negate(self) -> Self {
-        Self(-self.0)
-    }
-}
-
-impl Negate for Pixels {
-    fn negate(self) -> Self {
-        Self(-self.0)
-    }
-}
-
-impl Negate for Rems {
-    fn negate(self) -> Self {
-        Self(-self.0)
-    }
-}
-
 /// A trait for checking if a value is zero.
 ///
 /// This trait provides a method to determine if a value is considered to be zero.

crates/gpui/src/gpui.rs 🔗

@@ -1,5 +1,5 @@
 #![doc = include_str!("../README.md")]
-#![deny(missing_docs)]
+#![warn(missing_docs)]
 #![allow(clippy::type_complexity)] // Not useful, GPUI makes heavy use of callbacks
 #![allow(clippy::collapsible_else_if)] // False positives in platform specific code
 #![allow(unused_mut)] // False positives in platform specific code
@@ -35,7 +35,7 @@ mod platform;
 pub mod prelude;
 /// Profiling utilities for task timing and thread performance tracking.
 pub mod profiler;
-#[cfg(any(target_os = "windows", target_os = "linux"))]
+#[cfg(any(target_os = "windows", target_os = "linux", target_family = "wasm"))]
 #[expect(missing_docs)]
 pub mod queue;
 mod scene;
@@ -87,6 +87,7 @@ pub use executor::*;
 pub use geometry::*;
 pub use global::*;
 pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action, test};
+pub use gpui_util::arc_cow::ArcCow;
 pub use http_client;
 pub use input::*;
 pub use inspector::*;
@@ -96,7 +97,7 @@ pub use keymap::*;
 pub use path_builder::*;
 pub use platform::*;
 pub use profiler::*;
-#[cfg(any(target_os = "windows", target_os = "linux"))]
+#[cfg(any(target_os = "windows", target_os = "linux", target_family = "wasm"))]
 pub use queue::{PriorityQueueReceiver, PriorityQueueSender};
 pub use refineable::*;
 pub use scene::*;
@@ -113,7 +114,7 @@ pub use taffy::{AvailableSpace, LayoutId};
 #[cfg(any(test, feature = "test-support"))]
 pub use test::*;
 pub use text_system::*;
-pub use util::{FutureExt, Timeout, arc_cow::ArcCow};
+pub use util::{FutureExt, Timeout};
 pub use view::*;
 pub use window::*;
 

crates/gpui/src/platform.rs 🔗

@@ -44,6 +44,7 @@ use image::RgbaImage;
 use image::codecs::gif::GifDecoder;
 use image::{AnimationDecoder as _, Frame};
 use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
+use scheduler::Instant;
 pub use scheduler::RunnableMeta;
 use schemars::JsonSchema;
 use seahash::SeaHasher;
@@ -53,7 +54,7 @@ use std::borrow::Cow;
 use std::hash::{Hash, Hasher};
 use std::io::Cursor;
 use std::ops;
-use std::time::{Duration, Instant};
+use std::time::Duration;
 use std::{
     fmt::{self, Debug},
     ops::Range,
@@ -228,7 +229,7 @@ pub trait Platform: 'static {
 }
 
 /// A handle to a platform's display, e.g. a monitor or laptop screen.
-pub trait PlatformDisplay: Send + Sync + Debug {
+pub trait PlatformDisplay: Debug {
     /// Get the ID for this display
     fn id(&self) -> DisplayId;
 
@@ -560,7 +561,7 @@ pub trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
 pub type RunnableVariant = Runnable<RunnableMeta>;
 
 #[doc(hidden)]
-pub type TimerResolutionGuard = util::Deferred<Box<dyn FnOnce() + Send>>;
+pub type TimerResolutionGuard = gpui_util::Deferred<Box<dyn FnOnce() + Send>>;
 
 /// This type is public so that our test macro can generate and use it, but it should not
 /// be considered part of our public API.
@@ -579,7 +580,7 @@ pub trait PlatformDispatcher: Send + Sync {
     }
 
     fn increase_timer_resolution(&self) -> TimerResolutionGuard {
-        util::defer(Box::new(|| {}))
+        gpui_util::defer(Box::new(|| {}))
     }
 
     #[cfg(any(test, feature = "test-support"))]
@@ -827,7 +828,7 @@ impl From<RenderImageParams> for AtlasKey {
 }
 
 #[expect(missing_docs)]
-pub trait PlatformAtlas: Send + Sync {
+pub trait PlatformAtlas {
     fn get_or_insert_with<'a>(
         &self,
         key: &AtlasKey,
@@ -1235,7 +1236,7 @@ pub struct WindowOptions {
     ),
     allow(dead_code)
 )]
-#[expect(missing_docs)]
+#[allow(missing_docs)]
 pub struct WindowParams {
     pub bounds: Bounds<Pixels>,
 

crates/gpui/src/platform/scap_screen_capture.rs 🔗

@@ -126,7 +126,7 @@ fn start_default_target_screen_capture(
 ) {
     // Due to use of blocking APIs, a dedicated thread is used.
     std::thread::spawn(|| {
-        let start_result = util::maybe!({
+        let start_result = gpui_util::maybe!({
             let mut capturer = new_scap_capturer(None)?;
             capturer.start_capture();
             let first_frame = capturer

crates/gpui/src/platform/test/dispatcher.rs 🔗

@@ -1,11 +1,12 @@
 use crate::{PlatformDispatcher, Priority, RunnableVariant};
+use scheduler::Instant;
 use scheduler::{Clock, Scheduler, SessionId, TestScheduler, TestSchedulerConfig, Yield};
 use std::{
     sync::{
         Arc,
         atomic::{AtomicUsize, Ordering},
     },
-    time::{Duration, Instant},
+    time::Duration,
 };
 
 /// TestDispatcher provides deterministic async execution for tests.
@@ -47,6 +48,10 @@ impl TestDispatcher {
         self.session_id
     }
 
+    pub fn drain_tasks(&self) {
+        self.scheduler.drain_tasks();
+    }
+
     pub fn advance_clock(&self, by: Duration) {
         self.scheduler.advance_clock(by);
     }

crates/gpui/src/platform/test/window.rs 🔗

@@ -19,6 +19,7 @@ pub(crate) struct TestWindowState {
     pub(crate) title: Option<String>,
     pub(crate) edited: bool,
     platform: Weak<TestPlatform>,
+    // TODO: Replace with `Rc`
     sprite_atlas: Arc<dyn PlatformAtlas>,
     pub(crate) should_close_handler: Option<Box<dyn FnMut() -> bool>>,
     hit_test_window_control_callback: Option<Box<dyn FnMut() -> Option<WindowControlArea>>>,

crates/gpui/src/platform_scheduler.rs 🔗

@@ -2,7 +2,10 @@ use crate::{PlatformDispatcher, RunnableMeta};
 use async_task::Runnable;
 use chrono::{DateTime, Utc};
 use futures::channel::oneshot;
+use scheduler::Instant;
 use scheduler::{Clock, Priority, Scheduler, SessionId, TestScheduler, Timer};
+#[cfg(not(target_family = "wasm"))]
+use std::task::{Context, Poll};
 use std::{
     future::Future,
     pin::Pin,
@@ -10,10 +13,8 @@ use std::{
         Arc,
         atomic::{AtomicU16, Ordering},
     },
-    task::{Context, Poll},
-    time::{Duration, Instant},
+    time::Duration,
 };
-use waker_fn::waker_fn;
 
 /// A production implementation of [`Scheduler`] that wraps a [`PlatformDispatcher`].
 ///
@@ -43,37 +44,48 @@ impl Scheduler for PlatformScheduler {
     fn block(
         &self,
         _session_id: Option<SessionId>,
-        mut future: Pin<&mut dyn Future<Output = ()>>,
-        timeout: Option<Duration>,
+        #[cfg_attr(target_family = "wasm", allow(unused_mut))] mut future: Pin<
+            &mut dyn Future<Output = ()>,
+        >,
+        #[cfg_attr(target_family = "wasm", allow(unused_variables))] timeout: Option<Duration>,
     ) -> bool {
-        let deadline = timeout.map(|t| Instant::now() + t);
-        let parker = parking::Parker::new();
-        let unparker = parker.unparker();
-        let waker = waker_fn(move || {
-            unparker.unpark();
-        });
-        let mut cx = Context::from_waker(&waker);
-        if let Poll::Ready(()) = future.as_mut().poll(&mut cx) {
-            return true;
+        #[cfg(target_family = "wasm")]
+        {
+            let _ = (&future, &timeout);
+            panic!("Cannot block on wasm")
         }
+        #[cfg(not(target_family = "wasm"))]
+        {
+            use waker_fn::waker_fn;
+            let deadline = timeout.map(|t| Instant::now() + t);
+            let parker = parking::Parker::new();
+            let unparker = parker.unparker();
+            let waker = waker_fn(move || {
+                unparker.unpark();
+            });
+            let mut cx = Context::from_waker(&waker);
+            if let Poll::Ready(()) = future.as_mut().poll(&mut cx) {
+                return true;
+            }
 
-        let park_deadline = |deadline: Instant| {
-            // Timer expirations are only delivered every ~15.6 milliseconds by default on Windows.
-            // We increase the resolution during this wait so that short timeouts stay reasonably short.
-            let _timer_guard = self.dispatcher.increase_timer_resolution();
-            parker.park_deadline(deadline)
-        };
-
-        loop {
-            match deadline {
-                Some(deadline) if !park_deadline(deadline) && deadline <= Instant::now() => {
-                    return false;
+            let park_deadline = |deadline: Instant| {
+                // Timer expirations are only delivered every ~15.6 milliseconds by default on Windows.
+                // We increase the resolution during this wait so that short timeouts stay reasonably short.
+                let _timer_guard = self.dispatcher.increase_timer_resolution();
+                parker.park_deadline(deadline)
+            };
+
+            loop {
+                match deadline {
+                    Some(deadline) if !park_deadline(deadline) && deadline <= Instant::now() => {
+                        return false;
+                    }
+                    Some(_) => (),
+                    None => parker.park(),
+                }
+                if let Poll::Ready(()) = future.as_mut().poll(&mut cx) {
+                    break true;
                 }
-                Some(_) => (),
-                None => parker.park(),
-            }
-            if let Poll::Ready(()) = future.as_mut().poll(&mut cx) {
-                break true;
             }
         }
     }

crates/gpui/src/profiler.rs 🔗

@@ -1,3 +1,4 @@
+use scheduler::Instant;
 use std::{
     cell::LazyCell,
     collections::HashMap,
@@ -5,7 +6,6 @@ use std::{
     hash::{DefaultHasher, Hash},
     sync::Arc,
     thread::ThreadId,
-    time::Instant,
 };
 
 use serde::{Deserialize, Serialize};

crates/gpui/src/queue.rs 🔗

@@ -41,6 +41,32 @@ impl<T> PriorityQueueState<T> {
         }
 
         let mut queues = self.queues.lock();
+        Self::push(&mut queues, priority, item);
+        self.condvar.notify_one();
+        Ok(())
+    }
+
+    fn spin_send(&self, priority: Priority, item: T) -> Result<(), SendError<T>> {
+        if self
+            .receiver_count
+            .load(std::sync::atomic::Ordering::Relaxed)
+            == 0
+        {
+            return Err(SendError(item));
+        }
+
+        let mut queues = loop {
+            if let Some(guard) = self.queues.try_lock() {
+                break guard;
+            }
+            std::hint::spin_loop();
+        };
+        Self::push(&mut queues, priority, item);
+        self.condvar.notify_one();
+        Ok(())
+    }
+
+    fn push(queues: &mut PriorityQueues<T>, priority: Priority, item: T) {
         match priority {
             Priority::RealtimeAudio => unreachable!(
                 "Realtime audio priority runs on a dedicated thread and is never queued"
@@ -49,8 +75,6 @@ impl<T> PriorityQueueState<T> {
             Priority::Medium => queues.medium_priority.push_back(item),
             Priority::Low => queues.low_priority.push_back(item),
         };
-        self.condvar.notify_one();
-        Ok(())
     }
 
     fn recv<'a>(&'a self) -> Result<parking_lot::MutexGuard<'a, PriorityQueues<T>>, RecvError> {
@@ -84,6 +108,28 @@ impl<T> PriorityQueueState<T> {
             Ok(Some(queues))
         }
     }
+
+    fn spin_try_recv<'a>(
+        &'a self,
+    ) -> Result<Option<parking_lot::MutexGuard<'a, PriorityQueues<T>>>, RecvError> {
+        let queues = loop {
+            if let Some(guard) = self.queues.try_lock() {
+                break guard;
+            }
+            std::hint::spin_loop();
+        };
+
+        let sender_count = self.sender_count.load(std::sync::atomic::Ordering::Relaxed);
+        if queues.is_empty() && sender_count == 0 {
+            return Err(crate::queue::RecvError);
+        }
+
+        if queues.is_empty() {
+            Ok(None)
+        } else {
+            Ok(Some(queues))
+        }
+    }
 }
 
 #[doc(hidden)]
@@ -100,6 +146,11 @@ impl<T> PriorityQueueSender<T> {
         self.state.send(priority, item)?;
         Ok(())
     }
+
+    pub fn spin_send(&self, priority: Priority, item: T) -> Result<(), SendError<T>> {
+        self.state.spin_send(priority, item)?;
+        Ok(())
+    }
 }
 
 impl<T> Drop for PriorityQueueSender<T> {
@@ -183,6 +234,44 @@ impl<T> PriorityQueueReceiver<T> {
         self.pop_inner(false)
     }
 
+    pub fn spin_try_pop(&mut self) -> Result<Option<T>, RecvError> {
+        use Priority as P;
+
+        let Some(mut queues) = self.state.spin_try_recv()? else {
+            return Ok(None);
+        };
+
+        let high = P::High.weight() * !queues.high_priority.is_empty() as u32;
+        let medium = P::Medium.weight() * !queues.medium_priority.is_empty() as u32;
+        let low = P::Low.weight() * !queues.low_priority.is_empty() as u32;
+        let mut mass = high + medium + low;
+
+        if !queues.high_priority.is_empty() {
+            let flip = self.rand.random_ratio(P::High.weight(), mass);
+            if flip {
+                return Ok(queues.high_priority.pop_front());
+            }
+            mass -= P::High.weight();
+        }
+
+        if !queues.medium_priority.is_empty() {
+            let flip = self.rand.random_ratio(P::Medium.weight(), mass);
+            if flip {
+                return Ok(queues.medium_priority.pop_front());
+            }
+            mass -= P::Medium.weight();
+        }
+
+        if !queues.low_priority.is_empty() {
+            let flip = self.rand.random_ratio(P::Low.weight(), mass);
+            if flip {
+                return Ok(queues.low_priority.pop_front());
+            }
+        }
+
+        Ok(None)
+    }
+
     /// Pops an element from the priority queue blocking if necessary.
     ///
     /// This method is best suited if you only intend to pop one element, for better performance

crates/gpui/src/scene.rs 🔗

@@ -459,7 +459,7 @@ impl<'a> Iterator for BatchIterator<'a> {
     ),
     allow(dead_code)
 )]
-#[expect(missing_docs)]
+#[allow(missing_docs)]
 pub enum PrimitiveBatch {
     Shadows(Range<usize>),
     Quads(Range<usize>),
@@ -711,7 +711,7 @@ impl From<PolychromeSprite> for Primitive {
 }
 
 #[derive(Clone, Debug)]
-#[expect(missing_docs)]
+#[allow(missing_docs)]
 pub struct PaintSurface {
     pub order: DrawOrder,
     pub bounds: Bounds<ScaledPixels>,

crates/gpui/src/shared_string.rs 🔗

@@ -1,12 +1,12 @@
 use derive_more::{Deref, DerefMut};
 
+use gpui_util::arc_cow::ArcCow;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use std::{
     borrow::{Borrow, Cow},
     sync::Arc,
 };
-use util::arc_cow::ArcCow;
 
 /// A shared string is an immutable string that can be cheaply cloned in GPUI
 /// tasks. Essentially an abstraction over an `Arc<str>` and `&'static str`,

crates/gpui/src/subscription.rs 🔗

@@ -1,11 +1,11 @@
 use collections::{BTreeMap, BTreeSet};
+use gpui_util::post_inc;
 use std::{
     cell::{Cell, RefCell},
     fmt::Debug,
     mem,
     rc::Rc,
 };
-use util::post_inc;
 
 pub(crate) struct SubscriberSet<EmitterKey, Callback>(
     Rc<RefCell<SubscriberSetState<EmitterKey, Callback>>>,

crates/gpui/src/test.rs 🔗

@@ -27,7 +27,6 @@
 //! ```
 use crate::{Entity, Subscription, TestAppContext, TestDispatcher};
 use futures::StreamExt as _;
-use smol::channel;
 use std::{
     env,
     panic::{self, RefUnwindSafe},
@@ -136,7 +135,7 @@ fn calculate_seeds(
 
 /// A test struct for converting an observation callback into a stream.
 pub struct Observation<T> {
-    rx: Pin<Box<channel::Receiver<T>>>,
+    rx: Pin<Box<async_channel::Receiver<T>>>,
     _subscription: Subscription,
 }
 
@@ -153,10 +152,10 @@ impl<T: 'static> futures::Stream for Observation<T> {
 
 /// observe returns a stream of the change events from the given `Entity`
 pub fn observe<T: 'static>(entity: &Entity<T>, cx: &mut TestAppContext) -> Observation<()> {
-    let (tx, rx) = smol::channel::unbounded();
+    let (tx, rx) = async_channel::unbounded();
     let _subscription = cx.update(|cx| {
         cx.observe(entity, move |_, _| {
-            let _ = smol::block_on(tx.send(()));
+            let _ = pollster::block_on(tx.send(()));
         })
     });
     let rx = Box::pin(rx);

crates/gpui/src/util.rs 🔗

@@ -7,8 +7,6 @@ use std::{
     time::Duration,
 };
 
-pub use util::*;
-
 /// A helper trait for building complex objects with imperative conditionals in a fluent style.
 pub trait FluentBuilder {
     /// Imperatively modify self with the given closure.

crates/gpui/src/window.rs 🔗

@@ -26,11 +26,14 @@ use core_video::pixel_buffer::CVPixelBuffer;
 use derive_more::{Deref, DerefMut};
 use futures::FutureExt;
 use futures::channel::oneshot;
+use gpui_util::post_inc;
+use gpui_util::{ResultExt, measure};
 use itertools::FoldWhile::{Continue, Done};
 use itertools::Itertools;
 use parking_lot::RwLock;
 use raw_window_handle::{HandleError, HasDisplayHandle, HasWindowHandle};
 use refineable::Refineable;
+use scheduler::Instant;
 use slotmap::SlotMap;
 use smallvec::SmallVec;
 use std::{
@@ -48,10 +51,8 @@ use std::{
         Arc, Weak,
         atomic::{AtomicUsize, Ordering::SeqCst},
     },
-    time::{Duration, Instant},
+    time::Duration,
 };
-use util::post_inc;
-use util::{ResultExt, measure};
 use uuid::Uuid;
 
 mod prompts;
@@ -725,6 +726,7 @@ pub(crate) struct DeferredDraw {
     parent_node: DispatchNodeId,
     element_id_stack: SmallVec<[ElementId; 32]>,
     text_style_stack: Vec<TextStyleRefinement>,
+    content_mask: Option<ContentMask<Pixels>>,
     rem_size: Pixels,
     element: Option<AnyElement>,
     absolute_offset: Point<Pixels>,
@@ -2428,15 +2430,18 @@ impl Window {
                 .set_active_node(deferred_draw.parent_node);
 
             let prepaint_start = self.prepaint_index();
+            let content_mask = deferred_draw.content_mask.clone();
             if let Some(element) = deferred_draw.element.as_mut() {
                 self.with_rendered_view(deferred_draw.current_view, |window| {
-                    window.with_rem_size(Some(deferred_draw.rem_size), |window| {
-                        window.with_absolute_element_offset(
-                            deferred_draw.absolute_offset,
-                            |window| {
-                                element.prepaint(window, cx);
-                            },
-                        );
+                    window.with_content_mask(content_mask, |window| {
+                        window.with_rem_size(Some(deferred_draw.rem_size), |window| {
+                            window.with_absolute_element_offset(
+                                deferred_draw.absolute_offset,
+                                |window| {
+                                    element.prepaint(window, cx);
+                                },
+                            );
+                        });
                     });
                 })
             } else {
@@ -2468,10 +2473,13 @@ impl Window {
                 .set_active_node(deferred_draw.parent_node);
 
             let paint_start = self.paint_index();
+            let content_mask = deferred_draw.content_mask.clone();
             if let Some(element) = deferred_draw.element.as_mut() {
                 self.with_rendered_view(deferred_draw.current_view, |window| {
-                    window.with_rem_size(Some(deferred_draw.rem_size), |window| {
-                        element.paint(window, cx);
+                    window.with_content_mask(content_mask, |window| {
+                        window.with_rem_size(Some(deferred_draw.rem_size), |window| {
+                            element.paint(window, cx);
+                        });
                     })
                 })
             } else {
@@ -2535,6 +2543,7 @@ impl Window {
                     parent_node: reused_subtree.refresh_node_id(deferred_draw.parent_node),
                     element_id_stack: deferred_draw.element_id_stack.clone(),
                     text_style_stack: deferred_draw.text_style_stack.clone(),
+                    content_mask: deferred_draw.content_mask.clone(),
                     rem_size: deferred_draw.rem_size,
                     priority: deferred_draw.priority,
                     element: None,
@@ -3018,12 +3027,16 @@ impl Window {
     /// at a later time. The `priority` parameter determines the drawing order relative to other deferred elements,
     /// with higher values being drawn on top.
     ///
+    /// When `content_mask` is provided, the deferred element will be clipped to that region during
+    /// both prepaint and paint. When `None`, no additional clipping is applied.
+    ///
     /// This method should only be called as part of the prepaint phase of element drawing.
     pub fn defer_draw(
         &mut self,
         element: AnyElement,
         absolute_offset: Point<Pixels>,
         priority: usize,
+        content_mask: Option<ContentMask<Pixels>>,
     ) {
         self.invalidator.debug_assert_prepaint();
         let parent_node = self.next_frame.dispatch_tree.active_node_id().unwrap();
@@ -3032,6 +3045,7 @@ impl Window {
             parent_node,
             element_id_stack: self.element_id_stack.clone(),
             text_style_stack: self.text_style_stack.clone(),
+            content_mask,
             rem_size: self.rem_size(),
             priority,
             element: Some(element),

crates/gpui_linux/Cargo.toml 🔗

@@ -18,8 +18,7 @@ wayland = [
     "bitflags",
     "gpui_wgpu",
     "ashpd/wayland",
-    "cosmic-text",
-    "font-kit",
+
     "calloop-wayland-source",
     "wayland-backend",
     "wayland-client",
@@ -35,8 +34,7 @@ wayland = [
 x11 = [
     "gpui_wgpu",
     "ashpd",
-    "cosmic-text",
-    "font-kit",
+
     "as-raw-xcb-connection",
     "x11rb",
     "xkbcommon",
@@ -58,22 +56,24 @@ bytemuck = "1"
 collections.workspace = true
 futures.workspace = true
 gpui.workspace = true
-gpui_wgpu = { workspace = true, optional = true }
+gpui_wgpu = { workspace = true, optional = true, features = ["font-kit"] }
 http_client.workspace = true
 itertools.workspace = true
 libc.workspace = true
 log.workspace = true
 parking_lot.workspace = true
 pathfinder_geometry = "0.5"
+pollster.workspace = true
 profiling.workspace = true
 smallvec.workspace = true
 smol.workspace = true
 strum.workspace = true
+url.workspace = true
 util.workspace = true
 uuid.workspace = true
 
 # Always used
-oo7 = { version = "0.5.0", default-features = false, features = [
+oo7 = { version = "0.6", default-features = false, features = [
     "async-std",
     "native_crypto",
 ] }
@@ -82,12 +82,7 @@ raw-window-handle = "0.6"
 
 # Used in both windowing options
 ashpd = { workspace = true, optional = true }
-cosmic-text = { version = "0.17.0", optional = true }
 swash = { version = "0.2.6" }
-# WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io
-font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", features = [
-    "source-fontconfig-dlopen",
-], optional = true }
 bitflags = { workspace = true, optional = true }
 filedescriptor = { version = "0.8.2", optional = true }
 open = { version = "5.2.0", optional = true }
@@ -126,6 +121,7 @@ x11rb = { version = "0.13.1", features = [
     "cursor",
     "resource_manager",
     "sync",
+    "dri3",
 ], optional = true }
 # WARNING: If you change this, you must also publish a new version of zed-xim to crates.io
 xim = { git = "https://github.com/zed-industries/xim-rs.git", rev = "16f35a2c881b815a2b6cdfd6687988e84f8447d8", features = [

crates/gpui_linux/src/linux/headless/client.rs 🔗

@@ -64,6 +64,7 @@ impl LinuxClient for HeadlessClient {
         None
     }
 
+    #[cfg(feature = "screen-capture")]
     fn screen_capture_sources(
         &self,
     ) -> futures::channel::oneshot::Receiver<anyhow::Result<Vec<Rc<dyn gpui::ScreenCaptureSource>>>>

crates/gpui_linux/src/linux/platform.rs 🔗

@@ -55,10 +55,12 @@ pub(crate) trait LinuxClient {
     fn display(&self, id: DisplayId) -> Option<Rc<dyn PlatformDisplay>>;
     fn primary_display(&self) -> Option<Rc<dyn PlatformDisplay>>;
 
+    #[cfg(feature = "screen-capture")]
     fn is_screen_capture_supported(&self) -> bool {
         false
     }
 
+    #[cfg(feature = "screen-capture")]
     fn screen_capture_sources(
         &self,
     ) -> oneshot::Receiver<Result<Vec<Rc<dyn gpui::ScreenCaptureSource>>>> {
@@ -122,7 +124,7 @@ impl LinuxCommon {
         let (main_sender, main_receiver) = PriorityQueueCalloopReceiver::new();
 
         #[cfg(any(feature = "wayland", feature = "x11"))]
-        let text_system = Arc::new(crate::linux::CosmicTextSystem::new());
+        let text_system = Arc::new(crate::linux::CosmicTextSystem::new("IBM Plex Sans"));
         #[cfg(not(any(feature = "wayland", feature = "x11")))]
         let text_system = Arc::new(gpui::NoopTextSystem::new());
 
@@ -227,17 +229,14 @@ impl<P: LinuxClient + 'static> Platform for LinuxPlatform<P> {
         log::info!("Restarting process, using app path: {:?}", app_path);
 
         // Script to wait for the current process to exit and then restart the app.
-        let script = format!(
-            r#"
-            while kill -0 {pid} 2>/dev/null; do
+        // Pass dynamic values as positional parameters to avoid shell interpolation issues.
+        let script = r#"
+            while kill -0 "$0" 2>/dev/null; do
                 sleep 0.1
             done
 
-            {app_path}
-            "#,
-            pid = app_pid,
-            app_path = app_path.display()
-        );
+            "$1"
+            "#;
 
         #[allow(
             clippy::disallowed_methods,
@@ -247,6 +246,8 @@ impl<P: LinuxClient + 'static> Platform for LinuxPlatform<P> {
             .arg("bash")
             .arg("-c")
             .arg(script)
+            .arg(&app_pid)
+            .arg(&app_path)
             .process_group(0)
             .spawn();
 
@@ -364,7 +365,8 @@ impl<P: LinuxClient + 'static> Platform for LinuxPlatform<P> {
                         response
                             .uris()
                             .iter()
-                            .filter_map(|uri| uri.to_file_path().ok())
+                            .filter_map(|uri: &ashpd::Uri| url::Url::parse(uri.as_str()).ok())
+                            .filter_map(|uri: url::Url| uri.to_file_path().ok())
                             .collect::<Vec<_>>(),
                     )),
                     Err(ashpd::Error::Response(_)) => Ok(None),
@@ -426,7 +428,8 @@ impl<P: LinuxClient + 'static> Platform for LinuxPlatform<P> {
                         Ok(response) => Ok(response
                             .uris()
                             .first()
-                            .and_then(|uri| uri.to_file_path().ok())),
+                            .and_then(|uri: &ashpd::Uri| url::Url::parse(uri.as_str()).ok())
+                            .and_then(|uri: url::Url| uri.to_file_path().ok())),
                         Err(ashpd::Error::Response(_)) => Ok(None),
                         Err(e) => Err(e.into()),
                     };
@@ -627,7 +630,7 @@ pub(super) fn open_uri_internal(
     uri: &str,
     activation_token: Option<String>,
 ) {
-    if let Some(uri) = ashpd::url::Url::parse(uri).log_err() {
+    if let Some(uri) = ashpd::Uri::parse(uri).log_err() {
         executor
             .spawn(async move {
                 match ashpd::desktop::open_uri::OpenFileRequest::default()
@@ -1034,6 +1037,46 @@ pub(super) fn capslock_from_xkb(keymap_state: &State) -> gpui::Capslock {
     gpui::Capslock { on }
 }
 
+/// Resolve a Linux `dev_t` to PCI vendor/device IDs via sysfs, returning a
+/// [`CompositorGpuHint`] that the GPU adapter selection code can use to
+/// prioritize the compositor's rendering device.
+#[cfg(any(feature = "wayland", feature = "x11"))]
+pub(super) fn compositor_gpu_hint_from_dev_t(dev: u64) -> Option<gpui_wgpu::CompositorGpuHint> {
+    fn dev_major(dev: u64) -> u32 {
+        ((dev >> 8) & 0xfff) as u32 | (((dev >> 32) & !0xfff) as u32)
+    }
+
+    fn dev_minor(dev: u64) -> u32 {
+        (dev & 0xff) as u32 | (((dev >> 12) & !0xff) as u32)
+    }
+
+    fn read_sysfs_hex_id(path: &str) -> Option<u32> {
+        let content = std::fs::read_to_string(path).ok()?;
+        let trimmed = content.trim().strip_prefix("0x").unwrap_or(content.trim());
+        u32::from_str_radix(trimmed, 16).ok()
+    }
+
+    let major = dev_major(dev);
+    let minor = dev_minor(dev);
+
+    let vendor_path = format!("/sys/dev/char/{major}:{minor}/device/vendor");
+    let device_path = format!("/sys/dev/char/{major}:{minor}/device/device");
+
+    let vendor_id = read_sysfs_hex_id(&vendor_path)?;
+    let device_id = read_sysfs_hex_id(&device_path)?;
+
+    log::info!(
+        "Compositor GPU hint: vendor={:#06x}, device={:#06x} (from dev {major}:{minor})",
+        vendor_id,
+        device_id,
+    );
+
+    Some(gpui_wgpu::CompositorGpuHint {
+        vendor_id,
+        device_id,
+    })
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

crates/gpui_linux/src/linux/text_system.rs 🔗

@@ -1,538 +1 @@
-use anyhow::{Context as _, Ok, Result};
-use collections::HashMap;
-use cosmic_text::{
-    Attrs, AttrsList, Family, Font as CosmicTextFont, FontFeatures as CosmicFontFeatures,
-    FontSystem, ShapeBuffer, ShapeLine,
-};
-use gpui::{
-    Bounds, DevicePixels, Font, FontFeatures, FontId, FontMetrics, FontRun, GlyphId, LineLayout,
-    Pixels, PlatformTextSystem, RenderGlyphParams, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y,
-    ShapedGlyph, ShapedRun, SharedString, Size, TextRenderingMode, point, size,
-};
-
-use itertools::Itertools;
-use parking_lot::RwLock;
-use smallvec::SmallVec;
-use std::{borrow::Cow, sync::Arc};
-use swash::{
-    scale::{Render, ScaleContext, Source, StrikeWith},
-    zeno::{Format, Vector},
-};
-
-pub(crate) struct CosmicTextSystem(RwLock<CosmicTextSystemState>);
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-struct FontKey {
-    family: SharedString,
-    features: FontFeatures,
-}
-
-impl FontKey {
-    fn new(family: SharedString, features: FontFeatures) -> Self {
-        Self { family, features }
-    }
-}
-
-struct CosmicTextSystemState {
-    font_system: FontSystem,
-    scratch: ShapeBuffer,
-    swash_scale_context: ScaleContext,
-    /// Contains all already loaded fonts, including all faces. Indexed by `FontId`.
-    loaded_fonts: Vec<LoadedFont>,
-    /// Caches the `FontId`s associated with a specific family to avoid iterating the font database
-    /// for every font face in a family.
-    font_ids_by_family_cache: HashMap<FontKey, SmallVec<[FontId; 4]>>,
-}
-
-struct LoadedFont {
-    font: Arc<CosmicTextFont>,
-    features: CosmicFontFeatures,
-    is_known_emoji_font: bool,
-}
-
-impl CosmicTextSystem {
-    pub(crate) fn new() -> Self {
-        // todo(linux) make font loading non-blocking
-        let font_system = FontSystem::new();
-
-        Self(RwLock::new(CosmicTextSystemState {
-            font_system,
-            scratch: ShapeBuffer::default(),
-            swash_scale_context: ScaleContext::new(),
-            loaded_fonts: Vec::new(),
-            font_ids_by_family_cache: HashMap::default(),
-        }))
-    }
-}
-
-impl Default for CosmicTextSystem {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-impl PlatformTextSystem for CosmicTextSystem {
-    fn add_fonts(&self, fonts: Vec<Cow<'static, [u8]>>) -> Result<()> {
-        self.0.write().add_fonts(fonts)
-    }
-
-    fn all_font_names(&self) -> Vec<String> {
-        let mut result = self
-            .0
-            .read()
-            .font_system
-            .db()
-            .faces()
-            .filter_map(|face| face.families.first().map(|family| family.0.clone()))
-            .collect_vec();
-        result.sort();
-        result.dedup();
-        result
-    }
-
-    fn font_id(&self, font: &Font) -> Result<FontId> {
-        // todo(linux): Do we need to use CosmicText's Font APIs? Can we consolidate this to use font_kit?
-        let mut state = self.0.write();
-        let key = FontKey::new(font.family.clone(), font.features.clone());
-        let candidates = if let Some(font_ids) = state.font_ids_by_family_cache.get(&key) {
-            font_ids.as_slice()
-        } else {
-            let font_ids = state.load_family(&font.family, &font.features)?;
-            state.font_ids_by_family_cache.insert(key.clone(), font_ids);
-            state.font_ids_by_family_cache[&key].as_ref()
-        };
-
-        // todo(linux) ideally we would make fontdb's `find_best_match` pub instead of using font-kit here
-        let candidate_properties = candidates
-            .iter()
-            .map(|font_id| {
-                let database_id = state.loaded_font(*font_id).font.id();
-                let face_info = state.font_system.db().face(database_id).expect("");
-                face_info_into_properties(face_info)
-            })
-            .collect::<SmallVec<[_; 4]>>();
-
-        let ix =
-            font_kit::matching::find_best_match(&candidate_properties, &font_into_properties(font))
-                .context("requested font family contains no font matching the other parameters")?;
-
-        Ok(candidates[ix])
-    }
-
-    fn font_metrics(&self, font_id: FontId) -> FontMetrics {
-        let metrics = self
-            .0
-            .read()
-            .loaded_font(font_id)
-            .font
-            .as_swash()
-            .metrics(&[]);
-
-        FontMetrics {
-            units_per_em: metrics.units_per_em as u32,
-            ascent: metrics.ascent,
-            descent: -metrics.descent, // todo(linux) confirm this is correct
-            line_gap: metrics.leading,
-            underline_position: metrics.underline_offset,
-            underline_thickness: metrics.stroke_size,
-            cap_height: metrics.cap_height,
-            x_height: metrics.x_height,
-            // todo(linux): Compute this correctly
-            bounding_box: Bounds {
-                origin: point(0.0, 0.0),
-                size: size(metrics.max_width, metrics.ascent + metrics.descent),
-            },
-        }
-    }
-
-    fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> Result<Bounds<f32>> {
-        let lock = self.0.read();
-        let glyph_metrics = lock.loaded_font(font_id).font.as_swash().glyph_metrics(&[]);
-        let glyph_id = glyph_id.0 as u16;
-        // todo(linux): Compute this correctly
-        // see https://github.com/servo/font-kit/blob/master/src/loaders/freetype.rs#L614-L620
-        Ok(Bounds {
-            origin: point(0.0, 0.0),
-            size: size(
-                glyph_metrics.advance_width(glyph_id),
-                glyph_metrics.advance_height(glyph_id),
-            ),
-        })
-    }
-
-    fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result<Size<f32>> {
-        self.0.read().advance(font_id, glyph_id)
-    }
-
-    fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option<GlyphId> {
-        self.0.read().glyph_for_char(font_id, ch)
-    }
-
-    fn glyph_raster_bounds(&self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
-        self.0.write().raster_bounds(params)
-    }
-
-    fn rasterize_glyph(
-        &self,
-        params: &RenderGlyphParams,
-        raster_bounds: Bounds<DevicePixels>,
-    ) -> Result<(Size<DevicePixels>, Vec<u8>)> {
-        self.0.write().rasterize_glyph(params, raster_bounds)
-    }
-
-    fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> LineLayout {
-        self.0.write().layout_line(text, font_size, runs)
-    }
-
-    fn recommended_rendering_mode(
-        &self,
-        _font_id: FontId,
-        _font_size: Pixels,
-    ) -> TextRenderingMode {
-        // Ideally, we'd use fontconfig to read the user preference.
-        TextRenderingMode::Subpixel
-    }
-}
-
-impl CosmicTextSystemState {
-    fn loaded_font(&self, font_id: FontId) -> &LoadedFont {
-        &self.loaded_fonts[font_id.0]
-    }
-
-    #[profiling::function]
-    fn add_fonts(&mut self, fonts: Vec<Cow<'static, [u8]>>) -> Result<()> {
-        let db = self.font_system.db_mut();
-        for bytes in fonts {
-            match bytes {
-                Cow::Borrowed(embedded_font) => {
-                    db.load_font_data(embedded_font.to_vec());
-                }
-                Cow::Owned(bytes) => {
-                    db.load_font_data(bytes);
-                }
-            }
-        }
-        Ok(())
-    }
-
-    #[profiling::function]
-    fn load_family(
-        &mut self,
-        name: &str,
-        features: &FontFeatures,
-    ) -> Result<SmallVec<[FontId; 4]>> {
-        // TODO: Determine the proper system UI font.
-        let name = gpui::font_name_with_fallbacks(name, "IBM Plex Sans");
-
-        let families = self
-            .font_system
-            .db()
-            .faces()
-            .filter(|face| face.families.iter().any(|family| *name == family.0))
-            .map(|face| (face.id, face.post_script_name.clone()))
-            .collect::<SmallVec<[_; 4]>>();
-
-        let mut loaded_font_ids = SmallVec::new();
-        for (font_id, postscript_name) in families {
-            let font = self
-                .font_system
-                .get_font(font_id, cosmic_text::Weight::NORMAL)
-                .context("Could not load font")?;
-
-            // HACK: To let the storybook run and render Windows caption icons. We should actually do better font fallback.
-            let allowed_bad_font_names = [
-                "SegoeFluentIcons", // NOTE: Segoe fluent icons postscript name is inconsistent
-                "Segoe Fluent Icons",
-            ];
-
-            if font.as_swash().charmap().map('m') == 0
-                && !allowed_bad_font_names.contains(&postscript_name.as_str())
-            {
-                self.font_system.db_mut().remove_face(font.id());
-                continue;
-            };
-
-            let font_id = FontId(self.loaded_fonts.len());
-            loaded_font_ids.push(font_id);
-            self.loaded_fonts.push(LoadedFont {
-                font,
-                features: cosmic_font_features(features)?,
-                is_known_emoji_font: check_is_known_emoji_font(&postscript_name),
-            });
-        }
-
-        Ok(loaded_font_ids)
-    }
-
-    fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result<Size<f32>> {
-        let glyph_metrics = self.loaded_font(font_id).font.as_swash().glyph_metrics(&[]);
-        Ok(Size {
-            width: glyph_metrics.advance_width(glyph_id.0 as u16),
-            height: glyph_metrics.advance_height(glyph_id.0 as u16),
-        })
-    }
-
-    fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option<GlyphId> {
-        let glyph_id = self.loaded_font(font_id).font.as_swash().charmap().map(ch);
-        if glyph_id == 0 {
-            None
-        } else {
-            Some(GlyphId(glyph_id.into()))
-        }
-    }
-
-    fn raster_bounds(&mut self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
-        let image = self.render_glyph_image(params)?;
-        Ok(Bounds {
-            origin: point(image.placement.left.into(), (-image.placement.top).into()),
-            size: size(image.placement.width.into(), image.placement.height.into()),
-        })
-    }
-
-    #[profiling::function]
-    fn rasterize_glyph(
-        &mut self,
-        params: &RenderGlyphParams,
-        glyph_bounds: Bounds<DevicePixels>,
-    ) -> Result<(Size<DevicePixels>, Vec<u8>)> {
-        if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 {
-            anyhow::bail!("glyph bounds are empty");
-        }
-
-        let mut image = self.render_glyph_image(params)?;
-        let bitmap_size = glyph_bounds.size;
-        match image.content {
-            swash::scale::image::Content::Color | swash::scale::image::Content::SubpixelMask => {
-                // Convert from RGBA to BGRA.
-                for pixel in image.data.chunks_exact_mut(4) {
-                    pixel.swap(0, 2);
-                }
-                Ok((bitmap_size, image.data))
-            }
-            swash::scale::image::Content::Mask => Ok((bitmap_size, image.data)),
-        }
-    }
-
-    fn render_glyph_image(
-        &mut self,
-        params: &RenderGlyphParams,
-    ) -> Result<swash::scale::image::Image> {
-        let loaded_font = &self.loaded_fonts[params.font_id.0];
-        let font_ref = loaded_font.font.as_swash();
-        let pixel_size = f32::from(params.font_size);
-
-        let subpixel_offset = Vector::new(
-            params.subpixel_variant.x as f32 / SUBPIXEL_VARIANTS_X as f32 / params.scale_factor,
-            params.subpixel_variant.y as f32 / SUBPIXEL_VARIANTS_Y as f32 / params.scale_factor,
-        );
-
-        let mut scaler = self
-            .swash_scale_context
-            .builder(font_ref)
-            .size(pixel_size * params.scale_factor)
-            .hint(true)
-            .build();
-
-        let sources: &[Source] = if params.is_emoji {
-            &[
-                Source::ColorOutline(0),
-                Source::ColorBitmap(StrikeWith::BestFit),
-                Source::Outline,
-            ]
-        } else {
-            &[Source::Outline]
-        };
-
-        let mut renderer = Render::new(sources);
-        if params.subpixel_rendering {
-            // There seems to be a bug in Swash where the B and R values are swapped.
-            renderer
-                .format(Format::subpixel_bgra())
-                .offset(subpixel_offset);
-        } else {
-            renderer.format(Format::Alpha).offset(subpixel_offset);
-        }
-
-        let glyph_id: u16 = params.glyph_id.0.try_into()?;
-        renderer
-            .render(&mut scaler, glyph_id)
-            .with_context(|| format!("unable to render glyph via swash for {params:?}"))
-    }
-
-    /// This is used when cosmic_text has chosen a fallback font instead of using the requested
-    /// font, typically to handle some unicode characters. When this happens, `loaded_fonts` may not
-    /// yet have an entry for this fallback font, and so one is added.
-    ///
-    /// Note that callers shouldn't use this `FontId` somewhere that will retrieve the corresponding
-    /// `LoadedFont.features`, as it will have an arbitrarily chosen or empty value. The only
-    /// current use of this field is for the *input* of `layout_line`, and so it's fine to use
-    /// `font_id_for_cosmic_id` when computing the *output* of `layout_line`.
-    fn font_id_for_cosmic_id(&mut self, id: cosmic_text::fontdb::ID) -> FontId {
-        if let Some(ix) = self
-            .loaded_fonts
-            .iter()
-            .position(|loaded_font| loaded_font.font.id() == id)
-        {
-            FontId(ix)
-        } else {
-            let font = self
-                .font_system
-                .get_font(id, cosmic_text::Weight::NORMAL)
-                .unwrap();
-            let face = self.font_system.db().face(id).unwrap();
-
-            let font_id = FontId(self.loaded_fonts.len());
-            self.loaded_fonts.push(LoadedFont {
-                font,
-                features: CosmicFontFeatures::new(),
-                is_known_emoji_font: check_is_known_emoji_font(&face.post_script_name),
-            });
-
-            font_id
-        }
-    }
-
-    #[profiling::function]
-    fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout {
-        let mut attrs_list = AttrsList::new(&Attrs::new());
-        let mut offs = 0;
-        for run in font_runs {
-            let loaded_font = self.loaded_font(run.font_id);
-            let font = self.font_system.db().face(loaded_font.font.id()).unwrap();
-
-            attrs_list.add_span(
-                offs..(offs + run.len),
-                &Attrs::new()
-                    .metadata(run.font_id.0)
-                    .family(Family::Name(&font.families.first().unwrap().0))
-                    .stretch(font.stretch)
-                    .style(font.style)
-                    .weight(font.weight)
-                    .font_features(loaded_font.features.clone()),
-            );
-            offs += run.len;
-        }
-
-        let line = ShapeLine::new(
-            &mut self.font_system,
-            text,
-            &attrs_list,
-            cosmic_text::Shaping::Advanced,
-            4,
-        );
-        let mut layout_lines = Vec::with_capacity(1);
-        line.layout_to_buffer(
-            &mut self.scratch,
-            f32::from(font_size),
-            None, // We do our own wrapping
-            cosmic_text::Wrap::None,
-            None,
-            &mut layout_lines,
-            None,
-            cosmic_text::Hinting::Disabled,
-        );
-        let layout = layout_lines.first().unwrap();
-
-        let mut runs: Vec<ShapedRun> = Vec::new();
-        for glyph in &layout.glyphs {
-            let mut font_id = FontId(glyph.metadata);
-            let mut loaded_font = self.loaded_font(font_id);
-            if loaded_font.font.id() != glyph.font_id {
-                font_id = self.font_id_for_cosmic_id(glyph.font_id);
-                loaded_font = self.loaded_font(font_id);
-            }
-            let is_emoji = loaded_font.is_known_emoji_font;
-
-            // HACK: Prevent crash caused by variation selectors.
-            if glyph.glyph_id == 3 && is_emoji {
-                continue;
-            }
-
-            let shaped_glyph = ShapedGlyph {
-                id: GlyphId(glyph.glyph_id as u32),
-                position: point(glyph.x.into(), glyph.y.into()),
-                index: glyph.start,
-                is_emoji,
-            };
-
-            if let Some(last_run) = runs
-                .last_mut()
-                .filter(|last_run| last_run.font_id == font_id)
-            {
-                last_run.glyphs.push(shaped_glyph);
-            } else {
-                runs.push(ShapedRun {
-                    font_id,
-                    glyphs: vec![shaped_glyph],
-                });
-            }
-        }
-
-        LineLayout {
-            font_size,
-            width: layout.w.into(),
-            ascent: layout.max_ascent.into(),
-            descent: layout.max_descent.into(),
-            runs,
-            len: text.len(),
-        }
-    }
-}
-
-fn cosmic_font_features(features: &FontFeatures) -> Result<CosmicFontFeatures> {
-    let mut result = CosmicFontFeatures::new();
-    for feature in features.0.iter() {
-        let name_bytes: [u8; 4] = feature
-            .0
-            .as_bytes()
-            .try_into()
-            .context("Incorrect feature flag format")?;
-
-        let tag = cosmic_text::FeatureTag::new(&name_bytes);
-
-        result.set(tag, feature.1);
-    }
-    Ok(result)
-}
-
-fn font_into_properties(font: &gpui::Font) -> font_kit::properties::Properties {
-    font_kit::properties::Properties {
-        style: match font.style {
-            gpui::FontStyle::Normal => font_kit::properties::Style::Normal,
-            gpui::FontStyle::Italic => font_kit::properties::Style::Italic,
-            gpui::FontStyle::Oblique => font_kit::properties::Style::Oblique,
-        },
-        weight: font_kit::properties::Weight(font.weight.0),
-        stretch: Default::default(),
-    }
-}
-
-fn face_info_into_properties(
-    face_info: &cosmic_text::fontdb::FaceInfo,
-) -> font_kit::properties::Properties {
-    font_kit::properties::Properties {
-        style: match face_info.style {
-            cosmic_text::Style::Normal => font_kit::properties::Style::Normal,
-            cosmic_text::Style::Italic => font_kit::properties::Style::Italic,
-            cosmic_text::Style::Oblique => font_kit::properties::Style::Oblique,
-        },
-        // both libs use the same values for weight
-        weight: font_kit::properties::Weight(face_info.weight.0.into()),
-        stretch: match face_info.stretch {
-            cosmic_text::Stretch::Condensed => font_kit::properties::Stretch::CONDENSED,
-            cosmic_text::Stretch::Expanded => font_kit::properties::Stretch::EXPANDED,
-            cosmic_text::Stretch::ExtraCondensed => font_kit::properties::Stretch::EXTRA_CONDENSED,
-            cosmic_text::Stretch::ExtraExpanded => font_kit::properties::Stretch::EXTRA_EXPANDED,
-            cosmic_text::Stretch::Normal => font_kit::properties::Stretch::NORMAL,
-            cosmic_text::Stretch::SemiCondensed => font_kit::properties::Stretch::SEMI_CONDENSED,
-            cosmic_text::Stretch::SemiExpanded => font_kit::properties::Stretch::SEMI_EXPANDED,
-            cosmic_text::Stretch::UltraCondensed => font_kit::properties::Stretch::ULTRA_CONDENSED,
-            cosmic_text::Stretch::UltraExpanded => font_kit::properties::Stretch::ULTRA_EXPANDED,
-        },
-    }
-}
-
-fn check_is_known_emoji_font(postscript_name: &str) -> bool {
-    // TODO: Include other common emoji fonts
-    postscript_name == "NotoColorEmoji"
-}
+pub(crate) use gpui_wgpu::CosmicTextSystem;

crates/gpui_linux/src/linux/wayland/client.rs 🔗

@@ -95,7 +95,10 @@ use gpui::{
     ScrollDelta, ScrollWheelEvent, SharedString, Size, TaskTiming, TouchPhase, WindowParams, point,
     profiler, px, size,
 };
-use gpui_wgpu::WgpuContext;
+use gpui_wgpu::{CompositorGpuHint, WgpuContext};
+use wayland_protocols::wp::linux_dmabuf::zv1::client::{
+    zwp_linux_dmabuf_feedback_v1, zwp_linux_dmabuf_v1,
+};
 
 /// Used to convert evdev scancode to xkb scancode
 const MIN_KEYCODE: u32 = 8;
@@ -202,6 +205,7 @@ pub(crate) struct WaylandClientState {
     serial_tracker: SerialTracker,
     globals: Globals,
     pub gpu_context: Option<WgpuContext>,
+    pub compositor_gpu: Option<CompositorGpuHint>,
     wl_seat: wl_seat::WlSeat, // TODO: Multi seat support
     wl_pointer: Option<wl_pointer::WlPointer>,
     wl_keyboard: Option<wl_keyboard::WlKeyboard>,
@@ -515,6 +519,7 @@ impl WaylandClient {
             })
             .unwrap();
 
+        let compositor_gpu = detect_compositor_gpu();
         let gpu_context = None;
 
         let seat = seat.unwrap();
@@ -571,6 +576,7 @@ impl WaylandClient {
             serial_tracker: SerialTracker::new(),
             globals,
             gpu_context,
+            compositor_gpu,
             wl_seat: seat,
             wl_pointer: None,
             wl_keyboard: None,
@@ -715,10 +721,12 @@ impl LinuxClient for WaylandClient {
         let parent = state.keyboard_focused_window.clone();
 
         let appearance = state.common.appearance;
+        let compositor_gpu = state.compositor_gpu.take();
         let (window, surface_id) = WaylandWindow::new(
             handle,
             state.globals.clone(),
             &mut state.gpu_context,
+            compositor_gpu,
             WaylandClientStatePtr(Rc::downgrade(&self.0)),
             params,
             appearance,
@@ -904,6 +912,70 @@ impl LinuxClient for WaylandClient {
     }
 }
 
+struct DmabufProbeState {
+    device: Option<u64>,
+}
+
+impl Dispatch<wl_registry::WlRegistry, GlobalListContents> for DmabufProbeState {
+    fn event(
+        _: &mut Self,
+        _: &wl_registry::WlRegistry,
+        _: wl_registry::Event,
+        _: &GlobalListContents,
+        _: &Connection,
+        _: &QueueHandle<Self>,
+    ) {
+    }
+}
+
+impl Dispatch<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1, ()> for DmabufProbeState {
+    fn event(
+        _: &mut Self,
+        _: &zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1,
+        _: zwp_linux_dmabuf_v1::Event,
+        _: &(),
+        _: &Connection,
+        _: &QueueHandle<Self>,
+    ) {
+    }
+}
+
+impl Dispatch<zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1, ()> for DmabufProbeState {
+    fn event(
+        state: &mut Self,
+        _: &zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1,
+        event: zwp_linux_dmabuf_feedback_v1::Event,
+        _: &(),
+        _: &Connection,
+        _: &QueueHandle<Self>,
+    ) {
+        if let zwp_linux_dmabuf_feedback_v1::Event::MainDevice { device } = event {
+            if let Ok(bytes) = <[u8; 8]>::try_from(device.as_slice()) {
+                state.device = Some(u64::from_ne_bytes(bytes));
+            }
+        }
+    }
+}
+
+fn detect_compositor_gpu() -> Option<CompositorGpuHint> {
+    let connection = Connection::connect_to_env().ok()?;
+    let (globals, mut event_queue) = registry_queue_init::<DmabufProbeState>(&connection).ok()?;
+    let queue_handle = event_queue.handle();
+
+    let dmabuf: zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1 =
+        globals.bind(&queue_handle, 4..=4, ()).ok()?;
+    let feedback = dmabuf.get_default_feedback(&queue_handle, ());
+
+    let mut state = DmabufProbeState { device: None };
+
+    event_queue.roundtrip(&mut state).ok()?;
+
+    feedback.destroy();
+    dmabuf.destroy();
+
+    crate::linux::compositor_gpu_hint_from_dev_t(state.device?)
+}
+
 impl Dispatch<wl_registry::WlRegistry, GlobalListContents> for WaylandClientStatePtr {
     fn event(
         this: &mut Self,

crates/gpui_linux/src/linux/wayland/window.rs 🔗

@@ -34,7 +34,7 @@ use gpui::{
     WindowDecorations, WindowKind, WindowParams, layer_shell::LayerShellNotSupportedError, px,
     size,
 };
-use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig};
+use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig};
 
 #[derive(Default)]
 pub(crate) struct Callbacks {
@@ -318,6 +318,7 @@ impl WaylandWindowState {
         client: WaylandClientStatePtr,
         globals: Globals,
         gpu_context: &mut Option<WgpuContext>,
+        compositor_gpu: Option<CompositorGpuHint>,
         options: WindowParams,
         parent: Option<WaylandWindowStatePtr>,
     ) -> anyhow::Result<Self> {
@@ -338,13 +339,19 @@ impl WaylandWindowState {
                 },
                 transparent: true,
             };
-            WgpuRenderer::new(gpu_context, &raw_window, config)?
+            WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)?
         };
 
         if let WaylandSurfaceState::Xdg(ref xdg_state) = surface_state {
             if let Some(title) = options.titlebar.and_then(|titlebar| titlebar.title) {
                 xdg_state.toplevel.set_title(title.to_string());
             }
+            // Set max window size based on the GPU's maximum texture dimension.
+            // This prevents the window from being resized larger than what the GPU can render.
+            let max_texture_size = renderer.max_texture_size() as i32;
+            xdg_state
+                .toplevel
+                .set_max_size(max_texture_size, max_texture_size);
         }
 
         Ok(Self {
@@ -482,6 +489,7 @@ impl WaylandWindow {
         handle: AnyWindowHandle,
         globals: Globals,
         gpu_context: &mut Option<WgpuContext>,
+        compositor_gpu: Option<CompositorGpuHint>,
         client: WaylandClientStatePtr,
         params: WindowParams,
         appearance: WindowAppearance,
@@ -509,6 +517,7 @@ impl WaylandWindow {
                 client,
                 globals,
                 gpu_context,
+                compositor_gpu,
                 params,
                 parent,
             )?)),
@@ -640,19 +649,19 @@ impl WaylandWindowStatePtr {
             match mode {
                 WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ServerSide) => {
                     self.state.borrow_mut().decorations = WindowDecorations::Server;
-                    if let Some(appearance_changed) =
-                        self.callbacks.borrow_mut().appearance_changed.as_mut()
-                    {
-                        appearance_changed();
+                    let callback = self.callbacks.borrow_mut().appearance_changed.take();
+                    if let Some(mut fun) = callback {
+                        fun();
+                        self.callbacks.borrow_mut().appearance_changed = Some(fun);
                     }
                 }
                 WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ClientSide) => {
                     self.state.borrow_mut().decorations = WindowDecorations::Client;
                     // Update background to be transparent
-                    if let Some(appearance_changed) =
-                        self.callbacks.borrow_mut().appearance_changed.as_mut()
-                    {
-                        appearance_changed();
+                    let callback = self.callbacks.borrow_mut().appearance_changed.take();
+                    if let Some(mut fun) = callback {
+                        fun();
+                        self.callbacks.borrow_mut().appearance_changed = Some(fun);
                     }
                 }
                 WEnum::Value(_) => {
@@ -924,8 +933,10 @@ impl WaylandWindowStatePtr {
             (state.bounds.size, state.scale)
         };
 
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().resize {
+        let callback = self.callbacks.borrow_mut().resize.take();
+        if let Some(mut fun) = callback {
             fun(size, scale);
+            self.callbacks.borrow_mut().resize = Some(fun);
         }
 
         {
@@ -971,10 +982,13 @@ impl WaylandWindowStatePtr {
         if self.is_blocked() {
             return;
         }
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().input
-            && !fun(input.clone()).propagate
-        {
-            return;
+        let callback = self.callbacks.borrow_mut().input.take();
+        if let Some(mut fun) = callback {
+            let result = fun(input.clone());
+            self.callbacks.borrow_mut().input = Some(fun);
+            if !result.propagate {
+                return;
+            }
         }
         if let PlatformInput::KeyDown(event) = input
             && event.keystroke.modifiers.is_subset_of(&Modifiers::shift())
@@ -991,23 +1005,28 @@ impl WaylandWindowStatePtr {
 
     pub fn set_focused(&self, focus: bool) {
         self.state.borrow_mut().active = focus;
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().active_status_change {
+        let callback = self.callbacks.borrow_mut().active_status_change.take();
+        if let Some(mut fun) = callback {
             fun(focus);
+            self.callbacks.borrow_mut().active_status_change = Some(fun);
         }
     }
 
     pub fn set_hovered(&self, focus: bool) {
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().hover_status_change {
+        let callback = self.callbacks.borrow_mut().hover_status_change.take();
+        if let Some(mut fun) = callback {
             fun(focus);
+            self.callbacks.borrow_mut().hover_status_change = Some(fun);
         }
     }
 
     pub fn set_appearance(&mut self, appearance: WindowAppearance) {
         self.state.borrow_mut().appearance = appearance;
 
-        let mut callbacks = self.callbacks.borrow_mut();
-        if let Some(ref mut fun) = callbacks.appearance_changed {
-            (fun)()
+        let callback = self.callbacks.borrow_mut().appearance_changed.take();
+        if let Some(mut fun) = callback {
+            fun();
+            self.callbacks.borrow_mut().appearance_changed = Some(fun);
         }
     }
 

crates/gpui_linux/src/linux/x11/client.rs 🔗

@@ -31,7 +31,7 @@ use x11rb::{
         AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent,
         ConnectionExt as _, EventMask, ModMask, Visibility,
     },
-    protocol::{Event, randr, render, xinput, xkb, xproto},
+    protocol::{Event, dri3, randr, render, xinput, xkb, xproto},
     resource_manager::Database,
     wrapper::ConnectionExt as _,
     xcb_ffi::XCBConnection,
@@ -64,7 +64,7 @@ use gpui::{
     PlatformKeyboardLayout, PlatformWindow, Point, RequestFrameOptions, ScrollDelta, Size,
     TouchPhase, WindowParams, point, px,
 };
-use gpui_wgpu::WgpuContext;
+use gpui_wgpu::{CompositorGpuHint, WgpuContext};
 
 /// Value for DeviceId parameters which selects all devices.
 pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0;
@@ -178,6 +178,7 @@ pub struct X11ClientState {
     pub(crate) current_count: usize,
 
     pub(crate) gpu_context: Option<WgpuContext>,
+    pub(crate) compositor_gpu: Option<CompositorGpuHint>,
 
     pub(crate) scale_factor: f32,
 
@@ -430,6 +431,9 @@ impl X11Client {
 
         let clipboard = Clipboard::new().context("Failed to initialize clipboard")?;
 
+        let screen = &xcb_connection.setup().roots[x_root_index];
+        let compositor_gpu = detect_compositor_gpu(&xcb_connection, screen);
+
         let xcb_connection = Rc::new(xcb_connection);
 
         let ximc = X11rbClient::init(Rc::clone(&xcb_connection), x_root_index, None).ok();
@@ -490,6 +494,7 @@ impl X11Client {
             last_location: Point::new(px(0.0), px(0.0)),
             current_count: 0,
             gpu_context: None,
+            compositor_gpu,
             scale_factor,
 
             xkb_context,
@@ -1514,11 +1519,13 @@ impl LinuxClient for X11Client {
         let atoms = state.atoms;
         let scale_factor = state.scale_factor;
         let appearance = state.common.appearance;
+        let compositor_gpu = state.compositor_gpu.take();
         let window = X11Window::new(
             handle,
             X11ClientStatePtr(Rc::downgrade(&self.0)),
             state.common.foreground_executor.clone(),
             &mut state.gpu_context,
+            compositor_gpu,
             params,
             &xcb_connection,
             client_side_decorations_supported,
@@ -1976,7 +1983,30 @@ fn fp3232_to_f32(value: xinput::Fp3232) -> f32 {
     value.integral as f32 + value.frac as f32 / u32::MAX as f32
 }
 
-fn check_compositor_present(xcb_connection: &XCBConnection, root: u32) -> bool {
+fn detect_compositor_gpu(
+    xcb_connection: &XCBConnection,
+    screen: &xproto::Screen,
+) -> Option<CompositorGpuHint> {
+    use std::os::fd::AsRawFd;
+    use std::os::unix::fs::MetadataExt;
+
+    xcb_connection
+        .extension_information(dri3::X11_EXTENSION_NAME)
+        .ok()??;
+
+    let reply = dri3::open(xcb_connection, screen.root, 0)
+        .ok()?
+        .reply()
+        .ok()?;
+    let fd = reply.device_fd;
+
+    let path = format!("/proc/self/fd/{}", fd.as_raw_fd());
+    let metadata = std::fs::metadata(&path).ok()?;
+
+    crate::linux::compositor_gpu_hint_from_dev_t(metadata.rdev())
+}
+
+fn check_compositor_present(xcb_connection: &XCBConnection, root: xproto::Window) -> bool {
     // Method 1: Check for _NET_WM_CM_S{root}
     let atom_name = format!("_NET_WM_CM_S{}", root);
     let atom1 = get_reply(

crates/gpui_linux/src/linux/x11/window.rs 🔗

@@ -9,7 +9,7 @@ use gpui::{
     Tiling, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea,
     WindowDecorations, WindowKind, WindowParams, px,
 };
-use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig};
+use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig};
 
 use collections::FxHashSet;
 use raw_window_handle as rwh;
@@ -319,12 +319,28 @@ impl rwh::HasDisplayHandle for RawWindow {
 
 impl rwh::HasWindowHandle for X11Window {
     fn window_handle(&self) -> Result<rwh::WindowHandle<'_>, rwh::HandleError> {
-        unimplemented!()
+        let Some(non_zero) = NonZeroU32::new(self.0.x_window) else {
+            return Err(rwh::HandleError::Unavailable);
+        };
+        let handle = rwh::XcbWindowHandle::new(non_zero);
+        Ok(unsafe { rwh::WindowHandle::borrow_raw(handle.into()) })
     }
 }
+
 impl rwh::HasDisplayHandle for X11Window {
     fn display_handle(&self) -> Result<rwh::DisplayHandle<'_>, rwh::HandleError> {
-        unimplemented!()
+        let connection =
+            as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection(&*self.0.xcb)
+                as *mut _;
+        let Some(non_zero) = NonNull::new(connection) else {
+            return Err(rwh::HandleError::Unavailable);
+        };
+        let screen_id = {
+            let state = self.0.state.borrow();
+            u32::from(state.display.id()) as i32
+        };
+        let handle = rwh::XcbDisplayHandle::new(Some(non_zero), screen_id);
+        Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) })
     }
 }
 
@@ -392,6 +408,7 @@ impl X11WindowState {
         client: X11ClientStatePtr,
         executor: ForegroundExecutor,
         gpu_context: &mut Option<WgpuContext>,
+        compositor_gpu: Option<CompositorGpuHint>,
         params: WindowParams,
         xcb: &Rc<XCBConnection>,
         client_side_decorations_supported: bool,
@@ -497,21 +514,6 @@ impl X11WindowState {
                 ),
             )?;
 
-            if let Some(size) = params.window_min_size {
-                let mut size_hints = WmSizeHints::new();
-                let min_size = (f32::from(size.width) as i32, f32::from(size.height) as i32);
-                size_hints.min_size = Some(min_size);
-                check_reply(
-                    || {
-                        format!(
-                            "X11 change of WM_SIZE_HINTS failed. min_size: {:?}",
-                            min_size
-                        )
-                    },
-                    size_hints.set_normal_hints(xcb, x_window),
-                )?;
-            }
-
             let reply = get_reply(|| "X11 GetGeometry failed.", xcb.get_geometry(x_window))?;
             if reply.x == 0 && reply.y == 0 {
                 bounds.origin.x.0 += 2;
@@ -694,9 +696,28 @@ impl X11WindowState {
                     // too
                     transparent: false,
                 };
-                WgpuRenderer::new(gpu_context, &raw_window, config)?
+                WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)?
             };
 
+            // Set max window size hints based on the GPU's maximum texture dimension.
+            // This prevents the window from being resized larger than what the GPU can render.
+            let max_texture_size = renderer.max_texture_size();
+            let mut size_hints = WmSizeHints::new();
+            if let Some(size) = params.window_min_size {
+                size_hints.min_size =
+                    Some((f32::from(size.width) as i32, f32::from(size.height) as i32));
+            }
+            size_hints.max_size = Some((max_texture_size as i32, max_texture_size as i32));
+            check_reply(
+                || {
+                    format!(
+                        "X11 change of WM_SIZE_HINTS failed. max_size: {:?}",
+                        max_texture_size
+                    )
+                },
+                size_hints.set_normal_hints(xcb, x_window),
+            )?;
+
             let display = Rc::new(X11Display::new(xcb, scale_factor, x_screen_index)?);
 
             Ok(Self {
@@ -799,6 +820,7 @@ impl X11Window {
         client: X11ClientStatePtr,
         executor: ForegroundExecutor,
         gpu_context: &mut Option<WgpuContext>,
+        compositor_gpu: Option<CompositorGpuHint>,
         params: WindowParams,
         xcb: &Rc<XCBConnection>,
         client_side_decorations_supported: bool,
@@ -815,6 +837,7 @@ impl X11Window {
                 client,
                 executor,
                 gpu_context,
+                compositor_gpu,
                 params,
                 xcb,
                 client_side_decorations_supported,
@@ -1045,9 +1068,10 @@ impl X11WindowStatePtr {
     }
 
     pub fn refresh(&self, request_frame_options: RequestFrameOptions) {
-        let mut cb = self.callbacks.borrow_mut();
-        if let Some(ref mut fun) = cb.request_frame {
+        let callback = self.callbacks.borrow_mut().request_frame.take();
+        if let Some(mut fun) = callback {
             fun(request_frame_options);
+            self.callbacks.borrow_mut().request_frame = Some(fun);
         }
     }
 
@@ -1055,10 +1079,13 @@ impl X11WindowStatePtr {
         if self.is_blocked() {
             return;
         }
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().input
-            && !fun(input.clone()).propagate
-        {
-            return;
+        let callback = self.callbacks.borrow_mut().input.take();
+        if let Some(mut fun) = callback {
+            let result = fun(input.clone());
+            self.callbacks.borrow_mut().input = Some(fun);
+            if !result.propagate {
+                return;
+            }
         }
         if let PlatformInput::KeyDown(event) = input {
             // only allow shift modifier when inserting text
@@ -1191,14 +1218,18 @@ impl X11WindowStatePtr {
     }
 
     pub fn set_active(&self, focus: bool) {
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().active_status_change {
+        let callback = self.callbacks.borrow_mut().active_status_change.take();
+        if let Some(mut fun) = callback {
             fun(focus);
+            self.callbacks.borrow_mut().active_status_change = Some(fun);
         }
     }
 
     pub fn set_hovered(&self, focus: bool) {
-        if let Some(ref mut fun) = self.callbacks.borrow_mut().hovered_status_change {
+        let callback = self.callbacks.borrow_mut().hovered_status_change.take();
+        if let Some(mut fun) = callback {
             fun(focus);
+            self.callbacks.borrow_mut().hovered_status_change = Some(fun);
         }
     }
 
@@ -1209,9 +1240,10 @@ impl X11WindowStatePtr {
         state.renderer.update_transparency(is_transparent);
         state.appearance = appearance;
         drop(state);
-        let mut callbacks = self.callbacks.borrow_mut();
-        if let Some(ref mut fun) = callbacks.appearance_changed {
-            (fun)()
+        let callback = self.callbacks.borrow_mut().appearance_changed.take();
+        if let Some(mut fun) = callback {
+            fun();
+            self.callbacks.borrow_mut().appearance_changed = Some(fun);
         }
     }
 }

crates/gpui_macos/Cargo.toml 🔗

@@ -34,6 +34,7 @@ core-text = "21"
 core-video.workspace = true
 ctor.workspace = true
 derive_more.workspace = true
+dispatch2 = "0.3.1"
 etagere = "0.2"
 # WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io
 font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", optional = true }
@@ -57,6 +58,5 @@ util.workspace = true
 uuid.workspace = true
 
 [target.'cfg(target_os = "macos")'.build-dependencies]
-bindgen = "0.71"
 cbindgen = { version = "0.28.0", default-features = false }
-gpui.workspace = true
+gpui.workspace = true

crates/gpui_macos/build.rs 🔗

@@ -15,8 +15,6 @@ mod macos_build {
     use cbindgen::Config;
 
     pub fn run() {
-        generate_dispatch_bindings();
-
         let header_path = generate_shader_bindings();
 
         #[cfg(feature = "runtime_shaders")]
@@ -25,39 +23,6 @@ mod macos_build {
         compile_metal_shaders(&header_path);
     }
 
-    fn generate_dispatch_bindings() {
-        println!("cargo:rustc-link-lib=framework=System");
-
-        let bindings = bindgen::Builder::default()
-            .header("src/dispatch.h")
-            .allowlist_var("_dispatch_main_q")
-            .allowlist_var("_dispatch_source_type_data_add")
-            .allowlist_var("DISPATCH_QUEUE_PRIORITY_HIGH")
-            .allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT")
-            .allowlist_var("DISPATCH_QUEUE_PRIORITY_LOW")
-            .allowlist_var("DISPATCH_TIME_NOW")
-            .allowlist_function("dispatch_get_global_queue")
-            .allowlist_function("dispatch_async_f")
-            .allowlist_function("dispatch_after_f")
-            .allowlist_function("dispatch_time")
-            .allowlist_function("dispatch_source_merge_data")
-            .allowlist_function("dispatch_source_create")
-            .allowlist_function("dispatch_source_set_event_handler_f")
-            .allowlist_function("dispatch_resume")
-            .allowlist_function("dispatch_suspend")
-            .allowlist_function("dispatch_source_cancel")
-            .allowlist_function("dispatch_set_context")
-            .parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
-            .layout_tests(false)
-            .generate()
-            .expect("unable to generate bindings");
-
-        let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
-        bindings
-            .write_to_file(out_path.join("dispatch_sys.rs"))
-            .expect("couldn't write dispatch bindings");
-    }
-
     fn generate_shader_bindings() -> PathBuf {
         let output_path = PathBuf::from(env::var("OUT_DIR").unwrap()).join("scene.h");
 

crates/gpui_macos/src/dispatcher.rs 🔗

@@ -1,7 +1,4 @@
-#![allow(non_upper_case_globals)]
-#![allow(non_camel_case_types)]
-#![allow(non_snake_case)]
-
+use dispatch2::{DispatchQueue, DispatchQueueGlobalPriority, DispatchTime, GlobalQueueIdentifier};
 use gpui::{
     GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RunnableMeta, RunnableVariant,
     THREAD_TIMINGS, TaskTiming, ThreadTaskTimings,
@@ -26,21 +23,10 @@ use objc::{
 };
 use std::{
     ffi::c_void,
-    ptr::{NonNull, addr_of},
+    ptr::NonNull,
     time::{Duration, Instant},
 };
 
-/// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to prevent
-/// these pub items from leaking into public API.
-pub(crate) mod dispatch_sys {
-    include!(concat!(env!("OUT_DIR"), "/dispatch_sys.rs"));
-}
-
-use dispatch_sys::*;
-pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t {
-    addr_of!(_dispatch_main_q) as *const _ as dispatch_queue_t
-}
-
 pub(crate) struct MacDispatcher;
 
 impl MacDispatcher {
@@ -89,43 +75,32 @@ impl PlatformDispatcher for MacDispatcher {
             Priority::RealtimeAudio => {
                 panic!("RealtimeAudio priority should use spawn_realtime, not dispatch")
             }
-            Priority::High => DISPATCH_QUEUE_PRIORITY_HIGH as isize,
-            Priority::Medium => DISPATCH_QUEUE_PRIORITY_DEFAULT as isize,
-            Priority::Low => DISPATCH_QUEUE_PRIORITY_LOW as isize,
+            Priority::High => DispatchQueueGlobalPriority::High,
+            Priority::Medium => DispatchQueueGlobalPriority::Default,
+            Priority::Low => DispatchQueueGlobalPriority::Low,
         };
 
         unsafe {
-            dispatch_async_f(
-                dispatch_get_global_queue(queue_priority, 0),
-                context,
-                Some(trampoline as unsafe extern "C" fn(*mut c_void)),
-            );
+            DispatchQueue::global_queue(GlobalQueueIdentifier::Priority(queue_priority))
+                .exec_async_f(context, trampoline);
         }
     }
 
     fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) {
         let context = runnable.into_raw().as_ptr() as *mut c_void;
         unsafe {
-            dispatch_async_f(
-                dispatch_get_main_queue(),
-                context,
-                Some(trampoline as unsafe extern "C" fn(*mut c_void)),
-            );
+            DispatchQueue::main().exec_async_f(context, trampoline);
         }
     }
 
     fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
         let context = runnable.into_raw().as_ptr() as *mut c_void;
+        let queue = DispatchQueue::global_queue(GlobalQueueIdentifier::Priority(
+            DispatchQueueGlobalPriority::High,
+        ));
+        let when = DispatchTime::NOW.time(duration.as_nanos() as i64);
         unsafe {
-            let queue =
-                dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0);
-            let when = dispatch_time(DISPATCH_TIME_NOW as u64, duration.as_nanos() as i64);
-            dispatch_after_f(
-                when,
-                queue,
-                context,
-                Some(trampoline as unsafe extern "C" fn(*mut c_void)),
-            );
+            DispatchQueue::exec_after_f(when, &queue, context, trampoline);
         }
     }
 

crates/gpui_macos/src/display_link.rs 🔗

@@ -1,26 +1,21 @@
-use crate::{
-    dispatch_get_main_queue,
-    dispatcher::dispatch_sys::{
-        _dispatch_source_type_data_add, dispatch_resume, dispatch_set_context,
-        dispatch_source_cancel, dispatch_source_create, dispatch_source_merge_data,
-        dispatch_source_set_event_handler_f, dispatch_source_t, dispatch_suspend,
-    },
-};
 use anyhow::Result;
 use core_graphics::display::CGDirectDisplayID;
+use dispatch2::{
+    _dispatch_source_type_data_add, DispatchObject, DispatchQueue, DispatchRetained, DispatchSource,
+};
 use std::ffi::c_void;
 use util::ResultExt;
 
 pub struct DisplayLink {
     display_link: Option<sys::DisplayLink>,
-    frame_requests: dispatch_source_t,
+    frame_requests: DispatchRetained<DispatchSource>,
 }
 
 impl DisplayLink {
     pub fn new(
         display_id: CGDirectDisplayID,
         data: *mut c_void,
-        callback: unsafe extern "C" fn(*mut c_void),
+        callback: extern "C" fn(*mut c_void),
     ) -> Result<DisplayLink> {
         unsafe extern "C" fn display_link_callback(
             _display_link_out: *mut sys::CVDisplayLink,
@@ -31,31 +26,27 @@ impl DisplayLink {
             frame_requests: *mut c_void,
         ) -> i32 {
             unsafe {
-                let frame_requests = frame_requests as dispatch_source_t;
-                dispatch_source_merge_data(frame_requests, 1);
+                let frame_requests = &*(frame_requests as *const DispatchSource);
+                frame_requests.merge_data(1);
                 0
             }
         }
 
         unsafe {
-            let frame_requests = dispatch_source_create(
-                &_dispatch_source_type_data_add,
+            let frame_requests = DispatchSource::new(
+                &raw const _dispatch_source_type_data_add as *mut _,
                 0,
                 0,
-                dispatch_get_main_queue(),
-            );
-            dispatch_set_context(
-                crate::dispatch_sys::dispatch_object_t {
-                    _ds: frame_requests,
-                },
-                data,
+                Some(DispatchQueue::main()),
             );
-            dispatch_source_set_event_handler_f(frame_requests, Some(callback));
+            frame_requests.set_context(data);
+            frame_requests.set_event_handler_f(callback);
+            frame_requests.resume();
 
             let display_link = sys::DisplayLink::new(
                 display_id,
                 display_link_callback,
-                frame_requests as *mut c_void,
+                &*frame_requests as *const DispatchSource as *mut c_void,
             )?;
 
             Ok(Self {
@@ -67,9 +58,6 @@ impl DisplayLink {
 
     pub fn start(&mut self) -> Result<()> {
         unsafe {
-            dispatch_resume(crate::dispatch_sys::dispatch_object_t {
-                _ds: self.frame_requests,
-            });
             self.display_link.as_mut().unwrap().start()?;
         }
         Ok(())
@@ -77,9 +65,6 @@ impl DisplayLink {
 
     pub fn stop(&mut self) -> Result<()> {
         unsafe {
-            dispatch_suspend(crate::dispatch_sys::dispatch_object_t {
-                _ds: self.frame_requests,
-            });
             self.display_link.as_mut().unwrap().stop()?;
         }
         Ok(())
@@ -97,9 +82,7 @@ impl Drop for DisplayLink {
         //
         // We might also want to upgrade to CADisplayLink, but that requires dropping old macOS support.
         std::mem::forget(self.display_link.take());
-        unsafe {
-            dispatch_source_cancel(self.frame_requests);
-        }
+        self.frame_requests.cancel();
     }
 }
 

crates/gpui_macos/src/platform.rs 🔗

@@ -24,6 +24,7 @@ use core_foundation::{
     string::{CFString, CFStringRef},
 };
 use ctor::ctor;
+use dispatch2::DispatchQueue;
 use futures::channel::oneshot;
 use gpui::{
     Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor,
@@ -493,13 +494,11 @@ impl Platform for MacPlatform {
         // this, we make quitting the application asynchronous so that we aren't holding borrows to
         // the app state on the stack when we actually terminate the app.
 
-        use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f};
-
         unsafe {
-            dispatch_async_f(dispatch_get_main_queue(), ptr::null_mut(), Some(quit));
+            DispatchQueue::main().exec_async_f(ptr::null_mut(), quit);
         }
 
-        unsafe extern "C" fn quit(_: *mut c_void) {
+        extern "C" fn quit(_: *mut c_void) {
             unsafe {
                 let app = NSApplication::sharedApplication(nil);
                 let _: () = msg_send![app, terminate: nil];
@@ -1261,19 +1260,13 @@ extern "C" fn on_thermal_state_change(this: &mut Object, _: Sel, _: id) {
     // Defer to the next run loop iteration to avoid re-entrant borrows of the App RefCell,
     // as NSNotificationCenter delivers this notification synchronously and it may fire while
     // the App is already borrowed (same pattern as quit() above).
-    use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f};
-
     let platform = unsafe { get_mac_platform(this) };
     let platform_ptr = platform as *const MacPlatform as *mut c_void;
     unsafe {
-        dispatch_async_f(
-            dispatch_get_main_queue(),
-            platform_ptr,
-            Some(on_thermal_state_change),
-        );
+        DispatchQueue::main().exec_async_f(platform_ptr, on_thermal_state_change);
     }
 
-    unsafe extern "C" fn on_thermal_state_change(context: *mut c_void) {
+    extern "C" fn on_thermal_state_change(context: *mut c_void) {
         let platform = unsafe { &*(context as *const MacPlatform) };
         let mut lock = platform.0.lock();
         if let Some(mut callback) = lock.on_thermal_state_change.take() {

crates/gpui_macos/src/window.rs 🔗

@@ -1,7 +1,6 @@
 use crate::{
-    BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, dispatch_get_main_queue,
-    dispatcher::dispatch_sys::dispatch_async_f, events::platform_input_from_native, ns_string,
-    renderer,
+    BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, events::platform_input_from_native,
+    ns_string, renderer,
 };
 #[cfg(any(test, feature = "test-support"))]
 use anyhow::Result;
@@ -22,6 +21,7 @@ use cocoa::{
         NSUserDefaults,
     },
 };
+use dispatch2::DispatchQueue;
 use gpui::{
     AnyWindowHandle, BackgroundExecutor, Bounds, Capslock, ExternalPaths, FileDropEvent,
     ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton,
@@ -1050,34 +1050,32 @@ impl PlatformWindow for MacWindow {
 
     fn merge_all_windows(&self) {
         let native_window = self.0.lock().native_window;
-        unsafe extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) {
-            let native_window = context as id;
-            let _: () = msg_send![native_window, mergeAllWindows:nil];
+        extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) {
+            unsafe {
+                let native_window = context as id;
+                let _: () = msg_send![native_window, mergeAllWindows:nil];
+            }
         }
 
         unsafe {
-            dispatch_async_f(
-                dispatch_get_main_queue(),
-                native_window as *mut std::ffi::c_void,
-                Some(merge_windows_async),
-            );
+            DispatchQueue::main()
+                .exec_async_f(native_window as *mut std::ffi::c_void, merge_windows_async);
         }
     }
 
     fn move_tab_to_new_window(&self) {
         let native_window = self.0.lock().native_window;
-        unsafe extern "C" fn move_tab_async(context: *mut std::ffi::c_void) {
-            let native_window = context as id;
-            let _: () = msg_send![native_window, moveTabToNewWindow:nil];
-            let _: () = msg_send![native_window, makeKeyAndOrderFront: nil];
+        extern "C" fn move_tab_async(context: *mut std::ffi::c_void) {
+            unsafe {
+                let native_window = context as id;
+                let _: () = msg_send![native_window, moveTabToNewWindow:nil];
+                let _: () = msg_send![native_window, makeKeyAndOrderFront: nil];
+            }
         }
 
         unsafe {
-            dispatch_async_f(
-                dispatch_get_main_queue(),
-                native_window as *mut std::ffi::c_void,
-                Some(move_tab_async),
-            );
+            DispatchQueue::main()
+                .exec_async_f(native_window as *mut std::ffi::c_void, move_tab_async);
         }
     }
 
@@ -2252,7 +2250,7 @@ extern "C" fn display_layer(this: &Object, _: Sel, _: id) {
     }
 }
 
-unsafe extern "C" fn step(view: *mut c_void) {
+extern "C" fn step(view: *mut c_void) {
     let view = view as id;
     let window_state = unsafe { get_window_state(&*view) };
     let mut lock = window_state.lock();
@@ -2551,19 +2549,20 @@ fn send_file_drop_event(
     window_state: Arc<Mutex<MacWindowState>>,
     file_drop_event: FileDropEvent,
 ) -> bool {
-    let mut window_state = window_state.lock();
-    let window_event_callback = window_state.event_callback.as_mut();
-    if let Some(callback) = window_event_callback {
-        let external_files_dragged = match file_drop_event {
-            FileDropEvent::Entered { .. } => Some(true),
-            FileDropEvent::Exited => Some(false),
-            _ => None,
-        };
+    let external_files_dragged = match file_drop_event {
+        FileDropEvent::Entered { .. } => Some(true),
+        FileDropEvent::Exited => Some(false),
+        _ => None,
+    };
 
+    let mut lock = window_state.lock();
+    if let Some(mut callback) = lock.event_callback.take() {
+        drop(lock);
         callback(PlatformInput::FileDrop(file_drop_event));
-
+        let mut lock = window_state.lock();
+        lock.event_callback = Some(callback);
         if let Some(external_files_dragged) = external_files_dragged {
-            window_state.external_files_dragged = external_files_dragged;
+            lock.external_files_dragged = external_files_dragged;
         }
         true
     } else {

crates/gpui_macros/src/test.rs 🔗

@@ -165,12 +165,13 @@ fn generate_test_function(
                                 dispatcher.clone(),
                                 Some(stringify!(#outer_fn_name)),
                             );
+                            let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle();
                         ));
                         cx_teardowns.extend(quote!(
-                            dispatcher.run_until_parked();
-                            #cx_varname.executor().forbid_parking();
-                            #cx_varname.quit();
-                            dispatcher.run_until_parked();
+                            #cx_varname.run_until_parked();
+                            #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); });
+                            #cx_varname.run_until_parked();
+                            drop(#cx_varname);
                         ));
                         inner_fn_args.extend(quote!(&mut #cx_varname,));
                         continue;
@@ -191,10 +192,17 @@ fn generate_test_function(
                     &[#seeds],
                     #max_retries,
                     &mut |dispatcher, _seed| {
-                        let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone()));
+                        let exec = std::sync::Arc::new(dispatcher.clone());
                         #cx_vars
-                        foreground_executor.block_test(#inner_fn_name(#inner_fn_args));
+                        gpui::ForegroundExecutor::new(exec.clone()).block_test(#inner_fn_name(#inner_fn_args));
+                        drop(exec);
                         #cx_teardowns
+                        // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity
+                        // cycles as cancelled tasks will be dropped properly once the runnable gets run again
+                        //
+                        // async-task does not give us the power to do this just yet though
+                        dispatcher.drain_tasks();
+                        drop(dispatcher);
                     },
                     #on_failure_fn_name
                 );
@@ -229,13 +237,15 @@ fn generate_test_function(
                                    Some(stringify!(#outer_fn_name))
                                 );
                                 let mut #cx_varname_lock = #cx_varname.app.borrow_mut();
+                                let _entity_refcounts = #cx_varname_lock.ref_counts_drop_handle();
                             ));
                             inner_fn_args.extend(quote!(&mut #cx_varname_lock,));
                             cx_teardowns.extend(quote!(
                                     drop(#cx_varname_lock);
-                                    dispatcher.run_until_parked();
+                                    #cx_varname.run_until_parked();
                                     #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); });
-                                    dispatcher.run_until_parked();
+                                    #cx_varname.run_until_parked();
+                                    drop(#cx_varname);
                                 ));
                             continue;
                         }
@@ -246,12 +256,13 @@ fn generate_test_function(
                                     dispatcher.clone(),
                                     Some(stringify!(#outer_fn_name))
                                 );
+                                let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle();
                             ));
                             cx_teardowns.extend(quote!(
-                                dispatcher.run_until_parked();
-                                #cx_varname.executor().forbid_parking();
-                                #cx_varname.quit();
-                                dispatcher.run_until_parked();
+                                #cx_varname.run_until_parked();
+                                #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); });
+                                #cx_varname.run_until_parked();
+                                drop(#cx_varname);
                             ));
                             inner_fn_args.extend(quote!(&mut #cx_varname,));
                             continue;
@@ -277,6 +288,12 @@ fn generate_test_function(
                         #cx_vars
                         #inner_fn_name(#inner_fn_args);
                         #cx_teardowns
+                        // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity
+                        // cycles as cancelled tasks will be dropped properly once the runnable gets run again
+                        //
+                        // async-task does not give us the power to do this just yet though
+                        dispatcher.drain_tasks();
+                        drop(dispatcher);
                     },
                     #on_failure_fn_name,
                 );

crates/gpui_platform/Cargo.toml 🔗

@@ -31,3 +31,7 @@ gpui_windows.workspace = true
 
 [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies]
 gpui_linux.workspace = true
+
+[target.'cfg(target_family = "wasm")'.dependencies]
+gpui_web.workspace = true
+console_error_panic_hook = "0.1.7"

crates/gpui_platform/src/gpui_platform.rs 🔗

@@ -18,6 +18,20 @@ pub fn headless() -> gpui::Application {
     gpui::Application::with_platform(current_platform(true))
 }
 
+/// Unlike `application`, this function returns a single-threaded web application.
+#[cfg(target_family = "wasm")]
+pub fn single_threaded_web() -> gpui::Application {
+    gpui::Application::with_platform(Rc::new(gpui_web::WebPlatform::new(false)))
+}
+
+/// Initializes panic hooks and logging for the web platform.
+/// Call this before running the application in a wasm_bindgen entrypoint.
+#[cfg(target_family = "wasm")]
+pub fn web_init() {
+    console_error_panic_hook::set_once();
+    gpui_web::init_logging();
+}
+
 /// Returns the default [`Platform`] for the current OS.
 pub fn current_platform(headless: bool) -> Rc<dyn Platform> {
     #[cfg(target_os = "macos")]
@@ -33,10 +47,16 @@ pub fn current_platform(headless: bool) -> Rc<dyn Platform> {
         )
     }
 
-    #[cfg(not(any(target_os = "macos", target_os = "windows")))]
+    #[cfg(any(target_os = "linux", target_os = "freebsd"))]
     {
         gpui_linux::current_platform(headless)
     }
+
+    #[cfg(target_family = "wasm")]
+    {
+        let _ = headless;
+        Rc::new(gpui_web::WebPlatform::new(true))
+    }
 }
 
 #[cfg(all(test, target_os = "macos"))]

crates/gpui_util/Cargo.toml 🔗

@@ -0,0 +1,12 @@
+[package]
+name = "gpui_util"
+version = "0.1.0"
+publish.workspace = true
+edition.workspace = true
+
+[dependencies]
+log.workspace = true
+anyhow.workspace = true
+
+[lints]
+workspace = true

crates/gpui_util/src/lib.rs 🔗

@@ -0,0 +1,292 @@
+// FluentBuilder
+// pub use gpui_util::{FutureExt, Timeout, arc_cow::ArcCow};
+
+use std::{
+    env,
+    ops::AddAssign,
+    panic::Location,
+    pin::Pin,
+    sync::OnceLock,
+    task::{Context, Poll},
+    time::Instant,
+};
+
+pub mod arc_cow;
+
+pub fn post_inc<T: From<u8> + AddAssign<T> + Copy>(value: &mut T) -> T {
+    let prev = *value;
+    *value += T::from(1);
+    prev
+}
+
+pub fn measure<R>(label: &str, f: impl FnOnce() -> R) -> R {
+    static ZED_MEASUREMENTS: OnceLock<bool> = OnceLock::new();
+    let zed_measurements = ZED_MEASUREMENTS.get_or_init(|| {
+        env::var("ZED_MEASUREMENTS")
+            .map(|measurements| measurements == "1" || measurements == "true")
+            .unwrap_or(false)
+    });
+
+    if *zed_measurements {
+        let start = Instant::now();
+        let result = f();
+        let elapsed = start.elapsed();
+        eprintln!("{}: {:?}", label, elapsed);
+        result
+    } else {
+        f()
+    }
+}
+
+#[macro_export]
+macro_rules! debug_panic {
+    ( $($fmt_arg:tt)* ) => {
+        if cfg!(debug_assertions) {
+            panic!( $($fmt_arg)* );
+        } else {
+            let backtrace = std::backtrace::Backtrace::capture();
+            log::error!("{}\n{:?}", format_args!($($fmt_arg)*), backtrace);
+        }
+    };
+}
+
+#[track_caller]
+pub fn some_or_debug_panic<T>(option: Option<T>) -> Option<T> {
+    #[cfg(debug_assertions)]
+    if option.is_none() {
+        panic!("Unexpected None");
+    }
+    option
+}
+
+/// Expands to an immediately-invoked function expression. Good for using the ? operator
+/// in functions which do not return an Option or Result.
+///
+/// Accepts a normal block, an async block, or an async move block.
+#[macro_export]
+macro_rules! maybe {
+    ($block:block) => {
+        (|| $block)()
+    };
+    (async $block:block) => {
+        (async || $block)()
+    };
+    (async move $block:block) => {
+        (async move || $block)()
+    };
+}
+pub trait ResultExt<E> {
+    type Ok;
+
+    fn log_err(self) -> Option<Self::Ok>;
+    /// Assert that this result should never be an error in development or tests.
+    fn debug_assert_ok(self, reason: &str) -> Self;
+    fn warn_on_err(self) -> Option<Self::Ok>;
+    fn log_with_level(self, level: log::Level) -> Option<Self::Ok>;
+    fn anyhow(self) -> anyhow::Result<Self::Ok>
+    where
+        E: Into<anyhow::Error>;
+}
+
+impl<T, E> ResultExt<E> for Result<T, E>
+where
+    E: std::fmt::Debug,
+{
+    type Ok = T;
+
+    #[track_caller]
+    fn log_err(self) -> Option<T> {
+        self.log_with_level(log::Level::Error)
+    }
+
+    #[track_caller]
+    fn debug_assert_ok(self, reason: &str) -> Self {
+        if let Err(error) = &self {
+            debug_panic!("{reason} - {error:?}");
+        }
+        self
+    }
+
+    #[track_caller]
+    fn warn_on_err(self) -> Option<T> {
+        self.log_with_level(log::Level::Warn)
+    }
+
+    #[track_caller]
+    fn log_with_level(self, level: log::Level) -> Option<T> {
+        match self {
+            Ok(value) => Some(value),
+            Err(error) => {
+                log_error_with_caller(*Location::caller(), error, level);
+                None
+            }
+        }
+    }
+
+    fn anyhow(self) -> anyhow::Result<T>
+    where
+        E: Into<anyhow::Error>,
+    {
+        self.map_err(Into::into)
+    }
+}
+
+fn log_error_with_caller<E>(caller: core::panic::Location<'_>, error: E, level: log::Level)
+where
+    E: std::fmt::Debug,
+{
+    #[cfg(not(windows))]
+    let file = caller.file();
+    #[cfg(windows)]
+    let file = caller.file().replace('\\', "/");
+    // In this codebase all crates reside in a `crates` directory,
+    // so discard the prefix up to that segment to find the crate name
+    let file = file.split_once("crates/");
+    let target = file.as_ref().and_then(|(_, s)| s.split_once("/src/"));
+
+    let module_path = target.map(|(krate, module)| {
+        if module.starts_with(krate) {
+            module.trim_end_matches(".rs").replace('/', "::")
+        } else {
+            krate.to_owned() + "::" + &module.trim_end_matches(".rs").replace('/', "::")
+        }
+    });
+    let file = file.map(|(_, file)| format!("crates/{file}"));
+    log::logger().log(
+        &log::Record::builder()
+            .target(module_path.as_deref().unwrap_or(""))
+            .module_path(file.as_deref())
+            .args(format_args!("{:?}", error))
+            .file(Some(caller.file()))
+            .line(Some(caller.line()))
+            .level(level)
+            .build(),
+    );
+}
+
+pub fn log_err<E: std::fmt::Debug>(error: &E) {
+    log_error_with_caller(*Location::caller(), error, log::Level::Error);
+}
+
+pub trait TryFutureExt {
+    fn log_err(self) -> LogErrorFuture<Self>
+    where
+        Self: Sized;
+
+    fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture<Self>
+    where
+        Self: Sized;
+
+    fn warn_on_err(self) -> LogErrorFuture<Self>
+    where
+        Self: Sized;
+    fn unwrap(self) -> UnwrapFuture<Self>
+    where
+        Self: Sized;
+}
+
+impl<F, T, E> TryFutureExt for F
+where
+    F: Future<Output = Result<T, E>>,
+    E: std::fmt::Debug,
+{
+    #[track_caller]
+    fn log_err(self) -> LogErrorFuture<Self>
+    where
+        Self: Sized,
+    {
+        let location = Location::caller();
+        LogErrorFuture(self, log::Level::Error, *location)
+    }
+
+    fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture<Self>
+    where
+        Self: Sized,
+    {
+        LogErrorFuture(self, log::Level::Error, location)
+    }
+
+    #[track_caller]
+    fn warn_on_err(self) -> LogErrorFuture<Self>
+    where
+        Self: Sized,
+    {
+        let location = Location::caller();
+        LogErrorFuture(self, log::Level::Warn, *location)
+    }
+
+    fn unwrap(self) -> UnwrapFuture<Self>
+    where
+        Self: Sized,
+    {
+        UnwrapFuture(self)
+    }
+}
+
+#[must_use]
+pub struct LogErrorFuture<F>(F, log::Level, core::panic::Location<'static>);
+
+impl<F, T, E> Future for LogErrorFuture<F>
+where
+    F: Future<Output = Result<T, E>>,
+    E: std::fmt::Debug,
+{
+    type Output = Option<T>;
+
+    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
+        let level = self.1;
+        let location = self.2;
+        let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) };
+        match inner.poll(cx) {
+            Poll::Ready(output) => Poll::Ready(match output {
+                Ok(output) => Some(output),
+                Err(error) => {
+                    log_error_with_caller(location, error, level);
+                    None
+                }
+            }),
+            Poll::Pending => Poll::Pending,
+        }
+    }
+}
+
+pub struct UnwrapFuture<F>(F);
+
+impl<F, T, E> Future for UnwrapFuture<F>
+where
+    F: Future<Output = Result<T, E>>,
+    E: std::fmt::Debug,
+{
+    type Output = T;
+
+    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
+        let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) };
+        match inner.poll(cx) {
+            Poll::Ready(result) => Poll::Ready(result.unwrap()),
+            Poll::Pending => Poll::Pending,
+        }
+    }
+}
+
+pub struct Deferred<F: FnOnce()>(Option<F>);
+
+impl<F: FnOnce()> Deferred<F> {
+    /// Drop without running the deferred function.
+    pub fn abort(mut self) {
+        self.0.take();
+    }
+}
+
+impl<F: FnOnce()> Drop for Deferred<F> {
+    fn drop(&mut self) {
+        if let Some(f) = self.0.take() {
+            f()
+        }
+    }
+}
+
+/// Run the given function when the returned value is dropped (unless it's cancelled).
+#[must_use]
+pub fn defer<F: FnOnce()>(f: F) -> Deferred<F> {
+    Deferred(Some(f))
+}

crates/gpui_web/Cargo.toml 🔗

@@ -0,0 +1,71 @@
+[package]
+name = "gpui_web"
+version = "0.1.0"
+publish.workspace = true
+edition.workspace = true
+license = "Apache-2.0"
+autoexamples = false
+
+[lints]
+workspace = true
+
+[features]
+default = ["multithreaded"]
+multithreaded = ["dep:wasm_thread"]
+
+[lib]
+path = "src/gpui_web.rs"
+
+[target.'cfg(target_family = "wasm")'.dependencies]
+gpui.workspace = true
+parking_lot = { workspace = true, features = ["nightly"] }
+gpui_wgpu.workspace = true
+http_client.workspace = true
+anyhow.workspace = true
+futures.workspace = true
+log.workspace = true
+smallvec.workspace = true
+uuid.workspace = true
+wasm-bindgen.workspace = true
+wasm-bindgen-futures = "0.4"
+web-time.workspace = true
+console_error_panic_hook = "0.1.7"
+js-sys = "0.3"
+raw-window-handle = "0.6"
+wasm_thread = { version = "0.3", features = ["es_modules"], optional = true }
+web-sys = { version = "0.3", features = [
+    "console",
+    "CssStyleDeclaration",
+    "DataTransfer",
+    "Document",
+    "DomRect",
+    "DragEvent",
+    "Element",
+    "EventTarget",
+    "File",
+    "FileList",
+    "HtmlCanvasElement",
+    "HtmlElement",
+    "HtmlInputElement",
+    "KeyboardEvent",
+    "MediaQueryList",
+    "MediaQueryListEvent",
+    "MouseEvent",
+    "Navigator",
+    "PointerEvent",
+    "ResizeObserver",
+    "ResizeObserverBoxOptions",
+    "ResizeObserverEntry",
+    "ResizeObserverSize",
+    "ResizeObserverOptions",
+    "Screen",
+    "Storage",
+    "VisualViewport",
+    "Headers",
+    "Request",
+    "RequestInit",
+    "RequestRedirect",
+    "Response",
+    "WheelEvent",
+    "Window",
+] }

crates/gpui_web/examples/hello_web/.cargo/config.toml 🔗

@@ -0,0 +1,14 @@
+[target.wasm32-unknown-unknown]
+rustflags = [
+    "-C", "target-feature=+atomics,+bulk-memory,+mutable-globals",
+    "-C", "link-arg=--shared-memory",
+    "-C", "link-arg=--max-memory=1073741824",
+    "-C", "link-arg=--import-memory",
+    "-C", "link-arg=--export=__wasm_init_tls",
+    "-C", "link-arg=--export=__tls_size",
+    "-C", "link-arg=--export=__tls_align",
+    "-C", "link-arg=--export=__tls_base",
+]
+
+[unstable]
+build-std = ["std", "panic_abort"]

crates/gpui_web/examples/hello_web/Cargo.toml 🔗

@@ -0,0 +1,16 @@
+[workspace]
+
+[package]
+name = "hello_web"
+version = "0.1.0"
+edition = "2024"
+publish = false
+
+[[bin]]
+name = "hello_web"
+path = "main.rs"
+
+[dependencies]
+gpui = { path = "../../../gpui" }
+gpui_platform = { path = "../../../gpui_platform" }
+web-time = "1"

crates/gpui_web/examples/hello_web/index.html 🔗

@@ -0,0 +1,31 @@
+<!doctype html>
+<html lang="en">
+    <head>
+        <meta charset="utf-8" />
+        <meta name="viewport" content="width=device-width, height=device-height, initial-scale=1.0, user-scalable=0" />
+        <title>GPUI Web: hello_web</title>
+        <link data-trunk rel="rust" data-bin="hello_web" data-bindgen-target="web" data-keep-debug data-wasm-opt="0" />
+        <style>
+            * {
+                margin: 0;
+                padding: 0;
+                box-sizing: border-box;
+            }
+            html,
+            body {
+                margin: 0;
+                height: 100%;
+            }
+            canvas {
+                display: block;
+                width: 100%;
+                height: 100%;
+                touch-action: none;
+                outline: none;
+                -webkit-user-select: none;
+                user-select: none;
+            }
+        </style>
+    </head>
+    <body></body>
+</html>

crates/gpui_web/examples/hello_web/main.rs 🔗

@@ -0,0 +1,422 @@
+use gpui::prelude::*;
+use gpui::{
+    App, Bounds, Context, ElementId, SharedString, Task, Window, WindowBounds, WindowOptions, div,
+    px, rgb, size,
+};
+
+// ---------------------------------------------------------------------------
+// Prime counting (intentionally brute-force so it hammers the CPU)
+// ---------------------------------------------------------------------------
+
+fn is_prime(n: u64) -> bool {
+    if n < 2 {
+        return false;
+    }
+    if n < 4 {
+        return true;
+    }
+    if n % 2 == 0 || n % 3 == 0 {
+        return false;
+    }
+    let mut i = 5;
+    while i * i <= n {
+        if n % i == 0 || n % (i + 2) == 0 {
+            return false;
+        }
+        i += 6;
+    }
+    true
+}
+
+fn count_primes_in_range(start: u64, end: u64) -> u64 {
+    let mut count = 0;
+    for n in start..end {
+        if is_prime(n) {
+            count += 1;
+        }
+    }
+    count
+}
+
+// ---------------------------------------------------------------------------
+// App state
+// ---------------------------------------------------------------------------
+
+const NUM_CHUNKS: u64 = 12;
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+enum Preset {
+    TenMillion,
+    FiftyMillion,
+    HundredMillion,
+}
+
+impl Preset {
+    fn label(self) -> &'static str {
+        match self {
+            Preset::TenMillion => "10 M",
+            Preset::FiftyMillion => "50 M",
+            Preset::HundredMillion => "100 M",
+        }
+    }
+
+    fn value(self) -> u64 {
+        match self {
+            Preset::TenMillion => 10_000_000,
+            Preset::FiftyMillion => 50_000_000,
+            Preset::HundredMillion => 100_000_000,
+        }
+    }
+
+    const ALL: [Preset; 3] = [
+        Preset::TenMillion,
+        Preset::FiftyMillion,
+        Preset::HundredMillion,
+    ];
+}
+
+struct ChunkResult {
+    count: u64,
+}
+
+struct Run {
+    limit: u64,
+    chunks_done: u64,
+    chunk_results: Vec<ChunkResult>,
+    total: Option<u64>,
+    elapsed: Option<f64>,
+}
+
+struct HelloWeb {
+    selected_preset: Preset,
+    current_run: Option<Run>,
+    history: Vec<SharedString>,
+    _tasks: Vec<Task<()>>,
+}
+
+impl HelloWeb {
+    fn new(_cx: &mut Context<Self>) -> Self {
+        Self {
+            selected_preset: Preset::TenMillion,
+            current_run: None,
+            history: Vec::new(),
+            _tasks: Vec::new(),
+        }
+    }
+
+    fn start_search(&mut self, cx: &mut Context<Self>) {
+        let limit = self.selected_preset.value();
+        let chunk_size = limit / NUM_CHUNKS;
+
+        self.current_run = Some(Run {
+            limit,
+            chunks_done: 0,
+            chunk_results: Vec::new(),
+            total: None,
+            elapsed: None,
+        });
+        self._tasks.clear();
+        cx.notify();
+
+        let start_time = web_time::Instant::now();
+
+        for i in 0..NUM_CHUNKS {
+            let range_start = i * chunk_size;
+            let range_end = if i == NUM_CHUNKS - 1 {
+                limit
+            } else {
+                range_start + chunk_size
+            };
+
+            let task = cx.spawn(async move |this, cx| {
+                let count = cx
+                    .background_spawn(async move { count_primes_in_range(range_start, range_end) })
+                    .await;
+
+                this.update(cx, |this, cx| {
+                    if let Some(run) = &mut this.current_run {
+                        run.chunk_results.push(ChunkResult { count });
+                        run.chunks_done += 1;
+
+                        if run.chunks_done == NUM_CHUNKS {
+                            let total: u64 = run.chunk_results.iter().map(|r| r.count).sum();
+                            let elapsed_ms = start_time.elapsed().as_secs_f64() * 1000.0;
+                            run.total = Some(total);
+                            run.elapsed = Some(elapsed_ms);
+                            this.history.push(
+                                format!(
+                                    "π({}) = {} ({:.0} ms, {} chunks)",
+                                    format_number(run.limit),
+                                    format_number(total),
+                                    elapsed_ms,
+                                    NUM_CHUNKS,
+                                )
+                                .into(),
+                            );
+                        }
+                        cx.notify();
+                    }
+                })
+                .ok();
+            });
+
+            self._tasks.push(task);
+        }
+    }
+}
+
+fn format_number(n: u64) -> String {
+    let s = n.to_string();
+    let mut result = String::new();
+    for (i, ch) in s.chars().rev().enumerate() {
+        if i > 0 && i % 3 == 0 {
+            result.push(',');
+        }
+        result.push(ch);
+    }
+    result.chars().rev().collect()
+}
+
+// ---------------------------------------------------------------------------
+// Render
+// ---------------------------------------------------------------------------
+
+const BG_BASE: u32 = 0x1e1e2e;
+const BG_SURFACE: u32 = 0x313244;
+const BG_OVERLAY: u32 = 0x45475a;
+const TEXT_PRIMARY: u32 = 0xcdd6f4;
+const TEXT_SECONDARY: u32 = 0xa6adc8;
+const TEXT_DIM: u32 = 0x6c7086;
+const ACCENT_YELLOW: u32 = 0xf9e2af;
+const ACCENT_GREEN: u32 = 0xa6e3a1;
+const ACCENT_BLUE: u32 = 0x89b4fa;
+const ACCENT_MAUVE: u32 = 0xcba6f7;
+
+impl Render for HelloWeb {
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        let is_running = self.current_run.as_ref().is_some_and(|r| r.total.is_none());
+
+        // -- Preset buttons --
+        let preset_row = Preset::ALL.iter().enumerate().fold(
+            div().flex().flex_row().gap_2(),
+            |row, (index, &preset)| {
+                let is_selected = preset == self.selected_preset;
+                let (bg, text_color) = if is_selected {
+                    (ACCENT_BLUE, BG_BASE)
+                } else {
+                    (BG_OVERLAY, TEXT_SECONDARY)
+                };
+                row.child(
+                    div()
+                        .id(ElementId::NamedInteger("preset".into(), index as u64))
+                        .px_3()
+                        .py_1()
+                        .rounded_md()
+                        .bg(rgb(bg))
+                        .text_color(rgb(text_color))
+                        .text_sm()
+                        .cursor_pointer()
+                        .when(!is_running, |this| {
+                            this.on_click(cx.listener(move |this, _event, _window, _cx| {
+                                this.selected_preset = preset;
+                            }))
+                        })
+                        .child(preset.label()),
+                )
+            },
+        );
+
+        // -- Go button --
+        let (go_bg, go_text, go_label) = if is_running {
+            (BG_OVERLAY, TEXT_DIM, "Running…")
+        } else {
+            (ACCENT_GREEN, BG_BASE, "Count Primes")
+        };
+        let go_button = div()
+            .id("go")
+            .px_4()
+            .py(px(6.))
+            .rounded_md()
+            .bg(rgb(go_bg))
+            .text_color(rgb(go_text))
+            .cursor_pointer()
+            .when(!is_running, |this| {
+                this.on_click(cx.listener(|this, _event, _window, cx| {
+                    this.start_search(cx);
+                }))
+            })
+            .child(go_label);
+
+        // -- Progress / result area --
+        let status_area = if let Some(run) = &self.current_run {
+            let progress_fraction = run.chunks_done as f32 / NUM_CHUNKS as f32;
+            let progress_pct = (progress_fraction * 100.0) as u32;
+
+            let status_text: SharedString = if let Some(total) = run.total {
+                format!(
+                    "Found {} primes below {} in {:.0} ms",
+                    format_number(total),
+                    format_number(run.limit),
+                    run.elapsed.unwrap_or(0.0),
+                )
+                .into()
+            } else {
+                format!(
+                    "Searching up to {} … {}/{} chunks  ({}%)",
+                    format_number(run.limit),
+                    run.chunks_done,
+                    NUM_CHUNKS,
+                    progress_pct,
+                )
+                .into()
+            };
+
+            let bar_color = if run.total.is_some() {
+                ACCENT_GREEN
+            } else {
+                ACCENT_BLUE
+            };
+
+            let chunk_dots =
+                (0..NUM_CHUNKS as usize).fold(div().flex().flex_row().gap_1().mt_2(), |row, i| {
+                    let done = i < run.chunks_done as usize;
+                    let color = if done { ACCENT_MAUVE } else { BG_OVERLAY };
+                    row.child(div().size(px(10.)).rounded_sm().bg(rgb(color)))
+                });
+
+            div()
+                .flex()
+                .flex_col()
+                .w_full()
+                .gap_2()
+                .child(div().text_color(rgb(TEXT_PRIMARY)).child(status_text))
+                .child(
+                    div()
+                        .w_full()
+                        .h(px(8.))
+                        .rounded_sm()
+                        .bg(rgb(BG_OVERLAY))
+                        .child(
+                            div()
+                                .h_full()
+                                .rounded_sm()
+                                .bg(rgb(bar_color))
+                                .w(gpui::relative(progress_fraction)),
+                        ),
+                )
+                .child(chunk_dots)
+        } else {
+            div().flex().flex_col().w_full().child(
+                div()
+                    .text_color(rgb(TEXT_DIM))
+                    .child("Select a range and press Count Primes to begin."),
+            )
+        };
+
+        // -- History log --
+        let history_section = if self.history.is_empty() {
+            div()
+        } else {
+            self.history
+                .iter()
+                .rev()
+                .fold(div().flex().flex_col().gap_1(), |col, entry| {
+                    col.child(
+                        div()
+                            .text_sm()
+                            .text_color(rgb(TEXT_SECONDARY))
+                            .child(entry.clone()),
+                    )
+                })
+        };
+
+        // -- Layout --
+        div()
+            .flex()
+            .flex_col()
+            .size_full()
+            .bg(rgb(BG_BASE))
+            .justify_center()
+            .items_center()
+            .gap_4()
+            .p_4()
+            // Title
+            .child(
+                div()
+                    .text_xl()
+                    .text_color(rgb(TEXT_PRIMARY))
+                    .child("Prime Sieve — GPUI Web"),
+            )
+            .child(div().text_sm().text_color(rgb(TEXT_DIM)).child(format!(
+                "Background threads: {} · Chunks per run: {}",
+                std::thread::available_parallelism().map_or(2, |n| n.get().max(2)),
+                NUM_CHUNKS,
+            )))
+            // Controls
+            .child(
+                div()
+                    .flex()
+                    .flex_col()
+                    .items_center()
+                    .gap_3()
+                    .p_4()
+                    .w(px(500.))
+                    .rounded_lg()
+                    .bg(rgb(BG_SURFACE))
+                    .child(
+                        div()
+                            .text_sm()
+                            .text_color(rgb(ACCENT_YELLOW))
+                            .child("Count primes below:"),
+                    )
+                    .child(preset_row)
+                    .child(go_button),
+            )
+            // Status
+            .child(
+                div()
+                    .flex()
+                    .flex_col()
+                    .w(px(500.))
+                    .p_4()
+                    .rounded_lg()
+                    .bg(rgb(BG_SURFACE))
+                    .child(status_area),
+            )
+            // History
+            .when(!self.history.is_empty(), |this| {
+                this.child(
+                    div()
+                        .flex()
+                        .flex_col()
+                        .w(px(500.))
+                        .p_4()
+                        .rounded_lg()
+                        .bg(rgb(BG_SURFACE))
+                        .gap_2()
+                        .child(div().text_sm().text_color(rgb(TEXT_DIM)).child("History"))
+                        .child(history_section),
+                )
+            })
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Entry point
+// ---------------------------------------------------------------------------
+
+fn main() {
+    gpui_platform::web_init();
+    gpui_platform::application().run(|cx: &mut App| {
+        let bounds = Bounds::centered(None, size(px(640.), px(560.)), cx);
+        cx.open_window(
+            WindowOptions {
+                window_bounds: Some(WindowBounds::Windowed(bounds)),
+                ..Default::default()
+            },
+            |_, cx| cx.new(HelloWeb::new),
+        )
+        .expect("failed to open window");
+        cx.activate(true);
+    });
+}

crates/gpui_web/examples/hello_web/trunk.toml 🔗

@@ -0,0 +1,7 @@
+[serve]
+addresses = ["127.0.0.1"]
+port = 8080
+open = true
+
+# Headers required for WebGPU / SharedArrayBuffer support.
+headers = { "Cross-Origin-Embedder-Policy" = "require-corp", "Cross-Origin-Opener-Policy" = "same-origin" }

crates/gpui_web/src/dispatcher.rs 🔗

@@ -0,0 +1,345 @@
+use gpui::{

+    PlatformDispatcher, Priority, PriorityQueueReceiver, PriorityQueueSender, RunnableVariant,

+    ThreadTaskTimings,

+};

+use std::sync::Arc;

+use std::sync::atomic::AtomicI32;

+use std::time::Duration;

+use wasm_bindgen::prelude::*;

+use web_time::Instant;

+

#[cfg(feature = "multithreaded")]
const MIN_BACKGROUND_THREADS: usize = 2;

/// True when this page can do shared-memory threading: the `SharedArrayBuffer`
/// and `Atomics` globals exist, and the wasm module's memory is actually
/// backed by a shared buffer.
#[cfg(feature = "multithreaded")]
fn shared_memory_supported() -> bool {
    let global = js_sys::global();
    if !js_sys::Reflect::has(&global, &JsValue::from_str("SharedArrayBuffer")).unwrap_or(false) {
        return false;
    }
    if !js_sys::Reflect::has(&global, &JsValue::from_str("Atomics")).unwrap_or(false) {
        return false;
    }
    // The globals can exist while the memory itself is unshared (e.g. missing
    // COOP/COEP headers), so also check the actual backing buffer.
    js_sys::WebAssembly::Memory::from(wasm_bindgen::memory())
        .buffer()
        .is_instance_of::<js_sys::SharedArrayBuffer>()
}

+

/// Work posted from other threads for execution on the browser's main thread.
enum MainThreadItem {
    /// Run the task on the next mailbox drain.
    Runnable(RunnableVariant),
    /// Arm a `setTimeout` on the main thread and run the task after `millis`.
    Delayed {
        runnable: RunnableVariant,
        millis: i32,
    },
    // TODO-Wasm: Shouldn't these run on their own dedicated thread?
    /// A latency-sensitive callback relayed from `spawn_realtime`.
    RealtimeFunction(Box<dyn FnOnce() + Send>),
}

+

/// Cross-thread queue of work destined for the browser main thread, paired
/// with an atomic flag used to wake the main thread via `Atomics.notify`.
struct MainThreadMailbox {
    sender: PriorityQueueSender<MainThreadItem>,
    // Guarded by a mutex so `drain` has exclusive access; the main thread only
    // ever uses the non-blocking `spin` accessors on it.
    receiver: parking_lot::Mutex<PriorityQueueReceiver<MainThreadItem>>,
    // Wake flag: `post` stores 1 / notifies, the waker loop stores 0 before
    // waiting. Observed from JS through an `Int32Array` view (`signal_view`).
    signal: AtomicI32,
}

+

+impl MainThreadMailbox {

+    fn new() -> Self {

+        let (sender, receiver) = PriorityQueueReceiver::new();

+        Self {

+            sender,

+            receiver: parking_lot::Mutex::new(receiver),

+            signal: AtomicI32::new(0),

+        }

+    }

+

+    fn post(&self, priority: Priority, item: MainThreadItem) {

+        if self.sender.spin_send(priority, item).is_err() {

+            log::error!("MainThreadMailbox::send failed: receiver disconnected");

+        }

+

+        // TODO-Wasm: Verify this lock-free protocol

+        let view = self.signal_view();

+        js_sys::Atomics::store(&view, 0, 1).ok();

+        js_sys::Atomics::notify(&view, 0).ok();

+    }

+

+    fn drain(&self, window: &web_sys::Window) {

+        let mut receiver = self.receiver.lock();

+        loop {

+            // We need these `spin` variants because we can't acquire a lock on the main thread.

+            // TODO-WASM: Should we do something different?

+            match receiver.spin_try_pop() {

+                Ok(Some(item)) => execute_on_main_thread(window, item),

+                Ok(None) => break,

+                Err(_) => break,

+            }

+        }

+    }

+

+    fn signal_view(&self) -> js_sys::Int32Array {

+        let byte_offset = self.signal.as_ptr() as u32;

+        let memory = js_sys::WebAssembly::Memory::from(wasm_bindgen::memory());

+        js_sys::Int32Array::new_with_byte_offset_and_length(&memory.buffer(), byte_offset, 1)

+    }

+

+    fn run_waker_loop(self: &Arc<Self>, window: web_sys::Window) {

+        if !shared_memory_supported() {

+            log::warn!("SharedArrayBuffer not available; main thread mailbox waker loop disabled");

+            return;

+        }

+

+        let mailbox = Arc::clone(self);

+        wasm_bindgen_futures::spawn_local(async move {

+            let view = mailbox.signal_view();

+            loop {

+                js_sys::Atomics::store(&view, 0, 0).expect("Atomics.store failed");

+

+                let result = match js_sys::Atomics::wait_async(&view, 0, 0) {

+                    Ok(result) => result,

+                    Err(error) => {

+                        log::error!("Atomics.waitAsync failed: {error:?}");

+                        break;

+                    }

+                };

+

+                let is_async = js_sys::Reflect::get(&result, &JsValue::from_str("async"))

+                    .ok()

+                    .and_then(|v| v.as_bool())

+                    .unwrap_or(false);

+

+                if !is_async {

+                    log::error!("Atomics.waitAsync returned synchronously; waker loop exiting");

+                    break;

+                }

+

+                let promise: js_sys::Promise =

+                    js_sys::Reflect::get(&result, &JsValue::from_str("value"))

+                        .expect("waitAsync result missing 'value'")

+                        .unchecked_into();

+

+                let _ = wasm_bindgen_futures::JsFuture::from(promise).await;

+

+                mailbox.drain(&window);

+            }

+        });

+    }

+}

+

/// `PlatformDispatcher` implementation for the browser.
///
/// With the `multithreaded` feature and shared wasm memory available,
/// background work runs on `wasm_thread` workers; otherwise everything is
/// scheduled on the browser main thread.
pub struct WebDispatcher {
    // Captured at construction; used to answer `is_main_thread`.
    main_thread_id: std::thread::ThreadId,
    browser_window: web_sys::Window,
    background_sender: PriorityQueueSender<RunnableVariant>,
    main_thread_mailbox: Arc<MainThreadMailbox>,
    // True only when background worker threads were actually started.
    supports_threads: bool,
    // Kept solely so the worker handles aren't dropped.
    #[cfg(feature = "multithreaded")]
    _background_threads: Vec<wasm_thread::JoinHandle<()>>,
}

// Safety: `web_sys::Window` is only accessed from the main thread — every use
// of `browser_window` in this impl is behind an `on_main_thread()` check.
// All other fields are `Send + Sync` by construction.
unsafe impl Send for WebDispatcher {}
unsafe impl Sync for WebDispatcher {}

+

impl WebDispatcher {
    /// Creates the dispatcher on what becomes the "main" thread.
    ///
    /// `allow_threads` permits background workers, but they are only started
    /// when the `multithreaded` feature is compiled in AND the page provides
    /// shared wasm memory.
    pub fn new(browser_window: web_sys::Window, allow_threads: bool) -> Self {
        #[cfg(feature = "multithreaded")]
        let (background_sender, background_receiver) = PriorityQueueReceiver::new();
        // Without the feature the receiver half is dropped immediately; the
        // sender is never used because `dispatch` sees `supports_threads == false`.
        #[cfg(not(feature = "multithreaded"))]
        let (background_sender, _) = PriorityQueueReceiver::new();

        let main_thread_mailbox = Arc::new(MainThreadMailbox::new());

        #[cfg(feature = "multithreaded")]
        let supports_threads = allow_threads && shared_memory_supported();
        #[cfg(not(feature = "multithreaded"))]
        let supports_threads = false;

        if supports_threads {
            main_thread_mailbox.run_waker_loop(browser_window.clone());
        } else {
            log::warn!(
                "SharedArrayBuffer not available; falling back to single-threaded dispatcher"
            );
        }

        #[cfg(feature = "multithreaded")]
        let background_threads = if supports_threads {
            // One worker per logical core, but never fewer than the minimum.
            let thread_count = browser_window
                .navigator()
                .hardware_concurrency()
                .max(MIN_BACKGROUND_THREADS as f64) as usize;

            // TODO-Wasm: Is it bad to have web workers blocking for a long time like this?
            (0..thread_count)
                .map(|i| {
                    let mut receiver = background_receiver.clone();
                    wasm_thread::Builder::new()
                        .name(format!("background-worker-{i}"))
                        .spawn(move || {
                            // Each worker blocks on the shared queue until the
                            // channel disconnects (dispatcher dropped).
                            loop {
                                let runnable: RunnableVariant = match receiver.pop() {
                                    Ok(runnable) => runnable,
                                    Err(_) => {
                                        log::info!(
                                            "background-worker-{i}: channel disconnected, exiting"
                                        );
                                        break;
                                    }
                                };

                                // Skip tasks whose owner has already dropped them.
                                if runnable.metadata().is_closed() {
                                    continue;
                                }

                                runnable.run();
                            }
                        })
                        .expect("failed to spawn background worker thread")
                })
                .collect::<Vec<_>>()
        } else {
            Vec::new()
        };

        Self {
            main_thread_id: std::thread::current().id(),
            browser_window,
            background_sender,
            main_thread_mailbox,
            supports_threads,
            #[cfg(feature = "multithreaded")]
            _background_threads: background_threads,
        }
    }

    /// True when called from the thread that constructed this dispatcher.
    fn on_main_thread(&self) -> bool {
        std::thread::current().id() == self.main_thread_id
    }
}

+

impl PlatformDispatcher for WebDispatcher {
    fn get_all_timings(&self) -> Vec<ThreadTaskTimings> {
        // Task-timing collection is not implemented on web; returns empty.
        // TODO-Wasm: should we panic here?
        Vec::new()
    }

    fn get_current_thread_timings(&self) -> ThreadTaskTimings {
        // Placeholder: identifies the calling thread but records no timings.
        ThreadTaskTimings {
            thread_name: None,
            thread_id: std::thread::current().id(),
            timings: Vec::new(),
            total_pushed: 0,
        }
    }

    fn is_main_thread(&self) -> bool {
        self.on_main_thread()
    }

    /// Queues background work; without worker threads everything goes to the
    /// main thread instead.
    fn dispatch(&self, runnable: RunnableVariant, priority: Priority) {
        if !self.supports_threads {
            self.dispatch_on_main_thread(runnable, priority);
            return;
        }

        // The main thread must not block, so it uses the spinning send; worker
        // threads may use the blocking variant.
        let result = if self.on_main_thread() {
            self.background_sender.spin_send(priority, runnable)
        } else {
            self.background_sender.send(priority, runnable)
        };

        if let Err(error) = result {
            log::error!("dispatch: failed to send to background queue: {error:?}");
        }
    }

    fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority) {
        if self.on_main_thread() {
            // Already here: hand straight to the browser's scheduler.
            schedule_runnable(&self.browser_window, runnable, priority);
        } else {
            // Relay through the mailbox; the waker loop drains it on the main thread.
            self.main_thread_mailbox
                .post(priority, MainThreadItem::Runnable(runnable));
        }
    }

    /// Runs `runnable` on the main thread after `duration` (clamped to
    /// `i32::MAX` milliseconds, the `setTimeout` argument limit).
    fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
        let millis = duration.as_millis().min(i32::MAX as u128) as i32;
        if self.on_main_thread() {
            // Tasks dropped before the timer fires are skipped via `is_closed`.
            let callback = Closure::once_into_js(move || {
                if !runnable.metadata().is_closed() {
                    runnable.run();
                }
            });
            self.browser_window
                .set_timeout_with_callback_and_timeout_and_arguments_0(
                    callback.unchecked_ref(),
                    millis,
                )
                .ok();
        } else {
            // The timer must be armed on the main thread, so relay the delay
            // through the mailbox.
            self.main_thread_mailbox
                .post(Priority::High, MainThreadItem::Delayed { runnable, millis });
        }
    }

    /// Runs `function` as soon as possible — as a microtask when already on
    /// the main thread, otherwise relayed at high priority via the mailbox.
    fn spawn_realtime(&self, function: Box<dyn FnOnce() + Send>) {
        if self.on_main_thread() {
            let callback = Closure::once_into_js(move || {
                function();
            });
            self.browser_window
                .queue_microtask(callback.unchecked_ref());
        } else {
            self.main_thread_mailbox
                .post(Priority::High, MainThreadItem::RealtimeFunction(function));
        }
    }

    fn now(&self) -> Instant {
        // `web_time::Instant`: a monotonic clock usable on wasm targets.
        Instant::now()
    }
}

+

+fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) {

+    match item {

+        MainThreadItem::Runnable(runnable) => {

+            if !runnable.metadata().is_closed() {

+                runnable.run();

+            }

+        }

+        MainThreadItem::Delayed { runnable, millis } => {

+            let callback = Closure::once_into_js(move || {

+                if !runnable.metadata().is_closed() {

+                    runnable.run();

+                }

+            });

+            window

+                .set_timeout_with_callback_and_timeout_and_arguments_0(

+                    callback.unchecked_ref(),

+                    millis,

+                )

+                .ok();

+        }

+        MainThreadItem::RealtimeFunction(function) => {

+            function();

+        }

+    }

+}

+

+fn schedule_runnable(window: &web_sys::Window, runnable: RunnableVariant, priority: Priority) {

+    let callback = Closure::once_into_js(move || {

+        if !runnable.metadata().is_closed() {

+            runnable.run();

+        }

+    });

+    let callback: &js_sys::Function = callback.unchecked_ref();

+

+    match priority {

+        Priority::RealtimeAudio => {

+            window.queue_microtask(callback);

+        }

+        _ => {

+            // TODO-Wasm: this ought to enqueue so we can dequeue with proper priority

+            window

+                .set_timeout_with_callback_and_timeout_and_arguments_0(callback, 0)

+                .ok();

+        }

+    }

+}

crates/gpui_web/src/display.rs 🔗

@@ -0,0 +1,98 @@
+use anyhow::Result;

+use gpui::{Bounds, DisplayId, Pixels, PlatformDisplay, Point, Size, px};

+

/// The browser "display": models the physical screen / viewport for gpui.
#[derive(Debug)]
pub struct WebDisplay {
    id: DisplayId,
    uuid: uuid::Uuid,
    browser_window: web_sys::Window,
}

// Safety: `web_sys::Window` is not `Send`, and this is sound only while the
// display is used exclusively from the browser main thread.
// NOTE(review): the original comment said "WASM is single-threaded", but this
// crate also has a `multithreaded` mode (see dispatcher.rs) — confirm
// `WebDisplay` never crosses onto a worker thread.
unsafe impl Send for WebDisplay {}
unsafe impl Sync for WebDisplay {}

+

+impl WebDisplay {

+    pub fn new(browser_window: web_sys::Window) -> Self {

+        WebDisplay {

+            id: DisplayId::new(1),

+            uuid: uuid::Uuid::new_v4(),

+            browser_window,

+        }

+    }

+

+    fn screen_size(&self) -> Size<Pixels> {

+        let Some(screen) = self.browser_window.screen().ok() else {

+            return Size {

+                width: px(1920.),

+                height: px(1080.),

+            };

+        };

+

+        let width = screen.width().unwrap_or(1920) as f32;

+        let height = screen.height().unwrap_or(1080) as f32;

+

+        Size {

+            width: px(width),

+            height: px(height),

+        }

+    }

+

+    fn viewport_size(&self) -> Size<Pixels> {

+        let width = self

+            .browser_window

+            .inner_width()

+            .ok()

+            .and_then(|v| v.as_f64())

+            .unwrap_or(1920.0) as f32;

+        let height = self

+            .browser_window

+            .inner_height()

+            .ok()

+            .and_then(|v| v.as_f64())

+            .unwrap_or(1080.0) as f32;

+

+        Size {

+            width: px(width),

+            height: px(height),

+        }

+    }

+}

+

+impl PlatformDisplay for WebDisplay {

+    fn id(&self) -> DisplayId {

+        self.id

+    }

+

+    fn uuid(&self) -> Result<uuid::Uuid> {

+        Ok(self.uuid)

+    }

+

+    fn bounds(&self) -> Bounds<Pixels> {

+        let size = self.screen_size();

+        Bounds {

+            origin: Point::default(),

+            size,

+        }

+    }

+

+    fn visible_bounds(&self) -> Bounds<Pixels> {

+        let size = self.viewport_size();

+        Bounds {

+            origin: Point::default(),

+            size,

+        }

+    }

+

+    fn default_bounds(&self) -> Bounds<Pixels> {

+        let visible = self.visible_bounds();

+        let width = visible.size.width * 0.75;

+        let height = visible.size.height * 0.75;

+        let origin_x = (visible.size.width - width) / 2.0;

+        let origin_y = (visible.size.height - height) / 2.0;

+        Bounds {

+            origin: Point::new(origin_x, origin_y),

+            size: Size { width, height },

+        }

+    }

+}

crates/gpui_web/src/events.rs 🔗

@@ -0,0 +1,615 @@
+use std::rc::Rc;

+

+use gpui::{

+    Capslock, ExternalPaths, FileDropEvent, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers,

+    ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent,

+    MouseUpEvent, NavigationDirection, Pixels, PlatformInput, Point, ScrollDelta, ScrollWheelEvent,

+    TouchPhase, point, px,

+};

+use smallvec::smallvec;

+use wasm_bindgen::prelude::*;

+

+use crate::window::WebWindowInner;

+

/// Owns the JS event-listener closures for a window.
pub struct WebEventListeners {
    // Never read after registration, but must be retained: dropping a
    // `Closure` invalidates its JS callback.
    #[allow(dead_code)]
    closures: Vec<Closure<dyn FnMut(JsValue)>>,
}

+

/// Tracks successive pointer presses to derive multi-click counts
/// (double-click, triple-click, ...).
pub(crate) struct ClickState {
    // Position of the previous press, for the travel-distance check.
    last_position: Point<Pixels>,
    // Timestamp of the previous press in milliseconds (`js_sys::Date::now()`).
    last_time: f64,
    // Current run length: 1 = single click, 2 = double, ...
    current_count: usize,
}

impl Default for ClickState {
    fn default() -> Self {
        Self {
            last_position: Point::default(),
            last_time: 0.0,
            current_count: 0,
        }
    }
}

+

+impl ClickState {

+    fn register_click(&mut self, position: Point<Pixels>, time: f64) -> usize {

+        let distance = ((f32::from(position.x) - f32::from(self.last_position.x)).powi(2)

+            + (f32::from(position.y) - f32::from(self.last_position.y)).powi(2))

+        .sqrt();

+

+        if (time - self.last_time) < 400.0 && distance < 5.0 {

+            self.current_count += 1;

+        } else {

+            self.current_count = 1;

+        }

+

+        self.last_position = position;

+        self.last_time = time;

+        self.current_count

+    }

+}

+

impl WebWindowInner {
    /// Wires up all DOM event listeners on the canvas and returns the guard
    /// that keeps their closures alive.
    pub fn register_event_listeners(self: &Rc<Self>) -> WebEventListeners {
        let mut closures = vec![
            self.register_pointer_down(),
            self.register_pointer_up(),
            self.register_pointer_move(),
            self.register_pointer_leave(),
            self.register_wheel(),
            self.register_context_menu(),
            self.register_dragover(),
            self.register_drop(),
            self.register_dragleave(),
            self.register_key_down(),
            self.register_key_up(),
            self.register_focus(),
            self.register_blur(),
            self.register_pointer_enter(),
            self.register_pointer_leave_hover(),
        ];
        // NOTE(review): these two helpers are defined elsewhere in this file
        // and may each contribute several closures.
        closures.extend(self.register_visibility_change());
        closures.extend(self.register_appearance_change());

        WebEventListeners { closures }
    }

    /// Registers `handler` for `event_name` on the canvas; the returned
    /// closure must be kept alive for the listener to stay valid.
    fn listen(
        self: &Rc<Self>,
        event_name: &str,
        handler: impl FnMut(JsValue) + 'static,
    ) -> Closure<dyn FnMut(JsValue)> {
        let closure = Closure::<dyn FnMut(JsValue)>::new(handler);
        self.canvas
            .add_event_listener_with_callback(event_name, closure.as_ref().unchecked_ref())
            .ok();
        closure
    }

    /// Registers a listener with `{passive: false}` so that `preventDefault()` works.
    /// Needed for events like `wheel` which are passive by default in modern browsers.
    fn listen_non_passive(
        self: &Rc<Self>,
        event_name: &str,
        handler: impl FnMut(JsValue) + 'static,
    ) -> Closure<dyn FnMut(JsValue)> {
        let closure = Closure::<dyn FnMut(JsValue)>::new(handler);
        let canvas_js: &JsValue = self.canvas.as_ref();
        let callback_js: &JsValue = closure.as_ref();
        // web_sys's typed `add_event_listener_*` helpers don't take the options
        // object here, so call `addEventListener` reflectively.
        let options = js_sys::Object::new();
        js_sys::Reflect::set(&options, &"passive".into(), &false.into()).ok();
        if let Ok(add_fn_val) = js_sys::Reflect::get(canvas_js, &"addEventListener".into()) {
            if let Ok(add_fn) = add_fn_val.dyn_into::<js_sys::Function>() {
                add_fn
                    .call3(canvas_js, &event_name.into(), callback_js, &options)
                    .ok();
            }
        }
        closure
    }

    // Forwards a platform input event to the registered input callback, if any.
    fn dispatch_input(&self, input: PlatformInput) {
        let mut borrowed = self.callbacks.borrow_mut();
        if let Some(ref mut callback) = borrowed.input {
            callback(input);
        }
    }

    /// `pointerdown`: focuses the canvas, tracks click counts and pressed
    /// button, and emits `MouseDown`.
    fn register_pointer_down(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("pointerdown", move |event: JsValue| {
            let event: web_sys::PointerEvent = event.unchecked_into();
            event.prevent_default();
            // Focus so subsequent keyboard events reach the canvas.
            this.canvas.focus().ok();

            let button = dom_mouse_button_to_gpui(event.button());
            let position = pointer_position_in_element(&event);
            let modifiers = modifiers_from_mouse_event(&event, this.is_mac);
            let time = js_sys::Date::now();

            this.pressed_button.set(Some(button));
            let click_count = this.click_state.borrow_mut().register_click(position, time);

            {
                let mut current_state = this.state.borrow_mut();
                current_state.mouse_position = position;
                current_state.modifiers = modifiers;
            }

            this.dispatch_input(PlatformInput::MouseDown(MouseDownEvent {
                button,
                position,
                modifiers,
                click_count,
                first_mouse: false,
            }));
        })
    }

    /// `pointerup`: clears the pressed button and emits `MouseUp` with the
    /// click count from the preceding down.
    fn register_pointer_up(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("pointerup", move |event: JsValue| {
            let event: web_sys::PointerEvent = event.unchecked_into();
            event.prevent_default();

            let button = dom_mouse_button_to_gpui(event.button());
            let position = pointer_position_in_element(&event);
            let modifiers = modifiers_from_mouse_event(&event, this.is_mac);

            this.pressed_button.set(None);
            let click_count = this.click_state.borrow().current_count;

            {
                let mut current_state = this.state.borrow_mut();
                current_state.mouse_position = position;
                current_state.modifiers = modifiers;
            }

            this.dispatch_input(PlatformInput::MouseUp(MouseUpEvent {
                button,
                position,
                modifiers,
                click_count,
            }));
        })
    }

    /// `pointermove`: updates cached mouse state and emits `MouseMove`.
    fn register_pointer_move(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("pointermove", move |event: JsValue| {
            let event: web_sys::PointerEvent = event.unchecked_into();
            event.prevent_default();

            let position = pointer_position_in_element(&event);
            let modifiers = modifiers_from_mouse_event(&event, this.is_mac);
            let current_pressed = this.pressed_button.get();

            {
                let mut current_state = this.state.borrow_mut();
                current_state.mouse_position = position;
                current_state.modifiers = modifiers;
            }

            this.dispatch_input(PlatformInput::MouseMove(MouseMoveEvent {
                position,
                pressed_button: current_pressed,
                modifiers,
            }));
        })
    }

    /// `pointerleave` (input side): emits `MouseExited`. Hover-state
    /// bookkeeping for the same DOM event lives in
    /// `register_pointer_leave_hover`.
    fn register_pointer_leave(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("pointerleave", move |event: JsValue| {
            let event: web_sys::PointerEvent = event.unchecked_into();

            let position = pointer_position_in_element(&event);
            let modifiers = modifiers_from_mouse_event(&event, this.is_mac);
            let current_pressed = this.pressed_button.get();

            {
                let mut current_state = this.state.borrow_mut();
                current_state.mouse_position = position;
                current_state.modifiers = modifiers;
            }

            this.dispatch_input(PlatformInput::MouseExited(MouseExitEvent {
                position,
                pressed_button: current_pressed,
                modifiers,
            }));
        })
    }

    /// `wheel` (non-passive so `preventDefault` works): emits `ScrollWheel`.
    /// Delta signs are flipped to match gpui's scroll direction.
    fn register_wheel(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen_non_passive("wheel", move |event: JsValue| {
            let event: web_sys::WheelEvent = event.unchecked_into();
            event.prevent_default();

            let mouse_event: &web_sys::MouseEvent = event.as_ref();
            let position = mouse_position_in_element(mouse_event);
            let modifiers = modifiers_from_wheel_event(mouse_event, this.is_mac);

            // deltaMode 1 is DOM_DELTA_LINE; everything else is treated as pixels.
            let delta_mode = event.delta_mode();
            let delta = if delta_mode == 1 {
                ScrollDelta::Lines(point(-event.delta_x() as f32, -event.delta_y() as f32))
            } else {
                ScrollDelta::Pixels(point(
                    px(-event.delta_x() as f32),
                    px(-event.delta_y() as f32),
                ))
            };

            {
                let mut current_state = this.state.borrow_mut();
                current_state.modifiers = modifiers;
            }

            this.dispatch_input(PlatformInput::ScrollWheel(ScrollWheelEvent {
                position,
                delta,
                modifiers,
                touch_phase: TouchPhase::Moved,
            }));
        })
    }

    /// `contextmenu`: suppressed so right-click reaches the app instead of
    /// opening the browser menu.
    fn register_context_menu(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        self.listen("contextmenu", move |event: JsValue| {
            let event: web_sys::Event = event.unchecked_into();
            event.prevent_default();
        })
    }

    /// `dragover`: tracks the drag position and emits `FileDrop::Pending`.
    fn register_dragover(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("dragover", move |event: JsValue| {
            let event: web_sys::DragEvent = event.unchecked_into();
            // Required: without preventDefault the browser won't allow a drop.
            event.prevent_default();

            let mouse_event: &web_sys::MouseEvent = event.as_ref();
            let position = mouse_position_in_element(mouse_event);

            {
                let mut current_state = this.state.borrow_mut();
                current_state.mouse_position = position;
            }

            this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Pending { position }));
        })
    }

    /// `drop`: extracts the dropped file paths and emits `FileDrop::Entered`
    /// followed by `FileDrop::Submit`.
    fn register_drop(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("drop", move |event: JsValue| {
            let event: web_sys::DragEvent = event.unchecked_into();
            event.prevent_default();

            let mouse_event: &web_sys::MouseEvent = event.as_ref();
            let position = mouse_position_in_element(mouse_event);

            {
                let mut current_state = this.state.borrow_mut();
                current_state.mouse_position = position;
            }

            let paths = extract_file_paths_from_drag(&event);

            this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Entered {
                position,
                paths: ExternalPaths(paths),
            }));

            this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Submit { position }));
        })
    }

    /// `dragleave`: emits `FileDrop::Exited`.
    fn register_dragleave(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("dragleave", move |_event: JsValue| {
            this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Exited));
        })
    }

    /// `keydown`: always emits `ModifiersChanged`; for non-modifier keys also
    /// suppresses the browser default and emits `KeyDown`.
    fn register_key_down(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("keydown", move |event: JsValue| {
            let event: web_sys::KeyboardEvent = event.unchecked_into();

            let modifiers = modifiers_from_keyboard_event(&event, this.is_mac);
            let capslock = capslock_from_keyboard_event(&event);

            {
                let mut current_state = this.state.borrow_mut();
                current_state.modifiers = modifiers;
                current_state.capslock = capslock;
            }

            this.dispatch_input(PlatformInput::ModifiersChanged(ModifiersChangedEvent {
                modifiers,
                capslock,
            }));

            let key = dom_key_to_gpui_key(&event);

            // Pure modifier presses were already reported above.
            if is_modifier_only_key(&key) {
                return;
            }

            // Keep the browser from also acting on the key (scrolling, shortcuts).
            event.prevent_default();

            let is_held = event.repeat();
            let key_char = compute_key_char(&event, &key, &modifiers);

            let keystroke = Keystroke {
                modifiers,
                key,
                key_char,
            };

            this.dispatch_input(PlatformInput::KeyDown(KeyDownEvent {
                keystroke,
                is_held,
                prefer_character_input: false,
            }));
        })
    }

    /// `keyup`: mirror of `register_key_down`, emitting `KeyUp`.
    fn register_key_up(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("keyup", move |event: JsValue| {
            let event: web_sys::KeyboardEvent = event.unchecked_into();

            let modifiers = modifiers_from_keyboard_event(&event, this.is_mac);
            let capslock = capslock_from_keyboard_event(&event);

            {
                let mut current_state = this.state.borrow_mut();
                current_state.modifiers = modifiers;
                current_state.capslock = capslock;
            }

            this.dispatch_input(PlatformInput::ModifiersChanged(ModifiersChangedEvent {
                modifiers,
                capslock,
            }));

            let key = dom_key_to_gpui_key(&event);

            if is_modifier_only_key(&key) {
                return;
            }

            event.prevent_default();

            let key_char = compute_key_char(&event, &key, &modifiers);

            let keystroke = Keystroke {
                modifiers,
                key,
                key_char,
            };

            this.dispatch_input(PlatformInput::KeyUp(KeyUpEvent { keystroke }));
        })
    }

    /// `focus`: marks the window active and notifies the status callback.
    fn register_focus(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("focus", move |_event: JsValue| {
            {
                let mut state = this.state.borrow_mut();
                state.is_active = true;
            }
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.active_status_change {
                callback(true);
            }
        })
    }

    /// `blur`: marks the window inactive and notifies the status callback.
    fn register_blur(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("blur", move |_event: JsValue| {
            {
                let mut state = this.state.borrow_mut();
                state.is_active = false;
            }
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.active_status_change {
                callback(false);
            }
        })
    }

    /// `pointerenter`: marks the window hovered and notifies the hover callback.
    fn register_pointer_enter(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("pointerenter", move |_event: JsValue| {
            {
                let mut state = this.state.borrow_mut();
                state.is_hovered = true;
            }
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.hover_status_change {
                callback(true);
            }
        })
    }

    /// Second `pointerleave` listener, dedicated to hover-state bookkeeping
    /// (the input-event side is `register_pointer_leave`).
    fn register_pointer_leave_hover(self: &Rc<Self>) -> Closure<dyn FnMut(JsValue)> {
        let this = Rc::clone(self);
        self.listen("pointerleave", move |_event: JsValue| {
            {
                let mut state = this.state.borrow_mut();
                state.is_hovered = false;
            }
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.hover_status_change {
                callback(false);
            }
        })
    }
}

+

+fn dom_key_to_gpui_key(event: &web_sys::KeyboardEvent) -> String {

+    let key = event.key();

+    match key.as_str() {

+        "Enter" => "enter".to_string(),

+        "Backspace" => "backspace".to_string(),

+        "Tab" => "tab".to_string(),

+        "Escape" => "escape".to_string(),

+        "Delete" => "delete".to_string(),

+        " " => "space".to_string(),

+        "ArrowLeft" => "left".to_string(),

+        "ArrowRight" => "right".to_string(),

+        "ArrowUp" => "up".to_string(),

+        "ArrowDown" => "down".to_string(),

+        "Home" => "home".to_string(),

+        "End" => "end".to_string(),

+        "PageUp" => "pageup".to_string(),

+        "PageDown" => "pagedown".to_string(),

+        "Insert" => "insert".to_string(),

+        "Control" => "control".to_string(),

+        "Alt" => "alt".to_string(),

+        "Shift" => "shift".to_string(),

+        "Meta" => "platform".to_string(),

+        "CapsLock" => "capslock".to_string(),

+        other => {

+            if let Some(rest) = other.strip_prefix('F') {

+                if let Ok(number) = rest.parse::<u8>() {

+                    if (1..=35).contains(&number) {

+                        return format!("f{number}");

+                    }

+                }

+            }

+            other.to_lowercase()

+        }

+    }

+}

+

+fn dom_mouse_button_to_gpui(button: i16) -> MouseButton {

+    match button {

+        0 => MouseButton::Left,

+        1 => MouseButton::Middle,

+        2 => MouseButton::Right,

+        3 => MouseButton::Navigate(NavigationDirection::Back),

+        4 => MouseButton::Navigate(NavigationDirection::Forward),

+        _ => MouseButton::Left,

+    }

+}

+

+fn modifiers_from_keyboard_event(event: &web_sys::KeyboardEvent, _is_mac: bool) -> Modifiers {

+    Modifiers {

+        control: event.ctrl_key(),

+        alt: event.alt_key(),

+        shift: event.shift_key(),

+        platform: event.meta_key(),

+        function: false,

+    }

+}

+

+fn modifiers_from_mouse_event(event: &web_sys::PointerEvent, _is_mac: bool) -> Modifiers {

+    let mouse_event: &web_sys::MouseEvent = event.as_ref();

+    Modifiers {

+        control: mouse_event.ctrl_key(),

+        alt: mouse_event.alt_key(),

+        shift: mouse_event.shift_key(),

+        platform: mouse_event.meta_key(),

+        function: false,

+    }

+}

+

+fn modifiers_from_wheel_event(event: &web_sys::MouseEvent, _is_mac: bool) -> Modifiers {

+    Modifiers {

+        control: event.ctrl_key(),

+        alt: event.alt_key(),

+        shift: event.shift_key(),

+        platform: event.meta_key(),

+        function: false,

+    }

+}

+

+fn capslock_from_keyboard_event(event: &web_sys::KeyboardEvent) -> Capslock {

+    Capslock {

+        on: event.get_modifier_state("CapsLock"),

+    }

+}

+

+pub(crate) fn is_mac_platform(browser_window: &web_sys::Window) -> bool {

+    let navigator = browser_window.navigator();

+

+    #[allow(deprecated)]

+    // navigator.platform() is deprecated but navigator.userAgentData is not widely available yet

+    if let Ok(platform) = navigator.platform() {

+        if platform.contains("Mac") {

+            return true;

+        }

+    }

+

+    if let Ok(user_agent) = navigator.user_agent() {

+        return user_agent.contains("Mac");

+    }

+

+    false

+}

+

+fn is_modifier_only_key(key: &str) -> bool {

+    matches!(key, "control" | "alt" | "shift" | "platform" | "capslock")

+}

+

+fn compute_key_char(

+    event: &web_sys::KeyboardEvent,

+    gpui_key: &str,

+    modifiers: &Modifiers,

+) -> Option<String> {

+    if modifiers.platform || modifiers.control {

+        return None;

+    }

+

+    if is_modifier_only_key(gpui_key) {

+        return None;

+    }

+

+    if gpui_key == "space" {

+        return Some(" ".to_string());

+    }

+

+    let raw_key = event.key();

+

+    if raw_key.len() == 1 {

+        return Some(raw_key);

+    }

+

+    None

+}

+

+fn pointer_position_in_element(event: &web_sys::PointerEvent) -> Point<Pixels> {

+    let mouse_event: &web_sys::MouseEvent = event.as_ref();

+    mouse_position_in_element(mouse_event)

+}

+

+fn mouse_position_in_element(event: &web_sys::MouseEvent) -> Point<Pixels> {

+    // offset_x/offset_y give position relative to the target element's padding edge

+    point(px(event.offset_x() as f32), px(event.offset_y() as f32))

+}

+

+fn extract_file_paths_from_drag(

+    event: &web_sys::DragEvent,

+) -> smallvec::SmallVec<[std::path::PathBuf; 2]> {

+    let mut paths = smallvec![];

+    let Some(data_transfer) = event.data_transfer() else {

+        return paths;

+    };

+    let file_list = data_transfer.files();

+    let Some(files) = file_list else {

+        return paths;

+    };

+    for index in 0..files.length() {

+        if let Some(file) = files.get(index) {

+            paths.push(std::path::PathBuf::from(file.name()));

+        }

+    }

+    paths

+}

crates/gpui_web/src/gpui_web.rs 🔗

@@ -0,0 +1,18 @@
+#![cfg(target_family = "wasm")]
+
+mod dispatcher;
+mod display;
+mod events;
+mod http_client;
+mod keyboard;
+mod logging;
+mod platform;
+mod window;
+
+pub use dispatcher::WebDispatcher;
+pub use display::WebDisplay;
+pub use http_client::FetchHttpClient;
+pub use keyboard::WebKeyboardLayout;
+pub use logging::init_logging;
+pub use platform::WebPlatform;
+pub use window::WebWindow;

crates/gpui_web/src/http_client.rs 🔗

@@ -0,0 +1,199 @@
+use anyhow::anyhow;
+use futures::AsyncReadExt as _;
+use http_client::{AsyncBody, HttpClient, RedirectPolicy};
+use std::future::Future;
+use std::pin::Pin;
+use std::task::Poll;
+use wasm_bindgen::JsCast as _;
+use wasm_bindgen::prelude::*;
+
+#[wasm_bindgen]
+extern "C" {
+    #[wasm_bindgen(catch, js_name = "fetch")]
+    fn global_fetch(input: &web_sys::Request) -> Result<js_sys::Promise, JsValue>;
+}
+
+pub struct FetchHttpClient {
+    user_agent: Option<http_client::http::header::HeaderValue>,
+}
+
+impl Default for FetchHttpClient {
+    fn default() -> Self {
+        Self { user_agent: None }
+    }
+}
+
+#[cfg(feature = "multithreaded")]
+impl FetchHttpClient {
+    /// # Safety
+    ///
+    /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment.
+    pub unsafe fn new() -> Self {
+        Self::default()
+    }
+
+    /// # Safety
+    ///
+    /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment.
+    pub unsafe fn with_user_agent(user_agent: &str) -> anyhow::Result<Self> {
+        Ok(Self {
+            user_agent: Some(http_client::http::header::HeaderValue::from_str(
+                user_agent,
+            )?),
+        })
+    }
+}
+
+#[cfg(not(feature = "multithreaded"))]
+impl FetchHttpClient {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn with_user_agent(user_agent: &str) -> anyhow::Result<Self> {
+        Ok(Self {
+            user_agent: Some(http_client::http::header::HeaderValue::from_str(
+                user_agent,
+            )?),
+        })
+    }
+}
+
+/// Wraps a `!Send` future to satisfy the `Send` bound on `BoxFuture`.
+///
+/// Safety: only valid in WASM contexts where the `FetchHttpClient` is
+/// confined to a single thread (guaranteed by the caller via unsafe
+/// constructors when `multithreaded` is enabled, or by the absence of
+/// threads when it is not).
+struct AssertSend<F>(F);
+
+unsafe impl<F> Send for AssertSend<F> {}
+
+impl<F: Future> Future for AssertSend<F> {
+    type Output = F::Output;
+
+    fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
+        // Safety: pin projection for a single-field newtype wrapper.
+        let inner = unsafe { self.map_unchecked_mut(|this| &mut this.0) };
+        inner.poll(cx)
+    }
+}
+
+impl HttpClient for FetchHttpClient {
+    fn user_agent(&self) -> Option<&http_client::http::header::HeaderValue> {
+        self.user_agent.as_ref()
+    }
+
+    fn proxy(&self) -> Option<&http_client::Url> {
+        None
+    }
+
+    fn send(
+        &self,
+        req: http_client::http::Request<AsyncBody>,
+    ) -> futures::future::BoxFuture<'static, anyhow::Result<http_client::http::Response<AsyncBody>>>
+    {
+        let (parts, body) = req.into_parts();
+
+        Box::pin(AssertSend(async move {
+            let body_bytes = read_body_to_bytes(body).await?;
+
+            let init = web_sys::RequestInit::new();
+            init.set_method(parts.method.as_str());
+
+            if let Some(redirect_policy) = parts.extensions.get::<RedirectPolicy>() {
+                match redirect_policy {
+                    RedirectPolicy::NoFollow => {
+                        init.set_redirect(web_sys::RequestRedirect::Manual);
+                    }
+                    RedirectPolicy::FollowLimit(_) | RedirectPolicy::FollowAll => {
+                        init.set_redirect(web_sys::RequestRedirect::Follow);
+                    }
+                }
+            }
+
+            if let Some(ref bytes) = body_bytes {
+                let uint8array = js_sys::Uint8Array::from(bytes.as_slice());
+                init.set_body(uint8array.as_ref());
+            }
+
+            let url = parts.uri.to_string();
+            let request = web_sys::Request::new_with_str_and_init(&url, &init)
+                .map_err(|error| anyhow!("failed to create fetch Request: {error:?}"))?;
+
+            let request_headers = request.headers();
+            for (name, value) in &parts.headers {
+                let value_str = value
+                    .to_str()
+                    .map_err(|_| anyhow!("non-ASCII header value for {name}"))?;
+                request_headers
+                    .set(name.as_str(), value_str)
+                    .map_err(|error| anyhow!("failed to set header {name}: {error:?}"))?;
+            }
+
+            let promise = global_fetch(&request)
+                .map_err(|error| anyhow!("fetch threw an error: {error:?}"))?;
+            let response_value = wasm_bindgen_futures::JsFuture::from(promise)
+                .await
+                .map_err(|error| anyhow!("fetch failed: {error:?}"))?;
+
+            let web_response: web_sys::Response = response_value
+                .dyn_into()
+                .map_err(|error| anyhow!("fetch result is not a Response: {error:?}"))?;
+
+            let status = web_response.status();
+            let mut builder = http_client::http::Response::builder().status(status);
+
+            // `Headers` is a JS iterable yielding `[name, value]` pairs.
+            // `js_sys::Array::from` calls `Array.from()` which accepts any iterable.
+            let header_pairs = js_sys::Array::from(&web_response.headers());
+            for index in 0..header_pairs.length() {
+                match header_pairs.get(index).dyn_into::<js_sys::Array>() {
+                    Ok(pair) => match (pair.get(0).as_string(), pair.get(1).as_string()) {
+                        (Some(name), Some(value)) => {
+                            builder = builder.header(name, value);
+                        }
+                        (name, value) => {
+                            log::warn!(
+                                "skipping response header at index {index}: \
+                                     name={name:?}, value={value:?}"
+                            );
+                        }
+                    },
+                    Err(entry) => {
+                        log::warn!("skipping non-array header entry at index {index}: {entry:?}");
+                    }
+                }
+            }
+
+            // The entire response body is eagerly buffered into memory via
+            // `arrayBuffer()`. The Fetch API does not expose a synchronous
+            // streaming interface; streaming would require `ReadableStream`
+            // interop which is significantly more complex.
+            let body_promise = web_response
+                .array_buffer()
+                .map_err(|error| anyhow!("failed to initiate response body read: {error:?}"))?;
+            let body_value = wasm_bindgen_futures::JsFuture::from(body_promise)
+                .await
+                .map_err(|error| anyhow!("failed to read response body: {error:?}"))?;
+            let array_buffer: js_sys::ArrayBuffer = body_value
+                .dyn_into()
+                .map_err(|error| anyhow!("response body is not an ArrayBuffer: {error:?}"))?;
+            let response_bytes = js_sys::Uint8Array::new(&array_buffer).to_vec();
+
+            builder
+                .body(AsyncBody::from(response_bytes))
+                .map_err(|error| anyhow!(error))
+        }))
+    }
+}
+
+async fn read_body_to_bytes(mut body: AsyncBody) -> anyhow::Result<Option<Vec<u8>>> {
+    let mut buffer = Vec::new();
+    body.read_to_end(&mut buffer).await?;
+    if buffer.is_empty() {
+        Ok(None)
+    } else {
+        Ok(Some(buffer))
+    }
+}

crates/gpui_web/src/keyboard.rs 🔗

@@ -0,0 +1,19 @@
+use gpui::PlatformKeyboardLayout;

+

+pub struct WebKeyboardLayout;

+

+impl WebKeyboardLayout {

+    pub fn new() -> Self {

+        WebKeyboardLayout

+    }

+}

+

+impl PlatformKeyboardLayout for WebKeyboardLayout {

+    fn id(&self) -> &str {

+        "us"

+    }

+

+    fn name(&self) -> &str {

+        "US"

+    }

+}

crates/gpui_web/src/logging.rs 🔗

@@ -0,0 +1,37 @@
+use log::{Level, Log, Metadata, Record};

+

+struct ConsoleLogger;

+

+impl Log for ConsoleLogger {

+    fn enabled(&self, _metadata: &Metadata) -> bool {

+        true

+    }

+

+    fn log(&self, record: &Record) {

+        if !self.enabled(record.metadata()) {

+            return;

+        }

+

+        let message = format!(

+            "[{}] {}: {}",

+            record.level(),

+            record.target(),

+            record.args()

+        );

+        let js_string = wasm_bindgen::JsValue::from_str(&message);

+

+        match record.level() {

+            Level::Error => web_sys::console::error_1(&js_string),

+            Level::Warn => web_sys::console::warn_1(&js_string),

+            Level::Info => web_sys::console::info_1(&js_string),

+            Level::Debug | Level::Trace => web_sys::console::log_1(&js_string),

+        }

+    }

+

+    fn flush(&self) {}

+}

+

+pub fn init_logging() {

+    log::set_logger(&ConsoleLogger).ok();

+    log::set_max_level(log::LevelFilter::Info);

+}

crates/gpui_web/src/platform.rs 🔗

@@ -0,0 +1,344 @@
+use crate::dispatcher::WebDispatcher;

+use crate::display::WebDisplay;

+use crate::keyboard::WebKeyboardLayout;

+use crate::window::WebWindow;

+use anyhow::Result;

+use futures::channel::oneshot;

+use gpui::{

+    Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DummyKeyboardMapper,

+    ForegroundExecutor, Keymap, Menu, MenuItem, PathPromptOptions, Platform, PlatformDisplay,

+    PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, PlatformWindow, Task,

+    ThermalState, WindowAppearance, WindowParams,

+};

+use gpui_wgpu::WgpuContext;

+use std::{

+    borrow::Cow,

+    cell::RefCell,

+    path::{Path, PathBuf},

+    rc::Rc,

+    sync::Arc,

+};

+

+static BUNDLED_FONTS: &[&[u8]] = &[

+    include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf"),

+    include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Italic.ttf"),

+    include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-SemiBold.ttf"),

+    include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-SemiBoldItalic.ttf"),

+    include_bytes!("../../../assets/fonts/lilex/Lilex-Regular.ttf"),

+    include_bytes!("../../../assets/fonts/lilex/Lilex-Bold.ttf"),

+    include_bytes!("../../../assets/fonts/lilex/Lilex-Italic.ttf"),

+    include_bytes!("../../../assets/fonts/lilex/Lilex-BoldItalic.ttf"),

+];

+

+pub struct WebPlatform {

+    browser_window: web_sys::Window,

+    background_executor: BackgroundExecutor,

+    foreground_executor: ForegroundExecutor,

+    text_system: Arc<dyn PlatformTextSystem>,

+    active_window: RefCell<Option<AnyWindowHandle>>,

+    active_display: Rc<dyn PlatformDisplay>,

+    callbacks: RefCell<WebPlatformCallbacks>,

+    wgpu_context: Rc<RefCell<Option<WgpuContext>>>,

+}

+

+#[derive(Default)]

+struct WebPlatformCallbacks {

+    open_urls: Option<Box<dyn FnMut(Vec<String>)>>,

+    quit: Option<Box<dyn FnMut()>>,

+    reopen: Option<Box<dyn FnMut()>>,

+    app_menu_action: Option<Box<dyn FnMut(&dyn Action)>>,

+    will_open_app_menu: Option<Box<dyn FnMut()>>,

+    validate_app_menu_command: Option<Box<dyn FnMut(&dyn Action) -> bool>>,

+    keyboard_layout_change: Option<Box<dyn FnMut()>>,

+    thermal_state_change: Option<Box<dyn FnMut()>>,

+}

+

+impl WebPlatform {

+    pub fn new(allow_multi_threading: bool) -> Self {

+        let browser_window =

+            web_sys::window().expect("must be running in a browser window context");

+        let dispatcher = Arc::new(WebDispatcher::new(

+            browser_window.clone(),

+            allow_multi_threading,

+        ));

+        let background_executor = BackgroundExecutor::new(dispatcher.clone());

+        let foreground_executor = ForegroundExecutor::new(dispatcher);

+        let text_system = Arc::new(gpui_wgpu::CosmicTextSystem::new_without_system_fonts(

+            "IBM Plex Sans",

+        ));

+        let fonts = BUNDLED_FONTS

+            .iter()

+            .map(|bytes| Cow::Borrowed(*bytes))

+            .collect();

+        if let Err(error) = text_system.add_fonts(fonts) {

+            log::error!("failed to load bundled fonts: {error:#}");

+        }

+        let text_system: Arc<dyn PlatformTextSystem> = text_system;

+        let active_display: Rc<dyn PlatformDisplay> =

+            Rc::new(WebDisplay::new(browser_window.clone()));

+

+        Self {

+            browser_window,

+            background_executor,

+            foreground_executor,

+            text_system,

+            active_window: RefCell::new(None),

+            active_display,

+            callbacks: RefCell::new(WebPlatformCallbacks::default()),

+            wgpu_context: Rc::new(RefCell::new(None)),

+        }

+    }

+}

+

+impl Platform for WebPlatform {

+    fn background_executor(&self) -> BackgroundExecutor {

+        self.background_executor.clone()

+    }

+

+    fn foreground_executor(&self) -> ForegroundExecutor {

+        self.foreground_executor.clone()

+    }

+

+    fn text_system(&self) -> Arc<dyn PlatformTextSystem> {

+        self.text_system.clone()

+    }

+

+    fn run(&self, on_finish_launching: Box<dyn 'static + FnOnce()>) {

+        let wgpu_context = self.wgpu_context.clone();

+        wasm_bindgen_futures::spawn_local(async move {

+            match WgpuContext::new_web().await {

+                Ok(context) => {

+                    log::info!("WebGPU context initialized successfully");

+                    *wgpu_context.borrow_mut() = Some(context);

+                    on_finish_launching();

+                }

+                Err(err) => {

+                    log::error!("Failed to initialize WebGPU context: {err:#}");

+                    on_finish_launching();

+                }

+            }

+        });

+    }

+

+    fn quit(&self) {

+        log::warn!("WebPlatform::quit called, but quitting is not supported in the browser.");

+    }

+

+    fn restart(&self, _binary_path: Option<PathBuf>) {}

+

+    fn activate(&self, _ignoring_other_apps: bool) {}

+

+    fn hide(&self) {}

+

+    fn hide_other_apps(&self) {}

+

+    fn unhide_other_apps(&self) {}

+

+    fn displays(&self) -> Vec<Rc<dyn PlatformDisplay>> {

+        vec![self.active_display.clone()]

+    }

+

+    fn primary_display(&self) -> Option<Rc<dyn PlatformDisplay>> {

+        Some(self.active_display.clone())

+    }

+

+    fn active_window(&self) -> Option<AnyWindowHandle> {

+        *self.active_window.borrow()

+    }

+

+    fn open_window(

+        &self,

+        handle: AnyWindowHandle,

+        params: WindowParams,

+    ) -> anyhow::Result<Box<dyn PlatformWindow>> {

+        let context_ref = self.wgpu_context.borrow();

+        let context = context_ref.as_ref().ok_or_else(|| {

+            anyhow::anyhow!("WebGPU context not initialized. Was Platform::run() called?")

+        })?;

+

+        let window = WebWindow::new(handle, params, context, self.browser_window.clone())?;

+        *self.active_window.borrow_mut() = Some(handle);

+        Ok(Box::new(window))

+    }

+

+    fn window_appearance(&self) -> WindowAppearance {

+        let Ok(Some(media_query)) = self

+            .browser_window

+            .match_media("(prefers-color-scheme: dark)")

+        else {

+            return WindowAppearance::Light;

+        };

+        if media_query.matches() {

+            WindowAppearance::Dark

+        } else {

+            WindowAppearance::Light

+        }

+    }

+

+    fn open_url(&self, url: &str) {

+        if let Err(error) = self.browser_window.open_with_url(url) {

+            log::warn!("Failed to open URL '{url}': {error:?}");

+        }

+    }

+

+    fn on_open_urls(&self, callback: Box<dyn FnMut(Vec<String>)>) {

+        self.callbacks.borrow_mut().open_urls = Some(callback);

+    }

+

+    fn register_url_scheme(&self, _url: &str) -> Task<Result<()>> {

+        Task::ready(Ok(()))

+    }

+

+    fn prompt_for_paths(

+        &self,

+        _options: PathPromptOptions,

+    ) -> oneshot::Receiver<Result<Option<Vec<PathBuf>>>> {

+        let (tx, rx) = oneshot::channel();

+        tx.send(Err(anyhow::anyhow!(

+            "prompt_for_paths is not supported on the web"

+        )))

+        .ok();

+        rx

+    }

+

+    fn prompt_for_new_path(

+        &self,

+        _directory: &Path,

+        _suggested_name: Option<&str>,

+    ) -> oneshot::Receiver<Result<Option<PathBuf>>> {

+        let (sender, receiver) = oneshot::channel();

+        sender

+            .send(Err(anyhow::anyhow!(

+                "prompt_for_new_path is not supported on the web"

+            )))

+            .ok();

+        receiver

+    }

+

+    fn can_select_mixed_files_and_dirs(&self) -> bool {

+        false

+    }

+

+    fn reveal_path(&self, _path: &Path) {}

+

+    fn open_with_system(&self, _path: &Path) {}

+

+    fn on_quit(&self, callback: Box<dyn FnMut()>) {

+        self.callbacks.borrow_mut().quit = Some(callback);

+    }

+

+    fn on_reopen(&self, callback: Box<dyn FnMut()>) {

+        self.callbacks.borrow_mut().reopen = Some(callback);

+    }

+

+    fn set_menus(&self, _menus: Vec<Menu>, _keymap: &Keymap) {}

+

+    fn set_dock_menu(&self, _menu: Vec<MenuItem>, _keymap: &Keymap) {}

+

+    fn on_app_menu_action(&self, callback: Box<dyn FnMut(&dyn Action)>) {

+        self.callbacks.borrow_mut().app_menu_action = Some(callback);

+    }

+

+    fn on_will_open_app_menu(&self, callback: Box<dyn FnMut()>) {

+        self.callbacks.borrow_mut().will_open_app_menu = Some(callback);

+    }

+

+    fn on_validate_app_menu_command(&self, callback: Box<dyn FnMut(&dyn Action) -> bool>) {

+        self.callbacks.borrow_mut().validate_app_menu_command = Some(callback);

+    }

+

+    fn thermal_state(&self) -> ThermalState {

+        ThermalState::Nominal

+    }

+

+    fn on_thermal_state_change(&self, callback: Box<dyn FnMut()>) {

+        self.callbacks.borrow_mut().thermal_state_change = Some(callback);

+    }

+

+    fn compositor_name(&self) -> &'static str {

+        "Web"

+    }

+

+    fn app_path(&self) -> Result<PathBuf> {

+        Err(anyhow::anyhow!("app_path is not available on the web"))

+    }

+

+    fn path_for_auxiliary_executable(&self, _name: &str) -> Result<PathBuf> {

+        Err(anyhow::anyhow!(

+            "path_for_auxiliary_executable is not available on the web"

+        ))

+    }

+

+    fn set_cursor_style(&self, style: CursorStyle) {

+        let css_cursor = match style {

+            CursorStyle::Arrow => "default",

+            CursorStyle::IBeam => "text",

+            CursorStyle::Crosshair => "crosshair",

+            CursorStyle::ClosedHand => "grabbing",

+            CursorStyle::OpenHand => "grab",

+            CursorStyle::PointingHand => "pointer",

+            CursorStyle::ResizeLeft | CursorStyle::ResizeRight | CursorStyle::ResizeLeftRight => {

+                "ew-resize"

+            }

+            CursorStyle::ResizeUp | CursorStyle::ResizeDown | CursorStyle::ResizeUpDown => {

+                "ns-resize"

+            }

+            CursorStyle::ResizeUpLeftDownRight => "nesw-resize",

+            CursorStyle::ResizeUpRightDownLeft => "nwse-resize",

+            CursorStyle::ResizeColumn => "col-resize",

+            CursorStyle::ResizeRow => "row-resize",

+            CursorStyle::IBeamCursorForVerticalLayout => "vertical-text",

+            CursorStyle::OperationNotAllowed => "not-allowed",

+            CursorStyle::DragLink => "alias",

+            CursorStyle::DragCopy => "copy",

+            CursorStyle::ContextualMenu => "context-menu",

+            CursorStyle::None => "none",

+        };

+

+        if let Some(document) = self.browser_window.document() {

+            if let Some(body) = document.body() {

+                if let Err(error) = body.style().set_property("cursor", css_cursor) {

+                    log::warn!("Failed to set cursor style: {error:?}");

+                }

+            }

+        }

+    }

+

+    fn should_auto_hide_scrollbars(&self) -> bool {

+        true

+    }

+

+    fn read_from_clipboard(&self) -> Option<ClipboardItem> {

+        None

+    }

+

+    fn write_to_clipboard(&self, _item: ClipboardItem) {}

+

+    fn write_credentials(&self, _url: &str, _username: &str, _password: &[u8]) -> Task<Result<()>> {

+        Task::ready(Err(anyhow::anyhow!(

+            "credential storage is not available on the web"

+        )))

+    }

+

+    fn read_credentials(&self, _url: &str) -> Task<Result<Option<(String, Vec<u8>)>>> {

+        Task::ready(Ok(None))

+    }

+

+    fn delete_credentials(&self, _url: &str) -> Task<Result<()>> {

+        Task::ready(Err(anyhow::anyhow!(

+            "credential storage is not available on the web"

+        )))

+    }

+

+    fn keyboard_layout(&self) -> Box<dyn PlatformKeyboardLayout> {

+        Box::new(WebKeyboardLayout)

+    }

+

+    fn keyboard_mapper(&self) -> Rc<dyn PlatformKeyboardMapper> {

+        Rc::new(DummyKeyboardMapper)

+    }

+

+    fn on_keyboard_layout_change(&self, callback: Box<dyn FnMut()>) {

+        self.callbacks.borrow_mut().keyboard_layout_change = Some(callback);

+    }

+}

crates/gpui_web/src/window.rs 🔗

@@ -0,0 +1,702 @@
+use crate::display::WebDisplay;

+use crate::events::{ClickState, WebEventListeners, is_mac_platform};

+use std::sync::Arc;

+use std::{cell::Cell, cell::RefCell, rc::Rc};

+

+use gpui::{

+    AnyWindowHandle, Bounds, Capslock, Decorations, DevicePixels, DispatchEventResult, GpuSpecs,

+    Modifiers, MouseButton, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput,

+    PlatformInputHandler, PlatformWindow, Point, PromptButton, PromptLevel, RequestFrameOptions,

+    ResizeEdge, Scene, Size, WindowAppearance, WindowBackgroundAppearance, WindowBounds,

+    WindowControlArea, WindowControls, WindowDecorations, WindowParams, px,

+};

+use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig};

+use wasm_bindgen::prelude::*;

+

+#[derive(Default)]

+pub(crate) struct WebWindowCallbacks {

+    pub(crate) request_frame: Option<Box<dyn FnMut(RequestFrameOptions)>>,

+    pub(crate) input: Option<Box<dyn FnMut(PlatformInput) -> DispatchEventResult>>,

+    pub(crate) active_status_change: Option<Box<dyn FnMut(bool)>>,

+    pub(crate) hover_status_change: Option<Box<dyn FnMut(bool)>>,

+    pub(crate) resize: Option<Box<dyn FnMut(Size<Pixels>, f32)>>,

+    pub(crate) moved: Option<Box<dyn FnMut()>>,

+    pub(crate) should_close: Option<Box<dyn FnMut() -> bool>>,

+    pub(crate) close: Option<Box<dyn FnOnce()>>,

+    pub(crate) appearance_changed: Option<Box<dyn FnMut()>>,

+    pub(crate) hit_test_window_control: Option<Box<dyn FnMut() -> Option<WindowControlArea>>>,

+}

+

+pub(crate) struct WebWindowMutableState {

+    pub(crate) renderer: WgpuRenderer,

+    pub(crate) bounds: Bounds<Pixels>,

+    pub(crate) scale_factor: f32,

+    pub(crate) max_texture_dimension: u32,

+    pub(crate) title: String,

+    pub(crate) input_handler: Option<PlatformInputHandler>,

+    pub(crate) is_fullscreen: bool,

+    pub(crate) is_active: bool,

+    pub(crate) is_hovered: bool,

+    pub(crate) mouse_position: Point<Pixels>,

+    pub(crate) modifiers: Modifiers,

+    pub(crate) capslock: Capslock,

+}

+

+pub(crate) struct WebWindowInner {

+    pub(crate) browser_window: web_sys::Window,

+    pub(crate) canvas: web_sys::HtmlCanvasElement,

+    pub(crate) has_device_pixel_support: bool,

+    pub(crate) is_mac: bool,

+    pub(crate) state: RefCell<WebWindowMutableState>,

+    pub(crate) callbacks: RefCell<WebWindowCallbacks>,

+    pub(crate) click_state: RefCell<ClickState>,

+    pub(crate) pressed_button: Cell<Option<MouseButton>>,

+    pub(crate) last_physical_size: Cell<(u32, u32)>,

+    pub(crate) notify_scale: Cell<bool>,

+    mql_handle: RefCell<Option<MqlHandle>>,

+    pending_physical_size: Cell<Option<(u32, u32)>>,

+}

+

+pub struct WebWindow {

+    inner: Rc<WebWindowInner>,

+    display: Rc<dyn PlatformDisplay>,

+    #[allow(dead_code)]

+    handle: AnyWindowHandle,

+    _raf_closure: Closure<dyn FnMut()>,

+    _resize_observer: Option<web_sys::ResizeObserver>,

+    _resize_observer_closure: Closure<dyn FnMut(js_sys::Array)>,

+    _event_listeners: WebEventListeners,

+}

+

impl WebWindow {
    /// Creates a browser-backed window: builds a `<canvas>`, styles it to
    /// fill the page, appends it to `<body>`, and wires up the wgpu renderer,
    /// a `requestAnimationFrame` loop, a `ResizeObserver`, and DOM event
    /// listeners.
    ///
    /// `_params` is ignored: the canvas is sized by CSS (100% of the body)
    /// rather than by the requested window bounds. Returns an error when the
    /// document or body are unavailable or canvas creation fails.
    pub fn new(
        handle: AnyWindowHandle,
        _params: WindowParams,
        context: &WgpuContext,
        browser_window: web_sys::Window,
    ) -> anyhow::Result<Self> {
        let document = browser_window
            .document()
            .ok_or_else(|| anyhow::anyhow!("No `document` found on window"))?;

        let canvas: web_sys::HtmlCanvasElement = document
            .create_element("canvas")
            .map_err(|e| anyhow::anyhow!("Failed to create canvas element: {e:?}"))?
            .dyn_into()
            .map_err(|e| anyhow::anyhow!("Created element is not a canvas: {e:?}"))?;

        let dpr = browser_window.device_pixel_ratio() as f32;
        let max_texture_dimension = context.device.limits().max_texture_dimension_2d;
        let has_device_pixel_support = check_device_pixel_support();

        // Make the canvas focusable so it can receive keyboard events.
        canvas.set_tab_index(0);

        let style = canvas.style();
        style
            .set_property("width", "100%")
            .map_err(|e| anyhow::anyhow!("Failed to set canvas width style: {e:?}"))?;
        style
            .set_property("height", "100%")
            .map_err(|e| anyhow::anyhow!("Failed to set canvas height style: {e:?}"))?;
        style
            .set_property("display", "block")
            .map_err(|e| anyhow::anyhow!("Failed to set canvas display style: {e:?}"))?;
        style
            .set_property("outline", "none")
            .map_err(|e| anyhow::anyhow!("Failed to set canvas outline style: {e:?}"))?;
        // `touch-action: none` keeps the browser from consuming touch
        // gestures itself, so pointer events reach the canvas.
        style
            .set_property("touch-action", "none")
            .map_err(|e| anyhow::anyhow!("Failed to set touch-action style: {e:?}"))?;

        let body = document
            .body()
            .ok_or_else(|| anyhow::anyhow!("No `body` found on document"))?;
        body.append_child(&canvas)
            .map_err(|e| anyhow::anyhow!("Failed to append canvas to body: {e:?}"))?;

        canvas.focus().ok();

        // The real size is reported asynchronously by the ResizeObserver;
        // start from zero.
        let device_size = Size {
            width: DevicePixels(0),
            height: DevicePixels(0),
        };

        let renderer_config = WgpuSurfaceConfig {
            size: device_size,
            transparent: false,
        };

        let renderer = WgpuRenderer::new_from_canvas(context, &canvas, renderer_config)?;

        let display: Rc<dyn PlatformDisplay> = Rc::new(WebDisplay::new(browser_window.clone()));

        let initial_bounds = Bounds {
            origin: Point::default(),
            size: Size::default(),
        };

        let mutable_state = WebWindowMutableState {
            renderer,
            bounds: initial_bounds,
            scale_factor: dpr,
            max_texture_dimension,
            title: String::new(),
            input_handler: None,
            is_fullscreen: false,
            is_active: true,
            is_hovered: false,
            mouse_position: Point::default(),
            modifiers: Modifiers::default(),
            capslock: Capslock::default(),
        };

        let is_mac = is_mac_platform(&browser_window);

        let inner = Rc::new(WebWindowInner {
            browser_window,
            canvas,
            has_device_pixel_support,
            is_mac,
            state: RefCell::new(mutable_state),
            callbacks: RefCell::new(WebWindowCallbacks::default()),
            click_state: RefCell::new(ClickState::default()),
            pressed_button: Cell::new(None),
            last_physical_size: Cell::new((0, 0)),
            notify_scale: Cell::new(false),
            mql_handle: RefCell::new(None),
            pending_physical_size: Cell::new(None),
        });

        // Kick off the frame loop; the closure re-schedules itself afterwards.
        let raf_closure = inner.create_raf_closure();
        inner.schedule_raf(&raf_closure);

        let resize_observer_closure = Self::create_resize_observer_closure(Rc::clone(&inner));
        let resize_observer =
            web_sys::ResizeObserver::new(resize_observer_closure.as_ref().unchecked_ref()).ok();

        if let Some(ref observer) = resize_observer {
            inner.observe_canvas(observer);
            inner.watch_dpr_changes(observer);
        }

        let event_listeners = inner.register_event_listeners();

        // The closures and observer are stored in the returned struct so the
        // JS callbacks stay alive: dropping a wasm-bindgen `Closure`
        // invalidates the function on the JS side.
        Ok(Self {
            inner,
            display,
            handle,
            _raf_closure: raf_closure,
            _resize_observer: resize_observer,
            _resize_observer_closure: resize_observer_closure,
            _event_listeners: event_listeners,
        })
    }

    /// Builds the `ResizeObserver` callback that keeps logical bounds, scale
    /// factor, and the pending physical canvas size in sync with the observed
    /// canvas size.
    ///
    /// When `devicePixelContentBoxSize` is supported the physical size comes
    /// straight from the browser; otherwise (Safari) it is derived from the
    /// CSS `contentRect` multiplied by the device pixel ratio. The actual
    /// canvas backing-store resize is deferred to `draw` via
    /// `pending_physical_size`.
    fn create_resize_observer_closure(
        inner: Rc<WebWindowInner>,
    ) -> Closure<dyn FnMut(js_sys::Array)> {
        Closure::new(move |entries: js_sys::Array| {
            let entry: web_sys::ResizeObserverEntry = match entries.get(0).dyn_into().ok() {
                Some(entry) => entry,
                None => return,
            };

            let dpr = inner.browser_window.device_pixel_ratio();
            let dpr_f32 = dpr as f32;

            let (physical_width, physical_height, logical_width, logical_height) =
                if inner.has_device_pixel_support {
                    let size: web_sys::ResizeObserverSize = entry
                        .device_pixel_content_box_size()
                        .get(0)
                        .unchecked_into();
                    let pw = size.inline_size() as u32;
                    let ph = size.block_size() as u32;
                    let lw = pw as f64 / dpr;
                    let lh = ph as f64 / dpr;
                    (pw, ph, lw as f32, lh as f32)
                } else {
                    // Safari fallback: use contentRect (always CSS px).
                    let rect = entry.content_rect();
                    let lw = rect.width() as f32;
                    let lh = rect.height() as f32;
                    let pw = (lw as f64 * dpr).round() as u32;
                    let ph = (lh as f64 * dpr).round() as u32;
                    (pw, ph, lw, lh)
                };

            // `notify_scale` is armed by the DPR watcher; consume it here so a
            // pure DPR change still propagates even if the pixel size is
            // unchanged.
            let scale_changed = inner.notify_scale.replace(false);
            let prev = inner.last_physical_size.get();
            let size_changed = prev != (physical_width, physical_height);

            if !scale_changed && !size_changed {
                return;
            }
            inner
                .last_physical_size
                .set((physical_width, physical_height));

            // Skip rendering to a zero-size canvas (e.g. display:none).
            if physical_width == 0 || physical_height == 0 {
                let mut s = inner.state.borrow_mut();
                s.bounds.size = Size::default();
                s.scale_factor = dpr_f32;
                // Still fire the callback so GPUI knows the window is gone.
                drop(s);
                let mut cbs = inner.callbacks.borrow_mut();
                if let Some(ref mut callback) = cbs.resize {
                    callback(Size::default(), dpr_f32);
                }
                return;
            }

            // Clamp to the GPU's texture limit so surface configuration
            // cannot exceed `max_texture_dimension_2d`.
            let max_texture_dimension = inner.state.borrow().max_texture_dimension;
            let clamped_width = physical_width.min(max_texture_dimension);
            let clamped_height = physical_height.min(max_texture_dimension);

            inner
                .pending_physical_size
                .set(Some((clamped_width, clamped_height)));

            {
                let mut s = inner.state.borrow_mut();
                s.bounds.size = Size {
                    width: px(logical_width),
                    height: px(logical_height),
                };
                s.scale_factor = dpr_f32;
            }

            let new_size = Size {
                width: px(logical_width),
                height: px(logical_height),
            };

            let mut cbs = inner.callbacks.borrow_mut();
            if let Some(ref mut callback) = cbs.resize {
                callback(new_size, dpr_f32);
            }
        })
    }
}

+

impl WebWindowInner {
    /// Creates the self-rescheduling `requestAnimationFrame` callback that
    /// drives GPUI's frame loop.
    ///
    /// The closure stores a handle to its own JS function (via the shared
    /// `raf_handle` cell, filled in after construction) so it can re-arm
    /// itself after every frame.
    fn create_raf_closure(self: &Rc<Self>) -> Closure<dyn FnMut()> {
        let raf_handle: Rc<RefCell<Option<js_sys::Function>>> = Rc::new(RefCell::new(None));
        let raf_handle_inner = Rc::clone(&raf_handle);

        let this = Rc::clone(self);
        let closure = Closure::new(move || {
            // Scoped so the callbacks borrow is released before rescheduling.
            {
                let mut callbacks = this.callbacks.borrow_mut();
                if let Some(ref mut callback) = callbacks.request_frame {
                    callback(RequestFrameOptions {
                        require_presentation: true,
                        force_render: false,
                    });
                }
            }

            // Re-schedule for the next frame
            if let Some(ref func) = *raf_handle_inner.borrow() {
                this.browser_window.request_animation_frame(func).ok();
            }
        });

        // Fill the cell with the closure's JS function so the closure body
        // above can reschedule itself.
        let js_func: js_sys::Function =
            closure.as_ref().unchecked_ref::<js_sys::Function>().clone();
        *raf_handle.borrow_mut() = Some(js_func);

        closure
    }

    /// Kicks off the first animation frame; subsequent frames are scheduled
    /// by the closure itself.
    fn schedule_raf(&self, closure: &Closure<dyn FnMut()>) {
        self.browser_window
            .request_animation_frame(closure.as_ref().unchecked_ref())
            .ok();
    }

    /// (Re-)registers the canvas with the observer, preferring device-pixel
    /// box observation when the browser supports it.
    fn observe_canvas(&self, observer: &web_sys::ResizeObserver) {
        // Unobserve first so this can also be used to re-observe after a DPR
        // change; a no-op on the first call.
        observer.unobserve(&self.canvas);
        if self.has_device_pixel_support {
            let options = web_sys::ResizeObserverOptions::new();
            options.set_box(web_sys::ResizeObserverBoxOptions::DevicePixelContentBox);
            observer.observe_with_options(&self.canvas, &options);
        } else {
            observer.observe(&self.canvas);
        }
    }

    /// Watches for device-pixel-ratio changes (e.g. the tab moving between
    /// monitors, or browser zoom).
    ///
    /// The media query matches only the *current* DPR, so its "change" event
    /// fires when the DPR stops matching; the handler then flags
    /// `notify_scale`, re-observes the canvas, and re-registers itself with a
    /// query for the new DPR. Storing the new `MqlHandle` drops the previous
    /// one, whose `Drop` impl removes its listener.
    fn watch_dpr_changes(self: &Rc<Self>, observer: &web_sys::ResizeObserver) {
        let current_dpr = self.browser_window.device_pixel_ratio();
        // `-webkit-device-pixel-ratio` covers engines that lack the standard
        // `resolution` feature.
        let media_query =
            format!("(resolution: {current_dpr}dppx), (-webkit-device-pixel-ratio: {current_dpr})");
        let Some(mql) = self.browser_window.match_media(&media_query).ok().flatten() else {
            return;
        };

        let this = Rc::clone(self);
        let observer = observer.clone();

        let closure = Closure::<dyn FnMut(JsValue)>::new(move |_event: JsValue| {
            this.notify_scale.set(true);
            this.observe_canvas(&observer);
            this.watch_dpr_changes(&observer);
        });

        mql.add_event_listener_with_callback("change", closure.as_ref().unchecked_ref())
            .ok();

        *self.mql_handle.borrow_mut() = Some(MqlHandle {
            mql,
            _closure: closure,
        });
    }

    /// Maps the document's `visibilitychange` event onto GPUI's active-status
    /// callback and the cached `is_active` flag.
    ///
    /// Returns the listener closure, which the caller must keep alive for the
    /// listener to keep working; `None` if there is no document.
    pub(crate) fn register_visibility_change(
        self: &Rc<Self>,
    ) -> Option<Closure<dyn FnMut(JsValue)>> {
        let document = self.browser_window.document()?;
        let this = Rc::clone(self);

        let closure = Closure::<dyn FnMut(JsValue)>::new(move |_event: JsValue| {
            // `visibilityState` is read via `Reflect` — presumably to avoid
            // requiring an extra web-sys feature; TODO confirm. Defaults to
            // visible when the property can't be read.
            let is_visible = this
                .browser_window
                .document()
                .map(|doc| {
                    let state_str: String = js_sys::Reflect::get(&doc, &"visibilityState".into())
                        .ok()
                        .and_then(|v| v.as_string())
                        .unwrap_or_default();
                    state_str == "visible"
                })
                .unwrap_or(true);

            {
                let mut state = this.state.borrow_mut();
                state.is_active = is_visible;
            }
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.active_status_change {
                callback(is_visible);
            }
        });

        document
            .add_event_listener_with_callback("visibilitychange", closure.as_ref().unchecked_ref())
            .ok();

        Some(closure)
    }

    /// Fires GPUI's appearance-changed callback whenever the
    /// `prefers-color-scheme` media query flips.
    ///
    /// Returns the listener closure, which the caller must keep alive; `None`
    /// if the media query cannot be created.
    pub(crate) fn register_appearance_change(
        self: &Rc<Self>,
    ) -> Option<Closure<dyn FnMut(JsValue)>> {
        let mql = self
            .browser_window
            .match_media("(prefers-color-scheme: dark)")
            .ok()??;

        let this = Rc::clone(self);
        let closure = Closure::<dyn FnMut(JsValue)>::new(move |_event: JsValue| {
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.appearance_changed {
                callback();
            }
        });

        mql.add_event_listener_with_callback("change", closure.as_ref().unchecked_ref())
            .ok();

        Some(closure)
    }
}

+

+fn current_appearance(browser_window: &web_sys::Window) -> WindowAppearance {

+    let is_dark = browser_window

+        .match_media("(prefers-color-scheme: dark)")

+        .ok()

+        .flatten()

+        .map(|mql| mql.matches())

+        .unwrap_or(false);

+

+    if is_dark {

+        WindowAppearance::Dark

+    } else {

+        WindowAppearance::Light

+    }

+}

+

/// Keeps a `MediaQueryList` "change" listener registered: holds both the list
/// and the wasm-bindgen closure backing the listener, and removes the
/// listener on drop.
struct MqlHandle {
    mql: web_sys::MediaQueryList,
    // Kept alive so the JS callback remains valid while registered.
    _closure: Closure<dyn FnMut(JsValue)>,
}

+

impl Drop for MqlHandle {
    /// Unregisters the "change" listener before the closure is invalidated,
    /// so the old media query stops firing once a new handle replaces this one.
    fn drop(&mut self) {
        self.mql
            .remove_event_listener_with_callback("change", self._closure.as_ref().unchecked_ref())
            .ok();
    }
}

+

+// Safari does not support `devicePixelContentBoxSize`, so detect whether it's available.

+fn check_device_pixel_support() -> bool {

+    let global: JsValue = js_sys::global().into();

+    let Ok(constructor) = js_sys::Reflect::get(&global, &"ResizeObserverEntry".into()) else {

+        return false;

+    };

+    let Ok(prototype) = js_sys::Reflect::get(&constructor, &"prototype".into()) else {

+        return false;

+    };

+    let descriptor = js_sys::Object::get_own_property_descriptor(

+        &prototype.unchecked_into::<js_sys::Object>(),

+        &"devicePixelContentBoxSize".into(),

+    );

+    !descriptor.is_undefined()

+}

+

impl raw_window_handle::HasWindowHandle for WebWindow {
    /// Exposes the backing canvas through raw-window-handle's `WebCanvas`
    /// handle type.
    fn window_handle(
        &self,
    ) -> Result<raw_window_handle::WindowHandle<'_>, raw_window_handle::HandleError> {
        // Pointer into the canvas's underlying JsValue; the canvas is owned
        // by `self.inner`.
        let canvas_ref: &JsValue = self.inner.canvas.as_ref();
        let obj = std::ptr::NonNull::from(canvas_ref).cast::<std::ffi::c_void>();
        let handle = raw_window_handle::WebCanvasWindowHandle::new(obj);
        // SAFETY: the returned handle borrows `self`, and the JsValue the
        // pointer refers to is kept alive by `self.inner` for the entire
        // duration of that borrow.
        Ok(unsafe { raw_window_handle::WindowHandle::borrow_raw(handle.into()) })
    }
}

+

impl raw_window_handle::HasDisplayHandle for WebWindow {
    /// The web platform has a single implicit display.
    fn display_handle(
        &self,
    ) -> Result<raw_window_handle::DisplayHandle<'_>, raw_window_handle::HandleError> {
        Ok(raw_window_handle::DisplayHandle::web())
    }
}

+

// GPUI's platform-window backend for the browser. Most state is served from
// the cached `WebWindowMutableState`; window-manager style operations
// (minimize, zoom, prompts) have no browser equivalent and are no-ops or
// warnings.
impl PlatformWindow for WebWindow {
    fn bounds(&self) -> Bounds<Pixels> {
        self.inner.state.borrow().bounds
    }

    // The browser has no maximize concept for a canvas-backed window.
    fn is_maximized(&self) -> bool {
        false
    }

    fn window_bounds(&self) -> WindowBounds {
        WindowBounds::Windowed(self.bounds())
    }

    fn content_size(&self) -> Size<Pixels> {
        self.inner.state.borrow().bounds.size
    }

    // Only updates the CSS size; the backing-store resize flows through the
    // ResizeObserver -> `pending_physical_size` -> `draw` path.
    fn resize(&mut self, size: Size<Pixels>) {
        let style = self.inner.canvas.style();
        style
            .set_property("width", &format!("{}px", f32::from(size.width)))
            .ok();
        style
            .set_property("height", &format!("{}px", f32::from(size.height)))
            .ok();
    }

    fn scale_factor(&self) -> f32 {
        self.inner.state.borrow().scale_factor
    }

    fn appearance(&self) -> WindowAppearance {
        current_appearance(&self.inner.browser_window)
    }

    fn display(&self) -> Option<Rc<dyn PlatformDisplay>> {
        Some(self.display.clone())
    }

    fn mouse_position(&self) -> Point<Pixels> {
        self.inner.state.borrow().mouse_position
    }

    fn modifiers(&self) -> Modifiers {
        self.inner.state.borrow().modifiers
    }

    fn capslock(&self) -> Capslock {
        self.inner.state.borrow().capslock
    }

    fn set_input_handler(&mut self, input_handler: PlatformInputHandler) {
        self.inner.state.borrow_mut().input_handler = Some(input_handler);
    }

    fn take_input_handler(&mut self) -> Option<PlatformInputHandler> {
        self.inner.state.borrow_mut().input_handler.take()
    }

    // Native prompts are not supported in the browser backend; returning
    // `None` signals the caller that no prompt will be shown.
    fn prompt(
        &self,
        _level: PromptLevel,
        _msg: &str,
        _detail: Option<&str>,
        _answers: &[PromptButton],
    ) -> Option<futures::channel::oneshot::Receiver<usize>> {
        None
    }

    fn activate(&self) {
        self.inner.state.borrow_mut().is_active = true;
    }

    fn is_active(&self) -> bool {
        self.inner.state.borrow().is_active
    }

    fn is_hovered(&self) -> bool {
        self.inner.state.borrow().is_hovered
    }

    fn background_appearance(&self) -> WindowBackgroundAppearance {
        WindowBackgroundAppearance::Opaque
    }

    // Mirrors the title into the document so the browser tab reflects it.
    fn set_title(&mut self, title: &str) {
        self.inner.state.borrow_mut().title = title.to_owned();
        if let Some(document) = self.inner.browser_window.document() {
            document.set_title(title);
        }
    }

    fn set_background_appearance(&self, _background: WindowBackgroundAppearance) {}

    fn minimize(&self) {
        log::warn!("WebWindow::minimize is not supported in the browser");
    }

    fn zoom(&self) {
        log::warn!("WebWindow::zoom is not supported in the browser");
    }

    // Optimistically flips the cached flag and asks the browser.
    // NOTE(review): the fullscreen request's outcome isn't observed (no
    // `fullscreenchange` listener visible here), so the cached flag can drift
    // if the request is denied — confirm.
    fn toggle_fullscreen(&self) {
        let mut state = self.inner.state.borrow_mut();
        state.is_fullscreen = !state.is_fullscreen;

        if state.is_fullscreen {
            let canvas: &web_sys::Element = self.inner.canvas.as_ref();
            canvas.request_fullscreen().ok();
        } else {
            if let Some(document) = self.inner.browser_window.document() {
                document.exit_fullscreen();
            }
        }
    }

    fn is_fullscreen(&self) -> bool {
        self.inner.state.borrow().is_fullscreen
    }

    fn on_request_frame(&self, callback: Box<dyn FnMut(RequestFrameOptions)>) {
        self.inner.callbacks.borrow_mut().request_frame = Some(callback);
    }

    fn on_input(&self, callback: Box<dyn FnMut(PlatformInput) -> DispatchEventResult>) {
        self.inner.callbacks.borrow_mut().input = Some(callback);
    }

    fn on_active_status_change(&self, callback: Box<dyn FnMut(bool)>) {
        self.inner.callbacks.borrow_mut().active_status_change = Some(callback);
    }

    fn on_hover_status_change(&self, callback: Box<dyn FnMut(bool)>) {
        self.inner.callbacks.borrow_mut().hover_status_change = Some(callback);
    }

    fn on_resize(&self, callback: Box<dyn FnMut(Size<Pixels>, f32)>) {
        self.inner.callbacks.borrow_mut().resize = Some(callback);
    }

    fn on_moved(&self, callback: Box<dyn FnMut()>) {
        self.inner.callbacks.borrow_mut().moved = Some(callback);
    }

    fn on_should_close(&self, callback: Box<dyn FnMut() -> bool>) {
        self.inner.callbacks.borrow_mut().should_close = Some(callback);
    }

    fn on_close(&self, callback: Box<dyn FnOnce()>) {
        self.inner.callbacks.borrow_mut().close = Some(callback);
    }

    fn on_hit_test_window_control(&self, callback: Box<dyn FnMut() -> Option<WindowControlArea>>) {
        self.inner.callbacks.borrow_mut().hit_test_window_control = Some(callback);
    }

    fn on_appearance_changed(&self, callback: Box<dyn FnMut()>) {
        self.inner.callbacks.borrow_mut().appearance_changed = Some(callback);
    }

    // Applies any resize deferred by the ResizeObserver just before drawing,
    // so the canvas backing store and the surface change size together.
    fn draw(&self, scene: &Scene) {
        if let Some((width, height)) = self.inner.pending_physical_size.take() {
            if self.inner.canvas.width() != width || self.inner.canvas.height() != height {
                self.inner.canvas.set_width(width);
                self.inner.canvas.set_height(height);
            }

            let mut state = self.inner.state.borrow_mut();
            state.renderer.update_drawable_size(Size {
                width: DevicePixels(width as i32),
                height: DevicePixels(height as i32),
            });
            drop(state);
        }

        self.inner.state.borrow_mut().renderer.draw(scene);
    }

    fn completed_frame(&self) {
        // On web, presentation happens automatically via wgpu surface present
    }

    fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas> {
        self.inner.state.borrow().renderer.sprite_atlas().clone()
    }

    fn is_subpixel_rendering_supported(&self) -> bool {
        self.inner
            .state
            .borrow()
            .renderer
            .supports_dual_source_blending()
    }

    fn gpu_specs(&self) -> Option<GpuSpecs> {
        Some(self.inner.state.borrow().renderer.gpu_specs())
    }

    fn update_ime_position(&self, _bounds: Bounds<Pixels>) {}

    fn request_decorations(&self, _decorations: WindowDecorations) {}

    fn show_window_menu(&self, _position: Point<Pixels>) {}

    fn start_window_move(&self) {}

    fn start_window_resize(&self, _edge: ResizeEdge) {}

    fn window_decorations(&self) -> Decorations {
        Decorations::Server
    }

    fn set_app_id(&mut self, _app_id: &str) {}

    // Only fullscreen is controllable from a browser canvas.
    fn window_controls(&self) -> WindowControls {
        WindowControls {
            fullscreen: true,
            maximize: false,
            minimize: false,
            window_menu: false,
        }
    }

    fn set_client_inset(&self, _inset: Pixels) {}
}

crates/gpui_wgpu/Cargo.toml 🔗

@@ -11,16 +11,36 @@ workspace = true
 [lib]
 path = "src/gpui_wgpu.rs"
 
-[target.'cfg(not(target_os = "windows"))'.dependencies]
+[features]
+default = []
+font-kit = ["dep:font-kit"]
+
+[dependencies]
 gpui.workspace = true
 anyhow.workspace = true
 bytemuck = "1"
 collections.workspace = true
+cosmic-text = "0.17.0"
 etagere = "0.2"
+itertools.workspace = true
 log.workspace = true
 parking_lot.workspace = true
 profiling.workspace = true
 raw-window-handle = "0.6"
-smol.workspace = true
-util.workspace = true
+smallvec.workspace = true
+swash = "0.2.6"
+gpui_util.workspace = true
 wgpu.workspace = true
+
+# Optional: only needed on platforms with multiple font sources (e.g. Linux)
+# WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io
+font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", optional = true }
+
+[target.'cfg(not(target_family = "wasm"))'.dependencies]
+pollster.workspace = true
+
+[target.'cfg(target_family = "wasm")'.dependencies]
+wasm-bindgen.workspace = true
+wasm-bindgen-futures = "0.4"
+web-sys = { version = "0.3", features = ["HtmlCanvasElement"] }
+js-sys = "0.3"

crates/gpui_wgpu/src/cosmic_text_system.rs 🔗

@@ -0,0 +1,645 @@
+use anyhow::{Context as _, Ok, Result};

+use collections::HashMap;

+use cosmic_text::{

+    Attrs, AttrsList, Family, Font as CosmicTextFont, FontFeatures as CosmicFontFeatures,

+    FontSystem, ShapeBuffer, ShapeLine,

+};

+use gpui::{

+    Bounds, DevicePixels, Font, FontFeatures, FontId, FontMetrics, FontRun, GlyphId, LineLayout,

+    Pixels, PlatformTextSystem, RenderGlyphParams, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y,

+    ShapedGlyph, ShapedRun, SharedString, Size, TextRenderingMode, point, size,

+};

+

+use itertools::Itertools;

+use parking_lot::RwLock;

+use smallvec::SmallVec;

+use std::{borrow::Cow, sync::Arc};

+use swash::{

+    scale::{Render, ScaleContext, Source, StrikeWith},

+    zeno::{Format, Vector},

+};

+

/// `PlatformTextSystem` implementation built on `cosmic-text` and `swash`;
/// all mutable state lives behind a single `RwLock`.
pub struct CosmicTextSystem(RwLock<CosmicTextSystemState>);

+

/// Cache key for per-family font lookups: a family name plus the requested
/// font features.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct FontKey {
    family: SharedString,
    features: FontFeatures,
}

+

impl FontKey {
    /// Bundles a family name and its feature settings into a cache key.
    fn new(family: SharedString, features: FontFeatures) -> Self {
        Self { family, features }
    }
}

+

/// Interior state behind `CosmicTextSystem`'s lock.
struct CosmicTextSystemState {
    font_system: FontSystem,
    // Reusable scratch space for text shaping.
    scratch: ShapeBuffer,
    // Swash rasterization context, reused across glyph renders.
    swash_scale_context: ScaleContext,
    /// Contains all already loaded fonts, including all faces. Indexed by `FontId`.
    loaded_fonts: Vec<LoadedFont>,
    /// Caches the `FontId`s associated with a specific family to avoid iterating the font database
    /// for every font face in a family.
    font_ids_by_family_cache: HashMap<FontKey, SmallVec<[FontId; 4]>>,
    // Fallback family name applied when resolving requested family names.
    system_font_fallback: String,
}

+

/// A loaded font face together with data precomputed at load time.
struct LoadedFont {
    font: Arc<CosmicTextFont>,
    // Feature settings converted from GPUI's `FontFeatures` at load time.
    features: CosmicFontFeatures,
    // Whether the face's PostScript name matches a known emoji font.
    is_known_emoji_font: bool,
}

+

+impl CosmicTextSystem {

+    pub fn new(system_font_fallback: &str) -> Self {

+        let font_system = FontSystem::new();

+

+        Self(RwLock::new(CosmicTextSystemState {

+            font_system,

+            scratch: ShapeBuffer::default(),

+            swash_scale_context: ScaleContext::new(),

+            loaded_fonts: Vec::new(),

+            font_ids_by_family_cache: HashMap::default(),

+            system_font_fallback: system_font_fallback.to_string(),

+        }))

+    }

+

+    pub fn new_without_system_fonts(system_font_fallback: &str) -> Self {

+        let font_system = FontSystem::new_with_locale_and_db(

+            "en-US".to_string(),

+            cosmic_text::fontdb::Database::new(),

+        );

+

+        Self(RwLock::new(CosmicTextSystemState {

+            font_system,

+            scratch: ShapeBuffer::default(),

+            swash_scale_context: ScaleContext::new(),

+            loaded_fonts: Vec::new(),

+            font_ids_by_family_cache: HashMap::default(),

+            system_font_fallback: system_font_fallback.to_string(),

+        }))

+    }

+}

+

impl PlatformTextSystem for CosmicTextSystem {
    fn add_fonts(&self, fonts: Vec<Cow<'static, [u8]>>) -> Result<()> {
        self.0.write().add_fonts(fonts)
    }

    // Returns the deduplicated, sorted set of primary family names in the
    // font database.
    fn all_font_names(&self) -> Vec<String> {
        let mut result = self
            .0
            .read()
            .font_system
            .db()
            .faces()
            .filter_map(|face| face.families.first().map(|family| family.0.clone()))
            .collect_vec();
        result.sort();
        result.dedup();
        result
    }

    // Resolves a GPUI `Font` to a `FontId`, loading and caching the family's
    // faces on first use, then picking the best weight/style match.
    fn font_id(&self, font: &Font) -> Result<FontId> {
        let mut state = self.0.write();
        let key = FontKey::new(font.family.clone(), font.features.clone());
        let candidates = if let Some(font_ids) = state.font_ids_by_family_cache.get(&key) {
            font_ids.as_slice()
        } else {
            let font_ids = state.load_family(&font.family, &font.features)?;
            state.font_ids_by_family_cache.insert(key.clone(), font_ids);
            // Re-index the map so the slice borrows from `state` rather than
            // the moved-in value.
            state.font_ids_by_family_cache[&key].as_ref()
        };

        let ix = find_best_match(font, candidates, &state)?;

        Ok(candidates[ix])
    }

    fn font_metrics(&self, font_id: FontId) -> FontMetrics {
        let metrics = self
            .0
            .read()
            .loaded_font(font_id)
            .font
            .as_swash()
            .metrics(&[]);

        FontMetrics {
            units_per_em: metrics.units_per_em as u32,
            ascent: metrics.ascent,
            // Swash reports descent as a positive distance; GPUI expects it
            // signed downward.
            descent: -metrics.descent,
            line_gap: metrics.leading,
            underline_position: metrics.underline_offset,
            underline_thickness: metrics.stroke_size,
            cap_height: metrics.cap_height,
            x_height: metrics.x_height,
            bounding_box: Bounds {
                origin: point(0.0, 0.0),
                size: size(metrics.max_width, metrics.ascent + metrics.descent),
            },
        }
    }

    // NOTE(review): this reports the glyph's advance as its typographic
    // bounds rather than the glyph outline's bounding box — confirm this is
    // intentional.
    fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> Result<Bounds<f32>> {
        let lock = self.0.read();
        let glyph_metrics = lock.loaded_font(font_id).font.as_swash().glyph_metrics(&[]);
        let glyph_id = glyph_id.0 as u16;
        Ok(Bounds {
            origin: point(0.0, 0.0),
            size: size(
                glyph_metrics.advance_width(glyph_id),
                glyph_metrics.advance_height(glyph_id),
            ),
        })
    }

    fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result<Size<f32>> {
        self.0.read().advance(font_id, glyph_id)
    }

    fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option<GlyphId> {
        self.0.read().glyph_for_char(font_id, ch)
    }

    fn glyph_raster_bounds(&self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
        self.0.write().raster_bounds(params)
    }

    fn rasterize_glyph(
        &self,
        params: &RenderGlyphParams,
        raster_bounds: Bounds<DevicePixels>,
    ) -> Result<(Size<DevicePixels>, Vec<u8>)> {
        self.0.write().rasterize_glyph(params, raster_bounds)
    }

    fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> LineLayout {
        self.0.write().layout_line(text, font_size, runs)
    }

    fn recommended_rendering_mode(
        &self,
        _font_id: FontId,
        _font_size: Pixels,
    ) -> TextRenderingMode {
        TextRenderingMode::Subpixel
    }
}

+

+impl CosmicTextSystemState {

    /// Looks up a previously loaded font face by its `FontId` index.
    ///
    /// Panics if `font_id` was not produced by this text system's
    /// `load_family`.
    fn loaded_font(&self, font_id: FontId) -> &LoadedFont {
        &self.loaded_fonts[font_id.0]
    }

+

+    #[profiling::function]

+    fn add_fonts(&mut self, fonts: Vec<Cow<'static, [u8]>>) -> Result<()> {

+        let db = self.font_system.db_mut();

+        for bytes in fonts {

+            match bytes {

+                Cow::Borrowed(embedded_font) => {

+                    db.load_font_data(embedded_font.to_vec());

+                }

+                Cow::Owned(bytes) => {

+                    db.load_font_data(bytes);

+                }

+            }

+        }

+        Ok(())

+    }

+

    /// Loads every face of the named family (after fallback substitution)
    /// into `loaded_fonts`, returning the new `FontId`s in database order.
    ///
    /// Faces that cannot map the letter 'm' are treated as broken and removed
    /// from the database, except for the allow-listed icon fonts below.
    /// Returns an error if a face fails to load or its features fail to
    /// convert.
    #[profiling::function]
    fn load_family(
        &mut self,
        name: &str,
        features: &FontFeatures,
    ) -> Result<SmallVec<[FontId; 4]>> {
        let name = gpui::font_name_with_fallbacks(name, &self.system_font_fallback);

        // Collect ids first so the immutable db borrow ends before we mutate
        // `self` in the loop below.
        let families = self
            .font_system
            .db()
            .faces()
            .filter(|face| face.families.iter().any(|family| *name == family.0))
            .map(|face| (face.id, face.post_script_name.clone()))
            .collect::<SmallVec<[_; 4]>>();

        let mut loaded_font_ids = SmallVec::new();
        for (font_id, postscript_name) in families {
            let font = self
                .font_system
                .get_font(font_id, cosmic_text::Weight::NORMAL)
                .context("Could not load font")?;

            // HACK: To let the storybook run and render Windows caption icons. We should actually do better font fallback.
            let allowed_bad_font_names = [
                "SegoeFluentIcons", // NOTE: Segoe fluent icons postscript name is inconsistent
                "Segoe Fluent Icons",
            ];

            // A zero glyph index for 'm' means the face can't render basic
            // Latin; drop it unless it's an allow-listed icon font.
            if font.as_swash().charmap().map('m') == 0
                && !allowed_bad_font_names.contains(&postscript_name.as_str())
            {
                self.font_system.db_mut().remove_face(font.id());
                continue;
            };

            // `FontId` is simply the index into `loaded_fonts`.
            let font_id = FontId(self.loaded_fonts.len());
            loaded_font_ids.push(font_id);
            self.loaded_fonts.push(LoadedFont {
                font,
                features: cosmic_font_features(features)?,
                is_known_emoji_font: check_is_known_emoji_font(&postscript_name),
            });
        }

        Ok(loaded_font_ids)
    }

+

+    fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result<Size<f32>> {

+        let glyph_metrics = self.loaded_font(font_id).font.as_swash().glyph_metrics(&[]);

+        Ok(Size {

+            width: glyph_metrics.advance_width(glyph_id.0 as u16),

+            height: glyph_metrics.advance_height(glyph_id.0 as u16),

+        })

+    }

+

+    fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option<GlyphId> {

+        let glyph_id = self.loaded_font(font_id).font.as_swash().charmap().map(ch);

+        if glyph_id == 0 {

+            None

+        } else {

+            Some(GlyphId(glyph_id.into()))

+        }

+    }

+

    /// Renders the glyph once to learn its placement, converting swash's
    /// y-up placement (`top` above the origin) into y-down device-pixel
    /// bounds by negating `top`.
    fn raster_bounds(&mut self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
        let image = self.render_glyph_image(params)?;
        Ok(Bounds {
            origin: point(image.placement.left.into(), (-image.placement.top).into()),
            size: size(image.placement.width.into(), image.placement.height.into()),
        })
    }

+

    /// Rasterizes a glyph and returns its bitmap together with the size from
    /// `glyph_bounds`.
    ///
    /// Color and subpixel-mask output is converted in place from swash's
    /// RGBA byte order to BGRA; plain alpha masks are returned as-is. Errors
    /// on empty bounds.
    #[profiling::function]
    fn rasterize_glyph(
        &mut self,
        params: &RenderGlyphParams,
        glyph_bounds: Bounds<DevicePixels>,
    ) -> Result<(Size<DevicePixels>, Vec<u8>)> {
        if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 {
            anyhow::bail!("glyph bounds are empty");
        }

        let mut image = self.render_glyph_image(params)?;
        let bitmap_size = glyph_bounds.size;
        match image.content {
            swash::scale::image::Content::Color | swash::scale::image::Content::SubpixelMask => {
                // Convert from RGBA to BGRA.
                for pixel in image.data.chunks_exact_mut(4) {
                    pixel.swap(0, 2);
                }
                Ok((bitmap_size, image.data))
            }
            swash::scale::image::Content::Mask => Ok((bitmap_size, image.data)),
        }
    }

+

+    fn render_glyph_image(

+        &mut self,

+        params: &RenderGlyphParams,

+    ) -> Result<swash::scale::image::Image> {

+        let loaded_font = &self.loaded_fonts[params.font_id.0];

+        let font_ref = loaded_font.font.as_swash();

+        let pixel_size = f32::from(params.font_size);

+

+        let subpixel_offset = Vector::new(

+            params.subpixel_variant.x as f32 / SUBPIXEL_VARIANTS_X as f32 / params.scale_factor,

+            params.subpixel_variant.y as f32 / SUBPIXEL_VARIANTS_Y as f32 / params.scale_factor,

+        );

+

+        let mut scaler = self

+            .swash_scale_context

+            .builder(font_ref)

+            .size(pixel_size * params.scale_factor)

+            .hint(true)

+            .build();

+

+        let sources: &[Source] = if params.is_emoji {

+            &[

+                Source::ColorOutline(0),

+                Source::ColorBitmap(StrikeWith::BestFit),

+                Source::Outline,

+            ]

+        } else {

+            &[Source::Outline]

+        };

+

+        let mut renderer = Render::new(sources);

+        if params.subpixel_rendering {

+            // There seems to be a bug in Swash where the B and R values are swapped.

+            renderer

+                .format(Format::subpixel_bgra())

+                .offset(subpixel_offset);

+        } else {

+            renderer.format(Format::Alpha).offset(subpixel_offset);

+        }

+

+        let glyph_id: u16 = params.glyph_id.0.try_into()?;

+        renderer

+            .render(&mut scaler, glyph_id)

+            .with_context(|| format!("unable to render glyph via swash for {params:?}"))

+    }

+

+    /// This is used when cosmic_text has chosen a fallback font instead of using the requested

+    /// font, typically to handle some unicode characters. When this happens, `loaded_fonts` may not

+    /// yet have an entry for this fallback font, and so one is added.

+    ///

+    /// Note that callers shouldn't use this `FontId` somewhere that will retrieve the corresponding

+    /// `LoadedFont.features`, as it will have an arbitrarily chosen or empty value. The only

+    /// current use of this field is for the *input* of `layout_line`, and so it's fine to use

+    /// `font_id_for_cosmic_id` when computing the *output* of `layout_line`.

+    fn font_id_for_cosmic_id(&mut self, id: cosmic_text::fontdb::ID) -> Result<FontId> {

+        if let Some(ix) = self

+            .loaded_fonts

+            .iter()

+            .position(|loaded_font| loaded_font.font.id() == id)

+        {

+            Ok(FontId(ix))

+        } else {

+            let font = self

+                .font_system

+                .get_font(id, cosmic_text::Weight::NORMAL)

+                .context("failed to get fallback font from cosmic-text font system")?;

+            let face = self

+                .font_system

+                .db()

+                .face(id)

+                .context("fallback font face not found in cosmic-text database")?;

+

+            let font_id = FontId(self.loaded_fonts.len());

+            self.loaded_fonts.push(LoadedFont {

+                font,

+                features: CosmicFontFeatures::new(),

+                is_known_emoji_font: check_is_known_emoji_font(&face.post_script_name),

+            });

+

+            Ok(font_id)

+        }

+    }

+

+    #[profiling::function]

+    fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout {

+        let mut attrs_list = AttrsList::new(&Attrs::new());

+        let mut offs = 0;

+        for run in font_runs {

+            let loaded_font = self.loaded_font(run.font_id);

+            let Some(face) = self.font_system.db().face(loaded_font.font.id()) else {

+                log::warn!(

+                    "font face not found in database for font_id {:?}",

+                    run.font_id

+                );

+                offs += run.len;

+                continue;

+            };

+            let Some(first_family) = face.families.first() else {

+                log::warn!(

+                    "font face has no family names for font_id {:?}",

+                    run.font_id

+                );

+                offs += run.len;

+                continue;

+            };

+

+            attrs_list.add_span(

+                offs..(offs + run.len),

+                &Attrs::new()

+                    .metadata(run.font_id.0)

+                    .family(Family::Name(&first_family.0))

+                    .stretch(face.stretch)

+                    .style(face.style)

+                    .weight(face.weight)

+                    .font_features(loaded_font.features.clone()),

+            );

+            offs += run.len;

+        }

+

+        let line = ShapeLine::new(

+            &mut self.font_system,

+            text,

+            &attrs_list,

+            cosmic_text::Shaping::Advanced,

+            4,

+        );

+        let mut layout_lines = Vec::with_capacity(1);

+        line.layout_to_buffer(

+            &mut self.scratch,

+            f32::from(font_size),

+            None, // We do our own wrapping

+            cosmic_text::Wrap::None,

+            None,

+            &mut layout_lines,

+            None,

+            cosmic_text::Hinting::Disabled,

+        );

+

+        let Some(layout) = layout_lines.first() else {

+            return LineLayout {

+                font_size,

+                width: Pixels::ZERO,

+                ascent: Pixels::ZERO,

+                descent: Pixels::ZERO,

+                runs: Vec::new(),

+                len: text.len(),

+            };

+        };

+

+        let mut runs: Vec<ShapedRun> = Vec::new();

+        for glyph in &layout.glyphs {

+            let mut font_id = FontId(glyph.metadata);

+            let mut loaded_font = self.loaded_font(font_id);

+            if loaded_font.font.id() != glyph.font_id {

+                match self.font_id_for_cosmic_id(glyph.font_id) {

+                    std::result::Result::Ok(resolved_id) => {

+                        font_id = resolved_id;

+                        loaded_font = self.loaded_font(font_id);

+                    }

+                    Err(error) => {

+                        log::warn!(

+                            "failed to resolve cosmic font id {:?}: {error:#}",

+                            glyph.font_id

+                        );

+                        continue;

+                    }

+                }

+            }

+            let is_emoji = loaded_font.is_known_emoji_font;

+

+            // HACK: Prevent crash caused by variation selectors.

+            if glyph.glyph_id == 3 && is_emoji {

+                continue;

+            }

+

+            let shaped_glyph = ShapedGlyph {

+                id: GlyphId(glyph.glyph_id as u32),

+                position: point(glyph.x.into(), glyph.y.into()),

+                index: glyph.start,

+                is_emoji,

+            };

+

+            if let Some(last_run) = runs

+                .last_mut()

+                .filter(|last_run| last_run.font_id == font_id)

+            {

+                last_run.glyphs.push(shaped_glyph);

+            } else {

+                runs.push(ShapedRun {

+                    font_id,

+                    glyphs: vec![shaped_glyph],

+                });

+            }

+        }

+

+        LineLayout {

+            font_size,

+            width: layout.w.into(),

+            ascent: layout.max_ascent.into(),

+            descent: layout.max_descent.into(),

+            runs,

+            len: text.len(),

+        }

+    }

+}

+

+#[cfg(feature = "font-kit")]

+fn find_best_match(

+    font: &Font,

+    candidates: &[FontId],

+    state: &CosmicTextSystemState,

+) -> Result<usize> {

+    let candidate_properties = candidates

+        .iter()

+        .map(|font_id| {

+            let database_id = state.loaded_font(*font_id).font.id();

+            let face_info = state

+                .font_system

+                .db()

+                .face(database_id)

+                .context("font face not found in database")?;

+            Ok(face_info_into_properties(face_info))

+        })

+        .collect::<Result<SmallVec<[_; 4]>>>()?;

+

+    let ix =

+        font_kit::matching::find_best_match(&candidate_properties, &font_into_properties(font))

+            .context("requested font family contains no font matching the other parameters")?;

+

+    Ok(ix)

+}

+

+#[cfg(not(feature = "font-kit"))]

+fn find_best_match(

+    font: &Font,

+    candidates: &[FontId],

+    state: &CosmicTextSystemState,

+) -> Result<usize> {

+    if candidates.is_empty() {

+        anyhow::bail!("requested font family contains no font matching the other parameters");

+    }

+    if candidates.len() == 1 {

+        return Ok(0);

+    }

+

+    let target_weight = font.weight.0;

+    let target_italic = matches!(

+        font.style,

+        gpui::FontStyle::Italic | gpui::FontStyle::Oblique

+    );

+

+    let mut best_index = 0;

+    let mut best_score = u32::MAX;

+

+    for (index, font_id) in candidates.iter().enumerate() {

+        let database_id = state.loaded_font(*font_id).font.id();

+        let face_info = state

+            .font_system

+            .db()

+            .face(database_id)

+            .context("font face not found in database")?;

+

+        let is_italic = matches!(

+            face_info.style,

+            cosmic_text::Style::Italic | cosmic_text::Style::Oblique

+        );

+        let style_penalty: u32 = if is_italic == target_italic { 0 } else { 1000 };

+        let weight_diff = (face_info.weight.0 as i32 - target_weight as i32).unsigned_abs();

+        let score = style_penalty + weight_diff;

+

+        if score < best_score {

+            best_score = score;

+            best_index = index;

+        }

+    }

+

+    Ok(best_index)

+}

+

+fn cosmic_font_features(features: &FontFeatures) -> Result<CosmicFontFeatures> {

+    let mut result = CosmicFontFeatures::new();

+    for feature in features.0.iter() {

+        let name_bytes: [u8; 4] = feature

+            .0

+            .as_bytes()

+            .try_into()

+            .context("Incorrect feature flag format")?;

+

+        let tag = cosmic_text::FeatureTag::new(&name_bytes);

+

+        result.set(tag, feature.1);

+    }

+    Ok(result)

+}

+

+#[cfg(feature = "font-kit")]

+fn font_into_properties(font: &gpui::Font) -> font_kit::properties::Properties {

+    font_kit::properties::Properties {

+        style: match font.style {

+            gpui::FontStyle::Normal => font_kit::properties::Style::Normal,

+            gpui::FontStyle::Italic => font_kit::properties::Style::Italic,

+            gpui::FontStyle::Oblique => font_kit::properties::Style::Oblique,

+        },

+        weight: font_kit::properties::Weight(font.weight.0),

+        stretch: Default::default(),

+    }

+}

+

+#[cfg(feature = "font-kit")]

+fn face_info_into_properties(

+    face_info: &cosmic_text::fontdb::FaceInfo,

+) -> font_kit::properties::Properties {

+    font_kit::properties::Properties {

+        style: match face_info.style {

+            cosmic_text::Style::Normal => font_kit::properties::Style::Normal,

+            cosmic_text::Style::Italic => font_kit::properties::Style::Italic,

+            cosmic_text::Style::Oblique => font_kit::properties::Style::Oblique,

+        },

+        weight: font_kit::properties::Weight(face_info.weight.0.into()),

+        stretch: match face_info.stretch {

+            cosmic_text::Stretch::Condensed => font_kit::properties::Stretch::CONDENSED,

+            cosmic_text::Stretch::Expanded => font_kit::properties::Stretch::EXPANDED,

+            cosmic_text::Stretch::ExtraCondensed => font_kit::properties::Stretch::EXTRA_CONDENSED,

+            cosmic_text::Stretch::ExtraExpanded => font_kit::properties::Stretch::EXTRA_EXPANDED,

+            cosmic_text::Stretch::Normal => font_kit::properties::Stretch::NORMAL,

+            cosmic_text::Stretch::SemiCondensed => font_kit::properties::Stretch::SEMI_CONDENSED,

+            cosmic_text::Stretch::SemiExpanded => font_kit::properties::Stretch::SEMI_EXPANDED,

+            cosmic_text::Stretch::UltraCondensed => font_kit::properties::Stretch::ULTRA_CONDENSED,

+            cosmic_text::Stretch::UltraExpanded => font_kit::properties::Stretch::ULTRA_EXPANDED,

+        },

+    }

+}

+

+fn check_is_known_emoji_font(postscript_name: &str) -> bool {

+    // TODO: Include other common emoji fonts

+    postscript_name == "NotoColorEmoji"

+}

crates/gpui_wgpu/src/gpui_wgpu.rs 🔗

@@ -1,8 +1,9 @@
-#![cfg(not(target_os = "windows"))]
+mod cosmic_text_system;
 mod wgpu_atlas;
 mod wgpu_context;
 mod wgpu_renderer;
 
+pub use cosmic_text_system::*;
 pub use wgpu_atlas::*;
 pub use wgpu_context::*;
 pub use wgpu_renderer::*;

crates/gpui_wgpu/src/shaders.wgsl 🔗

@@ -1,4 +1,3 @@
-enable dual_source_blending;
 /* Functions useful for debugging:
 
 // A heat map color for debugging (blue -> cyan -> green -> yellow -> red).
@@ -501,11 +500,11 @@ fn gradient_color(background: Background, position: vec2<f32>, bounds: Bounds,
             // checkerboard
             let size = background.gradient_angle_or_pattern_height;
             let relative_position = position - bounds.origin;
-            
+
             let x_index = floor(relative_position.x / size);
             let y_index = floor(relative_position.y / size);
             let should_be_colored = (x_index + y_index) % 2.0;
-            
+
             background_color = solid_color;
             background_color.a *= saturate(should_be_colored);
         }
@@ -1033,7 +1032,7 @@ struct PathRasterizationVertex {
 struct PathRasterizationVarying {
     @builtin(position) position: vec4<f32>,
     @location(0) st_position: vec2<f32>,
-    @location(1) vertex_id: u32,
+    @location(1) @interpolate(flat) vertex_id: u32,
     //TODO: use `clip_distance` once Naga supports it
     @location(3) clip_distances: vec4<f32>,
 }
@@ -1072,14 +1071,14 @@ fn fs_path_rasterization(input: PathRasterizationVarying) -> @location(0) vec4<f
         let distance = f / length(gradient);
         alpha = saturate(0.5 - distance);
     }
-    let gradient_color = prepare_gradient_color(
+    let prepared_gradient = prepare_gradient_color(
         background.tag,
         background.color_space,
         background.solid,
         background.colors,
     );
     let color = gradient_color(background, input.position.xy, bounds,
-        gradient_color.solid, gradient_color.color0, gradient_color.color1);
+        prepared_gradient.solid, prepared_gradient.color0, prepared_gradient.color1);
     return vec4<f32>(color.rgb * color.a * alpha, color.a * alpha);
 }
 
@@ -1334,57 +1333,3 @@ fn fs_surface(input: SurfaceVarying) -> @location(0) vec4<f32> {
 
     return ycbcr_to_RGB * y_cb_cr;
 }
-
-// --- subpixel sprites --- //
-
-struct SubpixelSprite {
-    order: u32,
-    pad: u32,
-    bounds: Bounds,
-    content_mask: Bounds,
-    color: Hsla,
-    tile: AtlasTile,
-    transformation: TransformationMatrix,
-}
-@group(1) @binding(0) var<storage, read> b_subpixel_sprites: array<SubpixelSprite>;
-
-struct SubpixelSpriteOutput {
-    @builtin(position) position: vec4<f32>,
-    @location(0) tile_position: vec2<f32>,
-    @location(1) @interpolate(flat) color: vec4<f32>,
-    @location(3) clip_distances: vec4<f32>,
-}
-
-struct SubpixelSpriteFragmentOutput {
-    @location(0) @blend_src(0) foreground: vec4<f32>,
-    @location(0) @blend_src(1) alpha: vec4<f32>,
-}
-
-@vertex
-fn vs_subpixel_sprite(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) instance_id: u32) -> SubpixelSpriteOutput {
-    let unit_vertex = vec2<f32>(f32(vertex_id & 1u), 0.5 * f32(vertex_id & 2u));
-    let sprite = b_subpixel_sprites[instance_id];
-
-    var out = SubpixelSpriteOutput();
-    out.position = to_device_position_transformed(unit_vertex, sprite.bounds, sprite.transformation);
-    out.tile_position = to_tile_position(unit_vertex, sprite.tile);
-    out.color = hsla_to_rgba(sprite.color);
-    out.clip_distances = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds, sprite.content_mask, sprite.transformation);
-    return out;
-}
-
-@fragment
-fn fs_subpixel_sprite(input: SubpixelSpriteOutput) -> SubpixelSpriteFragmentOutput {
-    let sample = textureSample(t_sprite, s_sprite, input.tile_position).rgb;
-    let alpha_corrected = apply_contrast_and_gamma_correction3(sample, input.color.rgb, gamma_params.subpixel_enhanced_contrast, gamma_params.gamma_ratios);
-
-    // Alpha clip after using the derivatives.
-    if (any(input.clip_distances < vec4<f32>(0.0))) {
-        return SubpixelSpriteFragmentOutput(vec4<f32>(0.0), vec4<f32>(0.0));
-    }
-
-    var out = SubpixelSpriteFragmentOutput();
-    out.foreground = vec4<f32>(input.color.rgb, 1.0);
-    out.alpha = vec4<f32>(input.color.a * alpha_corrected, 1.0);
-    return out;
-}

crates/gpui_wgpu/src/shaders_subpixel.wgsl 🔗

@@ -0,0 +1,53 @@
+// --- subpixel sprites --- //

+

+struct SubpixelSprite {

+    order: u32,

+    pad: u32,

+    bounds: Bounds,

+    content_mask: Bounds,

+    color: Hsla,

+    tile: AtlasTile,

+    transformation: TransformationMatrix,

+}

+@group(1) @binding(0) var<storage, read> b_subpixel_sprites: array<SubpixelSprite>;

+

+struct SubpixelSpriteOutput {

+    @builtin(position) position: vec4<f32>,

+    @location(0) tile_position: vec2<f32>,

+    @location(1) @interpolate(flat) color: vec4<f32>,

+    @location(3) clip_distances: vec4<f32>,

+}

+

+struct SubpixelSpriteFragmentOutput {

+    @location(0) @blend_src(0) foreground: vec4<f32>,

+    @location(0) @blend_src(1) alpha: vec4<f32>,

+}

+

+@vertex

+fn vs_subpixel_sprite(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) instance_id: u32) -> SubpixelSpriteOutput {

+    let unit_vertex = vec2<f32>(f32(vertex_id & 1u), 0.5 * f32(vertex_id & 2u));

+    let sprite = b_subpixel_sprites[instance_id];

+

+    var out = SubpixelSpriteOutput();

+    out.position = to_device_position_transformed(unit_vertex, sprite.bounds, sprite.transformation);

+    out.tile_position = to_tile_position(unit_vertex, sprite.tile);

+    out.color = hsla_to_rgba(sprite.color);

+    out.clip_distances = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds, sprite.content_mask, sprite.transformation);

+    return out;

+}

+

+@fragment

+fn fs_subpixel_sprite(input: SubpixelSpriteOutput) -> SubpixelSpriteFragmentOutput {

+    let sample = textureSample(t_sprite, s_sprite, input.tile_position).rgb;

+    let alpha_corrected = apply_contrast_and_gamma_correction3(sample, input.color.rgb, gamma_params.subpixel_enhanced_contrast, gamma_params.gamma_ratios);

+

+    // Alpha clip after using the derivatives.

+    if (any(input.clip_distances < vec4<f32>(0.0))) {

+        return SubpixelSpriteFragmentOutput(vec4<f32>(0.0), vec4<f32>(0.0));

+    }

+

+    var out = SubpixelSpriteFragmentOutput();

+    out.foreground = vec4<f32>(input.color.rgb, 1.0);

+    out.alpha = vec4<f32>(input.color.a * alpha_corrected, 1.0);

+    return out;

+}

crates/gpui_wgpu/src/wgpu_atlas.rs 🔗

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{Context as _, Result};
 use collections::FxHashMap;
 use etagere::{BucketedAtlasAllocator, size2};
 use gpui::{
@@ -30,6 +30,7 @@ struct PendingUpload {
 struct WgpuAtlasState {
     device: Arc<wgpu::Device>,
     queue: Arc<wgpu::Queue>,
+    max_texture_size: u32,
     storage: WgpuAtlasStorage,
     tiles_by_key: FxHashMap<AtlasKey, AtlasTile>,
     pending_uploads: Vec<PendingUpload>,
@@ -41,9 +42,11 @@ pub struct WgpuTextureInfo {
 
 impl WgpuAtlas {
     pub fn new(device: Arc<wgpu::Device>, queue: Arc<wgpu::Queue>) -> Self {
+        let max_texture_size = device.limits().max_texture_dimension_2d;
         WgpuAtlas(Mutex::new(WgpuAtlasState {
             device,
             queue,
+            max_texture_size,
             storage: WgpuAtlasStorage::default(),
             tiles_by_key: Default::default(),
             pending_uploads: Vec::new(),
@@ -78,7 +81,9 @@ impl PlatformAtlas for WgpuAtlas {
             let Some((size, bytes)) = build()? else {
                 return Ok(None);
             };
-            let tile = lock.allocate(size, key.texture_kind());
+            let tile = lock
+                .allocate(size, key.texture_kind())
+                .context("failed to allocate")?;
             lock.upload_texture(tile.texture_id, tile.bounds, &bytes);
             lock.tiles_by_key.insert(key.clone(), tile.clone());
             Ok(Some(tile))
@@ -110,7 +115,11 @@ impl PlatformAtlas for WgpuAtlas {
 }
 
 impl WgpuAtlasState {
-    fn allocate(&mut self, size: Size<DevicePixels>, texture_kind: AtlasTextureKind) -> AtlasTile {
+    fn allocate(
+        &mut self,
+        size: Size<DevicePixels>,
+        texture_kind: AtlasTextureKind,
+    ) -> Option<AtlasTile> {
         {
             let textures = &mut self.storage[texture_kind];
 
@@ -119,14 +128,12 @@ impl WgpuAtlasState {
                 .rev()
                 .find_map(|texture| texture.allocate(size))
             {
-                return tile;
+                return Some(tile);
             }
         }
 
         let texture = self.push_texture(size, texture_kind);
-        texture
-            .allocate(size)
-            .expect("Failed to allocate from newly created texture")
+        texture.allocate(size)
     }
 
     fn push_texture(
@@ -138,8 +145,13 @@ impl WgpuAtlasState {
             width: DevicePixels(1024),
             height: DevicePixels(1024),
         };
+        let max_texture_size = self.max_texture_size as i32;
+        let max_atlas_size = Size {
+            width: DevicePixels(max_texture_size),
+            height: DevicePixels(max_texture_size),
+        };
 
-        let size = min_size.max(&DEFAULT_ATLAS_SIZE);
+        let size = min_size.min(&max_atlas_size).max(&DEFAULT_ATLAS_SIZE);
         let format = match kind {
             AtlasTextureKind::Monochrome => wgpu::TextureFormat::R8Unorm,
             AtlasTextureKind::Subpixel => wgpu::TextureFormat::Bgra8Unorm,

crates/gpui_wgpu/src/wgpu_context.rs 🔗

@@ -1,6 +1,8 @@
+#[cfg(not(target_family = "wasm"))]
 use anyhow::Context as _;
+#[cfg(not(target_family = "wasm"))]
+use gpui_util::ResultExt;
 use std::sync::Arc;
-use util::ResultExt;
 
 pub struct WgpuContext {
     pub instance: wgpu::Instance,
@@ -10,8 +12,19 @@ pub struct WgpuContext {
     dual_source_blending: bool,
 }
 
+#[cfg(not(target_family = "wasm"))]
+pub struct CompositorGpuHint {
+    pub vendor_id: u32,
+    pub device_id: u32,
+}
+
 impl WgpuContext {
-    pub fn new(instance: wgpu::Instance, surface: &wgpu::Surface<'_>) -> anyhow::Result<Self> {
+    #[cfg(not(target_family = "wasm"))]
+    pub fn new(
+        instance: wgpu::Instance,
+        surface: &wgpu::Surface<'_>,
+        compositor_gpu: Option<CompositorGpuHint>,
+    ) -> anyhow::Result<Self> {
         let device_id_filter = match std::env::var("ZED_DEVICE_ID") {
             Ok(val) => parse_pci_id(&val)
                 .context("Failed to parse device ID from `ZED_DEVICE_ID` environment variable")
@@ -24,24 +37,48 @@ impl WgpuContext {
             }
         };
 
-        let adapter = smol::block_on(Self::select_adapter(
-            &instance,
-            device_id_filter,
-            Some(surface),
-        ))?;
+        // Select an adapter by actually testing surface configuration with the real device.
+        // This is the only reliable way to determine compatibility on hybrid GPU systems.
+        let (adapter, device, queue, dual_source_blending) =
+            pollster::block_on(Self::select_adapter_and_device(
+                &instance,
+                device_id_filter,
+                surface,
+                compositor_gpu.as_ref(),
+            ))?;
 
-        let caps = surface.get_capabilities(&adapter);
-        if caps.formats.is_empty() {
-            let info = adapter.get_info();
-            anyhow::bail!(
-                "No adapter compatible with the display surface could be found. \
-                 Best candidate {:?} (backend={:?}, device={:#06x}) reports no \
-                 supported surface formats.",
-                info.name,
-                info.backend,
-                info.device,
-            );
-        }
+        log::info!(
+            "Selected GPU adapter: {:?} ({:?})",
+            adapter.get_info().name,
+            adapter.get_info().backend
+        );
+
+        Ok(Self {
+            instance,
+            adapter,
+            device: Arc::new(device),
+            queue: Arc::new(queue),
+            dual_source_blending,
+        })
+    }
+
+    #[cfg(target_family = "wasm")]
+    pub async fn new_web() -> anyhow::Result<Self> {
+        let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor {
+            backends: wgpu::Backends::BROWSER_WEBGPU | wgpu::Backends::GL,
+            flags: wgpu::InstanceFlags::default(),
+            backend_options: wgpu::BackendOptions::default(),
+            memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(),
+        });
+
+        let adapter = instance
+            .request_adapter(&wgpu::RequestAdapterOptions {
+                power_preference: wgpu::PowerPreference::HighPerformance,
+                compatible_surface: None,
+                force_fallback_adapter: false,
+            })
+            .await
+            .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}"))?;
 
         log::info!(
             "Selected GPU adapter: {:?} ({:?})",
@@ -49,7 +86,7 @@ impl WgpuContext {
             adapter.get_info().backend
         );
 
-        let (device, queue, dual_source_blending) = Self::create_device(&adapter)?;
+        let (device, queue, dual_source_blending) = Self::create_device(&adapter).await?;
 
         Ok(Self {
             instance,
@@ -60,6 +97,41 @@ impl WgpuContext {
         })
     }
 
+    async fn create_device(
+        adapter: &wgpu::Adapter,
+    ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> {
+        let dual_source_blending = adapter
+            .features()
+            .contains(wgpu::Features::DUAL_SOURCE_BLENDING);
+
+        let mut required_features = wgpu::Features::empty();
+        if dual_source_blending {
+            required_features |= wgpu::Features::DUAL_SOURCE_BLENDING;
+        } else {
+            log::warn!(
+                "Dual-source blending not available on this GPU. \
+                Subpixel text antialiasing will be disabled."
+            );
+        }
+
+        let (device, queue) = adapter
+            .request_device(&wgpu::DeviceDescriptor {
+                label: Some("gpui_device"),
+                required_features,
+                required_limits: wgpu::Limits::downlevel_defaults()
+                    .using_resolution(adapter.limits())
+                    .using_alignment(adapter.limits()),
+                memory_hints: wgpu::MemoryHints::MemoryUsage,
+                trace: wgpu::Trace::Off,
+                experimental_features: wgpu::ExperimentalFeatures::disabled(),
+            })
+            .await
+            .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?;
+
+        Ok((device, queue, dual_source_blending))
+    }
+
+    #[cfg(not(target_family = "wasm"))]
     pub fn instance() -> wgpu::Instance {
         wgpu::Instance::new(&wgpu::InstanceDescriptor {
             backends: wgpu::Backends::VULKAN | wgpu::Backends::GL,
@@ -84,97 +156,165 @@ impl WgpuContext {
         Ok(())
     }
 
-    fn create_device(adapter: &wgpu::Adapter) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> {
-        let dual_source_blending_available = adapter
-            .features()
-            .contains(wgpu::Features::DUAL_SOURCE_BLENDING);
+    /// Select an adapter and create a device, testing that the surface can actually be configured.
+    /// This is the only reliable way to determine compatibility on hybrid GPU systems, where
+    /// adapters may report surface compatibility via get_capabilities() but fail when actually
+    /// configuring (e.g., NVIDIA reporting Vulkan Wayland support but failing because the
+    /// Wayland compositor runs on the Intel GPU).
+    #[cfg(not(target_family = "wasm"))]
+    async fn select_adapter_and_device(
+        instance: &wgpu::Instance,
+        device_id_filter: Option<u32>,
+        surface: &wgpu::Surface<'_>,
+        compositor_gpu: Option<&CompositorGpuHint>,
+    ) -> anyhow::Result<(wgpu::Adapter, wgpu::Device, wgpu::Queue, bool)> {
+        let mut adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await;
 
-        let mut required_features = wgpu::Features::empty();
-        if dual_source_blending_available {
-            required_features |= wgpu::Features::DUAL_SOURCE_BLENDING;
-        } else {
-            log::warn!(
-                "Dual-source blending not available on this GPU. \
-                Subpixel text antialiasing will be disabled."
-            );
+        if adapters.is_empty() {
+            anyhow::bail!("No GPU adapters found");
         }
 
-        let (device, queue) = smol::block_on(adapter.request_device(&wgpu::DeviceDescriptor {
-            label: Some("gpui_device"),
-            required_features,
-            required_limits: wgpu::Limits::default(),
-            memory_hints: wgpu::MemoryHints::MemoryUsage,
-            trace: wgpu::Trace::Off,
-            experimental_features: wgpu::ExperimentalFeatures::disabled(),
-        }))
-        .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?;
-
-        Ok((device, queue, dual_source_blending_available))
-    }
-
-    async fn select_adapter(
-        instance: &wgpu::Instance,
-        device_id_filter: Option<u32>,
-        compatible_surface: Option<&wgpu::Surface<'_>>,
-    ) -> anyhow::Result<wgpu::Adapter> {
         if let Some(device_id) = device_id_filter {
-            let adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await;
+            log::info!("ZED_DEVICE_ID filter: {:#06x}", device_id);
+        }
 
-            if adapters.is_empty() {
-                anyhow::bail!("No GPU adapters found");
-            }
+        // Sort adapters into a single priority order. Tiers (from highest to lowest):
+        //
+        // 1. ZED_DEVICE_ID match — explicit user override
+        // 2. Compositor GPU match — the GPU the display server is rendering on
+        // 3. Device type — WGPU HighPerformance order (Discrete > Integrated >
+        //    Other > Virtual > Cpu). "Other" ranks above "Virtual" because
+        //    backends like OpenGL may report real hardware as "Other".
+        // 4. Backend — prefer Vulkan/Metal/Dx12 over GL/etc.
+        adapters.sort_by_key(|adapter| {
+            let info = adapter.get_info();
+
+            // Backends like OpenGL report device=0 for all adapters, so
+            // device-based matching is only meaningful when non-zero.
+            let device_known = info.device != 0;
+
+            let user_override: u8 = match device_id_filter {
+                Some(id) if device_known && info.device == id => 0,
+                _ => 1,
+            };
+
+            let compositor_match: u8 = match compositor_gpu {
+                Some(hint)
+                    if device_known
+                        && info.vendor == hint.vendor_id
+                        && info.device == hint.device_id =>
+                {
+                    0
+                }
+                _ => 1,
+            };
+
+            let type_priority: u8 = match info.device_type {
+                wgpu::DeviceType::DiscreteGpu => 0,
+                wgpu::DeviceType::IntegratedGpu => 1,
+                wgpu::DeviceType::Other => 2,
+                wgpu::DeviceType::VirtualGpu => 3,
+                wgpu::DeviceType::Cpu => 4,
+            };
+
+            let backend_priority: u8 = match info.backend {
+                wgpu::Backend::Vulkan => 0,
+                wgpu::Backend::Metal => 0,
+                wgpu::Backend::Dx12 => 0,
+                _ => 1,
+            };
+
+            (
+                user_override,
+                compositor_match,
+                type_priority,
+                backend_priority,
+            )
+        });
+
+        // Log all available adapters (in sorted order)
+        log::info!("Found {} GPU adapter(s):", adapters.len());
+        for adapter in &adapters {
+            let info = adapter.get_info();
+            log::info!(
+                "  - {} (vendor={:#06x}, device={:#06x}, backend={:?}, type={:?})",
+                info.name,
+                info.vendor,
+                info.device,
+                info.backend,
+                info.device_type,
+            );
+        }
 
-            let mut non_matching_adapter_infos: Vec<wgpu::AdapterInfo> = Vec::new();
-
-            for adapter in adapters.into_iter() {
-                let info = adapter.get_info();
-                if info.device == device_id {
-                    if let Some(surface) = compatible_surface {
-                        let caps = surface.get_capabilities(&adapter);
-                        if caps.formats.is_empty() {
-                            log::warn!(
-                                "GPU matching ZED_DEVICE_ID={:#06x} ({}) is not compatible \
-                                 with the display surface. Falling back to auto-selection.",
-                                device_id,
-                                info.name,
-                            );
-                            break;
-                        }
-                    }
+        // Test each adapter by creating a device and configuring the surface
+        for adapter in adapters {
+            let info = adapter.get_info();
+            log::info!("Testing adapter: {} ({:?})...", info.name, info.backend);
+
+            match Self::try_adapter_with_surface(&adapter, surface).await {
+                Ok((device, queue, dual_source_blending)) => {
                     log::info!(
-                        "Found GPU matching ZED_DEVICE_ID={:#06x}: {}",
-                        device_id,
-                        info.name
+                        "Selected GPU (passed configuration test): {} ({:?})",
+                        info.name,
+                        info.backend
+                    );
+                    return Ok((adapter, device, queue, dual_source_blending));
+                }
+                Err(e) => {
+                    log::info!(
+                        "  Adapter {} ({:?}) failed: {}, trying next...",
+                        info.name,
+                        info.backend,
+                        e
                     );
-                    return Ok(adapter);
-                } else {
-                    non_matching_adapter_infos.push(info);
                 }
             }
+        }
 
-            log::warn!(
-                "No compatible GPU found matching ZED_DEVICE_ID={:#06x}. Available devices:",
-                device_id
-            );
+        anyhow::bail!("No GPU adapter found that can configure the display surface")
+    }
 
-            for info in &non_matching_adapter_infos {
-                log::warn!(
-                    "  - {} (device_id={:#06x}, backend={})",
-                    info.name,
-                    info.device,
-                    info.backend
-                );
-            }
+    /// Try to use an adapter with a surface by creating a device and testing configuration.
+    /// Returns the device and queue if successful, allowing them to be reused.
+    #[cfg(not(target_family = "wasm"))]
+    async fn try_adapter_with_surface(
+        adapter: &wgpu::Adapter,
+        surface: &wgpu::Surface<'_>,
+    ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> {
+        let caps = surface.get_capabilities(adapter);
+        if caps.formats.is_empty() {
+            anyhow::bail!("no compatible surface formats");
+        }
+        if caps.alpha_modes.is_empty() {
+            anyhow::bail!("no compatible alpha modes");
         }
 
-        instance
-            .request_adapter(&wgpu::RequestAdapterOptions {
-                power_preference: wgpu::PowerPreference::None,
-                compatible_surface,
-                force_fallback_adapter: false,
-            })
-            .await
-            .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}"))
+        // Create the real device with full features
+        let (device, queue, dual_source_blending) = Self::create_device(adapter).await?;
+
+        // Use an error scope to capture any validation errors during configure
+        let error_scope = device.push_error_scope(wgpu::ErrorFilter::Validation);
+
+        let test_config = wgpu::SurfaceConfiguration {
+            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
+            format: caps.formats[0],
+            width: 64,
+            height: 64,
+            present_mode: wgpu::PresentMode::Fifo,
+            desired_maximum_frame_latency: 2,
+            alpha_mode: caps.alpha_modes[0],
+            view_formats: vec![],
+        };
+
+        surface.configure(&device, &test_config);
+
+        // Check if there was a validation error
+        let error = error_scope.pop().await;
+        if let Some(e) = error {
+            anyhow::bail!("surface configuration failed: {e}");
+        }
+
+        Ok((device, queue, dual_source_blending))
     }
 
     pub fn supports_dual_source_blending(&self) -> bool {
@@ -182,6 +322,7 @@ impl WgpuContext {
     }
 }
 
+#[cfg(not(target_family = "wasm"))]
 fn parse_pci_id(id: &str) -> anyhow::Result<u32> {
     let mut id = id.trim();
 

crates/gpui_wgpu/src/wgpu_renderer.rs 🔗

@@ -1,3 +1,5 @@
+#[cfg(not(target_family = "wasm"))]
+use crate::CompositorGpuHint;
 use crate::{WgpuAtlas, WgpuContext};
 use bytemuck::{Pod, Zeroable};
 use gpui::{
@@ -5,9 +7,11 @@ use gpui::{
     PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, SubpixelSprite,
     Underline, get_gamma_correction_ratios,
 };
+use log::warn;
+#[cfg(not(target_family = "wasm"))]
 use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
 use std::num::NonZeroU64;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
 
 #[repr(C)]
 #[derive(Clone, Copy, Pod, Zeroable)]
@@ -105,9 +109,10 @@ pub struct WgpuRenderer {
     path_globals_bind_group: wgpu::BindGroup,
     instance_buffer: wgpu::Buffer,
     instance_buffer_capacity: u64,
+    max_buffer_size: u64,
     storage_buffer_alignment: u64,
-    path_intermediate_texture: wgpu::Texture,
-    path_intermediate_view: wgpu::TextureView,
+    path_intermediate_texture: Option<wgpu::Texture>,
+    path_intermediate_view: Option<wgpu::TextureView>,
     path_msaa_texture: Option<wgpu::Texture>,
     path_msaa_view: Option<wgpu::TextureView>,
     rendering_params: RenderingParameters,
@@ -115,6 +120,9 @@ pub struct WgpuRenderer {
     adapter_info: wgpu::AdapterInfo,
     transparent_alpha_mode: wgpu::CompositeAlphaMode,
     opaque_alpha_mode: wgpu::CompositeAlphaMode,
+    max_texture_size: u32,
+    last_error: Arc<Mutex<Option<String>>>,
+    failed_frame_count: u32,
 }
 
 impl WgpuRenderer {
@@ -123,10 +131,12 @@ impl WgpuRenderer {
     /// # Safety
     /// The caller must ensure that the window handle remains valid for the lifetime
     /// of the returned renderer.
+    #[cfg(not(target_family = "wasm"))]
     pub fn new<W: HasWindowHandle + HasDisplayHandle>(
         gpu_context: &mut Option<WgpuContext>,
         window: &W,
         config: WgpuSurfaceConfig,
+        compositor_gpu: Option<CompositorGpuHint>,
     ) -> anyhow::Result<Self> {
         let window_handle = window
             .window_handle()
@@ -162,9 +172,30 @@ impl WgpuRenderer {
                 context.check_compatible_with_surface(&surface)?;
                 context
             }
-            None => gpu_context.insert(WgpuContext::new(instance, &surface)?),
+            None => gpu_context.insert(WgpuContext::new(instance, &surface, compositor_gpu)?),
         };
 
+        Self::new_with_surface(context, surface, config)
+    }
+
+    #[cfg(target_family = "wasm")]
+    pub fn new_from_canvas(
+        context: &WgpuContext,
+        canvas: &web_sys::HtmlCanvasElement,
+        config: WgpuSurfaceConfig,
+    ) -> anyhow::Result<Self> {
+        let surface = context
+            .instance
+            .create_surface(wgpu::SurfaceTarget::Canvas(canvas.clone()))
+            .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))?;
+        Self::new_with_surface(context, surface, config)
+    }
+
+    fn new_with_surface(
+        context: &WgpuContext,
+        surface: wgpu::Surface<'static>,
+        config: WgpuSurfaceConfig,
+    ) -> anyhow::Result<Self> {
         let surface_caps = surface.get_capabilities(&context.adapter);
         let preferred_formats = [
             wgpu::TextureFormat::Bgra8Unorm,
@@ -214,19 +245,36 @@ impl WgpuRenderer {
             opaque_alpha_mode
         };
 
+        let device = Arc::clone(&context.device);
+        let max_texture_size = device.limits().max_texture_dimension_2d;
+
+        let requested_width = config.size.width.0 as u32;
+        let requested_height = config.size.height.0 as u32;
+        let clamped_width = requested_width.min(max_texture_size);
+        let clamped_height = requested_height.min(max_texture_size);
+
+        if clamped_width != requested_width || clamped_height != requested_height {
+            warn!(
+                "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \
+                 Clamping to ({}, {}). Window content may not fill the entire window.",
+                requested_width, requested_height, max_texture_size, clamped_width, clamped_height
+            );
+        }
+
         let surface_config = wgpu::SurfaceConfiguration {
             usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
             format: surface_format,
-            width: config.size.width.0 as u32,
-            height: config.size.height.0 as u32,
+            width: clamped_width.max(1),
+            height: clamped_height.max(1),
             present_mode: wgpu::PresentMode::Fifo,
             desired_maximum_frame_latency: 2,
             alpha_mode,
             view_formats: vec![],
         };
+        // Configure the surface immediately. The adapter selection process already validated
+        // that this adapter can successfully configure this surface.
         surface.configure(&context.device, &surface_config);
 
-        let device = Arc::clone(&context.device);
         let queue = Arc::clone(&context.queue);
         let dual_source_blending = context.supports_dual_source_blending();
 
@@ -262,6 +310,7 @@ impl WgpuRenderer {
             mapped_at_creation: false,
         });
 
+        let max_buffer_size = device.limits().max_buffer_size;
         let storage_buffer_alignment = device.limits().min_storage_buffer_offset_alignment as u64;
         let initial_instance_buffer_capacity = 2 * 1024 * 1024;
         let instance_buffer = device.create_buffer(&wgpu::BufferDescriptor {
@@ -271,23 +320,6 @@ impl WgpuRenderer {
             mapped_at_creation: false,
         });
 
-        let (path_intermediate_texture, path_intermediate_view) = Self::create_path_intermediate(
-            &device,
-            surface_format,
-            config.size.width.0 as u32,
-            config.size.height.0 as u32,
-        );
-
-        let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed(
-            &device,
-            surface_format,
-            config.size.width.0 as u32,
-            config.size.height.0 as u32,
-            rendering_params.path_sample_count,
-        )
-        .map(|(t, v)| (Some(t), Some(v)))
-        .unwrap_or((None, None));
-
         let globals_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
             label: Some("globals_bind_group"),
             layout: &bind_group_layouts.globals,
@@ -336,6 +368,13 @@ impl WgpuRenderer {
 
         let adapter_info = context.adapter.get_info();
 
+        let last_error: Arc<Mutex<Option<String>>> = Arc::new(Mutex::new(None));
+        let last_error_clone = Arc::clone(&last_error);
+        device.on_uncaptured_error(Arc::new(move |error| {
+            let mut guard = last_error_clone.lock().unwrap();
+            *guard = Some(error.to_string());
+        }));
+
         Ok(Self {
             device,
             queue,
@@ -352,16 +391,22 @@ impl WgpuRenderer {
             path_globals_bind_group,
             instance_buffer,
             instance_buffer_capacity: initial_instance_buffer_capacity,
+            max_buffer_size,
             storage_buffer_alignment,
-            path_intermediate_texture,
-            path_intermediate_view,
-            path_msaa_texture,
-            path_msaa_view,
+            // Defer intermediate texture creation to first draw call via ensure_intermediate_textures().
+            // This avoids panics when the device/surface is in an invalid state during initialization.
+            path_intermediate_texture: None,
+            path_intermediate_view: None,
+            path_msaa_texture: None,
+            path_msaa_view: None,
             rendering_params,
             dual_source_blending,
             adapter_info,
             transparent_alpha_mode,
             opaque_alpha_mode,
+            max_texture_size,
+            last_error,
+            failed_frame_count: 0,
         })
     }
 
@@ -497,12 +542,25 @@ impl WgpuRenderer {
         path_sample_count: u32,
         dual_source_blending: bool,
     ) -> WgpuPipelines {
-        let shader_source = include_str!("shaders.wgsl");
+        let base_shader_source = include_str!("shaders.wgsl");
         let shader_module = device.create_shader_module(wgpu::ShaderModuleDescriptor {
             label: Some("gpui_shaders"),
-            source: wgpu::ShaderSource::Wgsl(shader_source.into()),
+            source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(base_shader_source)),
         });
 
+        let subpixel_shader_source = include_str!("shaders_subpixel.wgsl");
+        let subpixel_shader_module = if dual_source_blending {
+            let combined = format!(
+                "enable dual_source_blending;\n{base_shader_source}\n{subpixel_shader_source}"
+            );
+            Some(device.create_shader_module(wgpu::ShaderModuleDescriptor {
+                label: Some("gpui_subpixel_shaders"),
+                source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Owned(combined)),
+            }))
+        } else {
+            None
+        };
+
         let blend_mode = match alpha_mode {
             wgpu::CompositeAlphaMode::PreMultiplied => {
                 wgpu::BlendState::PREMULTIPLIED_ALPHA_BLENDING
@@ -523,7 +581,8 @@ impl WgpuRenderer {
                                data_layout: &wgpu::BindGroupLayout,
                                topology: wgpu::PrimitiveTopology,
                                color_targets: &[Option<wgpu::ColorTargetState>],
-                               sample_count: u32| {
+                               sample_count: u32,
+                               module: &wgpu::ShaderModule| {
             let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
                 label: Some(&format!("{name}_layout")),
                 bind_group_layouts: &[globals_layout, data_layout],
@@ -534,13 +593,13 @@ impl WgpuRenderer {
                 label: Some(name),
                 layout: Some(&pipeline_layout),
                 vertex: wgpu::VertexState {
-                    module: &shader_module,
+                    module,
                     entry_point: Some(vs_entry),
                     buffers: &[],
                     compilation_options: wgpu::PipelineCompilationOptions::default(),
                 },
                 fragment: Some(wgpu::FragmentState {
-                    module: &shader_module,
+                    module,
                     entry_point: Some(fs_entry),
                     targets: color_targets,
                     compilation_options: wgpu::PipelineCompilationOptions::default(),
@@ -574,6 +633,7 @@ impl WgpuRenderer {
             wgpu::PrimitiveTopology::TriangleStrip,
             &[Some(color_target.clone())],
             1,
+            &shader_module,
         );
 
         let shadows = create_pipeline(
@@ -585,6 +645,7 @@ impl WgpuRenderer {
             wgpu::PrimitiveTopology::TriangleStrip,
             &[Some(color_target.clone())],
             1,
+            &shader_module,
         );
 
         let path_rasterization = create_pipeline(
@@ -600,6 +661,7 @@ impl WgpuRenderer {
                 write_mask: wgpu::ColorWrites::ALL,
             })],
             path_sample_count,
+            &shader_module,
         );
 
         let paths_blend = wgpu::BlendState {
@@ -628,6 +690,7 @@ impl WgpuRenderer {
                 write_mask: wgpu::ColorWrites::ALL,
             })],
             1,
+            &shader_module,
         );
 
         let underlines = create_pipeline(
@@ -639,6 +702,7 @@ impl WgpuRenderer {
             wgpu::PrimitiveTopology::TriangleStrip,
             &[Some(color_target.clone())],
             1,
+            &shader_module,
         );
 
         let mono_sprites = create_pipeline(
@@ -650,9 +714,10 @@ impl WgpuRenderer {
             wgpu::PrimitiveTopology::TriangleStrip,
             &[Some(color_target.clone())],
             1,
+            &shader_module,
         );
 
-        let subpixel_sprites = if dual_source_blending {
+        let subpixel_sprites = if let Some(subpixel_module) = &subpixel_shader_module {
             let subpixel_blend = wgpu::BlendState {
                 color: wgpu::BlendComponent {
                     src_factor: wgpu::BlendFactor::Src1,
@@ -679,6 +744,7 @@ impl WgpuRenderer {
                     write_mask: wgpu::ColorWrites::COLOR,
                 })],
                 1,
+                subpixel_module,
             ))
         } else {
             None
@@ -693,6 +759,7 @@ impl WgpuRenderer {
             wgpu::PrimitiveTopology::TriangleStrip,
             &[Some(color_target.clone())],
             1,
+            &shader_module,
         );
 
         let surfaces = create_pipeline(
@@ -704,6 +771,7 @@ impl WgpuRenderer {
             wgpu::PrimitiveTopology::TriangleStrip,
             &[Some(color_target)],
             1,
+            &shader_module,
         );
 
         WgpuPipelines {
@@ -776,32 +844,75 @@ impl WgpuRenderer {
         let height = size.height.0 as u32;
 
         if width != self.surface_config.width || height != self.surface_config.height {
-            self.surface_config.width = width.max(1);
-            self.surface_config.height = height.max(1);
+            let clamped_width = width.min(self.max_texture_size);
+            let clamped_height = height.min(self.max_texture_size);
+
+            if clamped_width != width || clamped_height != height {
+                warn!(
+                    "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \
+                     Clamping to ({}, {}). Window content may not fill the entire window.",
+                    width, height, self.max_texture_size, clamped_width, clamped_height
+                );
+            }
+
+            // Wait for any in-flight GPU work to complete before destroying textures
+            if let Err(e) = self.device.poll(wgpu::PollType::Wait {
+                submission_index: None,
+                timeout: None,
+            }) {
+                warn!("Failed to poll device during resize: {e:?}");
+            }
+
+            // Destroy old textures before allocating new ones to avoid GPU memory spikes
+            if let Some(ref texture) = self.path_intermediate_texture {
+                texture.destroy();
+            }
+            if let Some(ref texture) = self.path_msaa_texture {
+                texture.destroy();
+            }
+
+            self.surface_config.width = clamped_width.max(1);
+            self.surface_config.height = clamped_height.max(1);
             self.surface.configure(&self.device, &self.surface_config);
 
-            let (path_intermediate_texture, path_intermediate_view) =
-                Self::create_path_intermediate(
-                    &self.device,
-                    self.surface_config.format,
-                    self.surface_config.width,
-                    self.surface_config.height,
-                );
-            self.path_intermediate_texture = path_intermediate_texture;
-            self.path_intermediate_view = path_intermediate_view;
+            // Invalidate intermediate textures - they will be lazily recreated
+            // in draw() after we confirm the surface is healthy. This avoids
+            // panics when the device/surface is in an invalid state during resize.
+            self.path_intermediate_texture = None;
+            self.path_intermediate_view = None;
+            self.path_msaa_texture = None;
+            self.path_msaa_view = None;
+        }
+    }
 
-            let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed(
+    fn ensure_intermediate_textures(&mut self) {
+        if self.path_intermediate_texture.is_some() {
+            return;
+        }
+
+        let (path_intermediate_texture, path_intermediate_view) = {
+            let (t, v) = Self::create_path_intermediate(
                 &self.device,
                 self.surface_config.format,
                 self.surface_config.width,
                 self.surface_config.height,
-                self.rendering_params.path_sample_count,
-            )
-            .map(|(t, v)| (Some(t), Some(v)))
-            .unwrap_or((None, None));
-            self.path_msaa_texture = path_msaa_texture;
-            self.path_msaa_view = path_msaa_view;
-        }
+            );
+            (Some(t), Some(v))
+        };
+        self.path_intermediate_texture = path_intermediate_texture;
+        self.path_intermediate_view = path_intermediate_view;
+
+        let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed(
+            &self.device,
+            self.surface_config.format,
+            self.surface_config.width,
+            self.surface_config.height,
+            self.rendering_params.path_sample_count,
+        )
+        .map(|(t, v)| (Some(t), Some(v)))
+        .unwrap_or((None, None));
+        self.path_msaa_texture = path_msaa_texture;
+        self.path_msaa_view = path_msaa_view;
     }
 
     pub fn update_transparency(&mut self, transparent: bool) {
@@ -837,6 +948,10 @@ impl WgpuRenderer {
         &self.atlas
     }
 
+    pub fn supports_dual_source_blending(&self) -> bool {
+        self.dual_source_blending
+    }
+
     pub fn gpu_specs(&self) -> GpuSpecs {
         GpuSpecs {
             is_software_emulated: self.adapter_info.device_type == wgpu::DeviceType::Cpu,
@@ -846,7 +961,25 @@ impl WgpuRenderer {
         }
     }
 
+    pub fn max_texture_size(&self) -> u32 {
+        self.max_texture_size
+    }
+
     pub fn draw(&mut self, scene: &Scene) {
+        let last_error = self.last_error.lock().unwrap().take();
+        if let Some(error) = last_error {
+            self.failed_frame_count += 1;
+            log::error!(
+                "GPU error during frame (failure {} of 20): {error}",
+                self.failed_frame_count
+            );
+            if self.failed_frame_count > 20 {
+                panic!("Too many consecutive GPU errors. Last error: {error}");
+            }
+        } else {
+            self.failed_frame_count = 0;
+        }
+
         self.atlas.before_frame();
 
         let frame = match self.surface.get_current_texture() {
@@ -860,6 +993,10 @@ impl WgpuRenderer {
                 return;
             }
         };
+
+        // Now that we know the surface is healthy, ensure intermediate textures exist
+        self.ensure_intermediate_textures();
+
         let frame_view = frame
             .texture
             .create_view(&wgpu::TextureViewDescriptor::default());
@@ -1020,7 +1157,7 @@ impl WgpuRenderer {
 
             if overflow {
                 drop(encoder);
-                if self.instance_buffer_capacity >= 256 * 1024 * 1024 {
+                if self.instance_buffer_capacity >= self.max_buffer_size {
                     log::error!(
                         "instance buffer size grew too large: {}",
                         self.instance_buffer_capacity
@@ -1249,11 +1386,15 @@ impl WgpuRenderer {
             vec![PathSprite { bounds }]
         };
 
+        let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else {
+            return true;
+        };
+
         let sprite_data = unsafe { Self::instance_bytes(&sprites) };
         self.draw_instances_with_texture(
             sprite_data,
             sprites.len() as u32,
-            &self.path_intermediate_view,
+            path_intermediate_view,
             &self.pipelines.paths,
             instance_offset,
             pass,
@@ -1297,10 +1438,14 @@ impl WgpuRenderer {
             }],
         });
 
+        let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else {
+            return true;
+        };
+
         let (target_view, resolve_target) = if let Some(ref msaa_view) = self.path_msaa_view {
-            (msaa_view, Some(&self.path_intermediate_view))
+            (msaa_view, Some(path_intermediate_view))
         } else {
-            (&self.path_intermediate_view, None)
+            (path_intermediate_view, None)
         };
 
         {
@@ -1329,7 +1474,7 @@ impl WgpuRenderer {
     }
 
     fn grow_instance_buffer(&mut self) {
-        let new_capacity = self.instance_buffer_capacity * 2;
+        let new_capacity = (self.instance_buffer_capacity * 2).min(self.max_buffer_size);
         log::info!("increased instance buffer size to {}", new_capacity);
         self.instance_buffer = self.device.create_buffer(&wgpu::BufferDescriptor {
             label: Some("instance_buffer"),

crates/http_client/Cargo.toml 🔗

@@ -19,8 +19,6 @@ doctest = true
 [dependencies]
 anyhow.workspace = true
 async-compression.workspace = true
-async-fs.workspace = true
-async-tar.workspace = true
 bytes.workspace = true
 derive_more.workspace = true
 futures.workspace = true
@@ -31,7 +29,11 @@ parking_lot.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 serde_urlencoded.workspace = true
-sha2.workspace = true
-tempfile.workspace = true
 url.workspace = true
+
+[target.'cfg(not(target_family = "wasm"))'.dependencies]
 util.workspace = true
+async-fs.workspace = true
+async-tar.workspace = true
+sha2.workspace = true
+tempfile.workspace = true

crates/http_client/src/async_body.rs 🔗

@@ -7,6 +7,7 @@ use std::{
 use bytes::Bytes;
 use futures::AsyncRead;
 use http_body::{Body, Frame};
+use serde::Serialize;
 
 /// Based on the implementation of AsyncBody in
 /// <https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs>.
@@ -88,6 +89,19 @@ impl From<&'static str> for AsyncBody {
     }
 }
 
+/// Newtype wrapper that serializes a value as JSON into an `AsyncBody`.
+pub struct Json<T: Serialize>(pub T);
+
+impl<T: Serialize> From<Json<T>> for AsyncBody {
+    fn from(json: Json<T>) -> Self {
+        Self::from_bytes(
+            serde_json::to_vec(&json.0)
+                .expect("failed to serialize JSON")
+                .into(),
+        )
+    }
+}
+
 impl<T: Into<Self>> From<Option<T>> for AsyncBody {
     fn from(body: Option<T>) -> Self {
         match body {

crates/http_client/src/http_client.rs 🔗

@@ -1,9 +1,11 @@
 mod async_body;
+#[cfg(not(target_family = "wasm"))]
 pub mod github;
+#[cfg(not(target_family = "wasm"))]
 pub mod github_download;
 
 pub use anyhow::{Result, anyhow};
-pub use async_body::{AsyncBody, Inner};
+pub use async_body::{AsyncBody, Inner, Json};
 use derive_more::Deref;
 use http::HeaderValue;
 pub use http::{self, Method, Request, Response, StatusCode, Uri, request::Builder};

crates/icons/src/icons.rs 🔗

@@ -23,6 +23,7 @@ pub enum IconName {
     AiOpenAi,
     AiOpenAiCompat,
     AiOpenRouter,
+    AiVercel,
     AiVZero,
     AiXAi,
     AiZed,
@@ -112,6 +113,8 @@ pub enum IconName {
     ExpandUp,
     ExpandVertical,
     Eye,
+    FastForward,
+    FastForwardOff,
     File,
     FileCode,
     FileDiff,
@@ -142,6 +145,7 @@ pub enum IconName {
     GitBranch,
     GitBranchAlt,
     GitBranchPlus,
+    GitCommit,
     GitGraph,
     Github,
     Hash,
@@ -172,7 +176,9 @@ pub enum IconName {
     Mic,
     MicMute,
     Minimize,
+    NewThread,
     Notepad,
+    OpenFolder,
     Option,
     PageDown,
     PageUp,
@@ -187,6 +193,7 @@ pub enum IconName {
     Power,
     Public,
     PullRequest,
+    QueueMessage,
     Quote,
     Reader,
     RefreshTitle,
@@ -220,10 +227,6 @@ pub enum IconName {
     Star,
     StarFilled,
     Stop,
-    Supermaven,
-    SupermavenDisabled,
-    SupermavenError,
-    SupermavenInit,
     SwatchBook,
     SweepAi,
     SweepAiDisabled,

crates/language/src/buffer.rs 🔗

@@ -4,7 +4,7 @@ use crate::{
     DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
     RunnableTag, TextObject, TreeSitterOptions,
     diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
-    language_settings::{LanguageSettings, language_settings},
+    language_settings::{AutoIndentMode, LanguageSettings, language_settings},
     outline::OutlineItem,
     row_chunk::RowChunks,
     syntax_map::{
@@ -187,7 +187,7 @@ struct BufferBranchState {
 /// state of a buffer.
 pub struct BufferSnapshot {
     pub text: text::BufferSnapshot,
-    pub syntax: SyntaxSnapshot,
+    pub(crate) syntax: SyntaxSnapshot,
     tree_sitter_data: Arc<TreeSitterData>,
     diagnostics: TreeMap<LanguageServerId, DiagnosticSet>,
     remote_selections: TreeMap<ReplicaId, SelectionSet>,
@@ -1776,7 +1776,9 @@ impl Buffer {
         self.syntax_map.lock().contains_unknown_injections()
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    /// Sets the sync parse timeout for this buffer.
+    ///
+    /// Setting this to `None` disables sync parsing entirely.
     pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
         self.sync_parse_timeout = timeout;
     }
@@ -2736,17 +2738,18 @@ impl Buffer {
                 .filter(|((_, (range, _)), _)| {
                     let language = before_edit.language_at(range.start);
                     let language_id = language.map(|l| l.id());
-                    if let Some((cached_language_id, auto_indent)) = previous_setting
+                    if let Some((cached_language_id, apply_syntax_indent)) = previous_setting
                         && cached_language_id == language_id
                     {
-                        auto_indent
+                        apply_syntax_indent
                     } else {
                         // The auto-indent setting is not present in editorconfigs, hence
                         // we can avoid passing the file here.
-                        let auto_indent =
+                        let auto_indent_mode =
                             language_settings(language.map(|l| l.name()), None, cx).auto_indent;
-                        previous_setting = Some((language_id, auto_indent));
-                        auto_indent
+                        let apply_syntax_indent = auto_indent_mode == AutoIndentMode::SyntaxAware;
+                        previous_setting = Some((language_id, apply_syntax_indent));
+                        apply_syntax_indent
                     }
                 })
                 .map(|((ix, (range, _)), new_text)| {
@@ -3706,6 +3709,14 @@ impl BufferSnapshot {
         None
     }
 
+    pub fn captures(
+        &self,
+        range: Range<usize>,
+        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
+    ) -> SyntaxMapCaptures<'_> {
+        self.syntax.captures(range, &self.text, query)
+    }
+
     #[ztracing::instrument(skip_all)]
     fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
         let captures = self.syntax.captures(range, &self.text, |grammar| {

crates/language/src/language.rs 🔗

@@ -23,7 +23,7 @@ mod toolchain;
 pub mod buffer_tests;
 
 use crate::language_settings::SoftWrap;
-pub use crate::language_settings::{EditPredictionsMode, IndentGuideSettings};
+pub use crate::language_settings::{AutoIndentMode, EditPredictionsMode, IndentGuideSettings};
 use anyhow::{Context as _, Result};
 use async_trait::async_trait;
 use collections::{HashMap, HashSet, IndexSet};
@@ -491,6 +491,7 @@ pub trait LspAdapter: 'static + Send + Sync + DynLspInstaller {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _cx: &mut AsyncApp,
     ) -> Result<Option<Value>> {
         Ok(None)
     }
@@ -834,6 +835,11 @@ pub struct LanguageConfig {
     pub name: LanguageName,
     /// The name of this language for a Markdown code fence block
     pub code_fence_block_name: Option<Arc<str>>,
+    /// Alternative language names that Jupyter kernels may report for this language.
+    /// Used when a kernel's `language` field differs from Zed's language name.
+    /// For example, the Nu extension would set this to `["nushell"]`.
+    #[serde(default)]
+    pub kernel_language_names: Vec<Arc<str>>,
     // The name of the grammar in a WASM bundle (experimental).
     pub grammar: Option<Arc<str>>,
     /// The criteria for matching this language to a given file.
@@ -1140,6 +1146,7 @@ impl Default for LanguageConfig {
         Self {
             name: LanguageName::new_static(""),
             code_fence_block_name: None,
+            kernel_language_names: Default::default(),
             grammar: None,
             matcher: LanguageMatcher::default(),
             brackets: Default::default(),
@@ -2074,6 +2081,23 @@ impl Language {
             .unwrap_or_else(|| self.config.name.as_ref().to_lowercase().into())
     }
 
+    pub fn matches_kernel_language(&self, kernel_language: &str) -> bool {
+        let kernel_language_lower = kernel_language.to_lowercase();
+
+        if self.code_fence_block_name().to_lowercase() == kernel_language_lower {
+            return true;
+        }
+
+        if self.config.name.as_ref().to_lowercase() == kernel_language_lower {
+            return true;
+        }
+
+        self.config
+            .kernel_language_names
+            .iter()
+            .any(|name| name.to_lowercase() == kernel_language_lower)
+    }
+
     pub fn context_provider(&self) -> Option<Arc<dyn ContextProvider>> {
         self.context_provider.clone()
     }
@@ -2638,6 +2662,7 @@ impl LspAdapter for FakeLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _cx: &mut AsyncApp,
     ) -> Result<Option<Value>> {
         Ok(self.initialization_options.clone())
     }

crates/language/src/language_settings.rs 🔗

@@ -12,7 +12,7 @@ use itertools::{Either, Itertools};
 use settings::{DocumentFoldingRanges, DocumentSymbols, IntoGpui, SemanticTokens};
 
 pub use settings::{
-    CompletionSettingsContent, EditPredictionPromptFormat, EditPredictionProvider,
+    AutoIndentMode, CompletionSettingsContent, EditPredictionPromptFormat, EditPredictionProvider,
     EditPredictionsMode, FormatOnSave, Formatter, FormatterList, InlayHintKind,
     LanguageSettingsContent, LspInsertMode, RewrapBehavior, ShowWhitespaceSetting, SoftWrap,
     WordsCompletionMode,
@@ -144,8 +144,8 @@ pub struct LanguageSettings {
     /// Whether to use additional LSP queries to format (and amend) the code after
     /// every "trigger" symbol input, defined by LSP server capabilities.
     pub use_on_type_format: bool,
-    /// Whether indentation should be adjusted based on the context whilst typing.
-    pub auto_indent: bool,
+    /// Controls automatic indentation behavior when typing.
+    pub auto_indent: AutoIndentMode,
     /// Whether indentation of pasted content should be adjusted based on the context.
     pub auto_indent_on_paste: bool,
     /// Controls how the editor handles the autoclosed characters.
@@ -229,6 +229,22 @@ pub struct IndentGuideSettings {
     pub background_coloring: settings::IndentGuideBackgroundColoring,
 }
 
+impl IndentGuideSettings {
+    /// Returns the clamped line width in pixels for an indent guide based on
+    /// whether it is active, or `None` when line coloring is disabled.
+    pub fn visible_line_width(&self, active: bool) -> Option<u32> {
+        if self.coloring == settings::IndentGuideColoring::Disabled {
+            return None;
+        }
+        let width = if active {
+            self.active_line_width
+        } else {
+            self.line_width
+        };
+        Some(width.clamp(1, 10))
+    }
+}
+
 #[derive(Debug, Clone, PartialEq)]
 pub struct LanguageTaskSettings {
     /// Extra task variables to set for a particular language.
@@ -380,8 +396,7 @@ impl InlayHintSettings {
     }
 }
 
-/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot)
-/// or [Supermaven](https://supermaven.com).
+/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot).
 #[derive(Clone, Debug, Default)]
 pub struct EditPredictionSettings {
     /// The provider that supplies edit predictions.

crates/language/src/proto.rs 🔗

@@ -496,7 +496,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
     };
     Some(Anchor::new(
         timestamp,
-        anchor.offset as usize,
+        anchor.offset as u32,
         bias,
         buffer_id,
     ))

crates/language/src/syntax_map.rs 🔗

@@ -13,7 +13,7 @@ use std::{
     collections::BinaryHeap,
     fmt, iter,
     ops::{ControlFlow, Deref, DerefMut, Range},
-    sync::Arc,
+    sync::{Arc, LazyLock},
     time::{Duration, Instant},
 };
 use streaming_iterator::StreamingIterator;
@@ -40,6 +40,27 @@ pub struct SyntaxSnapshot {
     update_count: usize,
 }
 
+// Dropping deep treesitter Trees can be quite slow due to deallocating lots of memory.
+// To avoid blocking the main thread, we offload the drop operation to a background thread.
+impl Drop for SyntaxSnapshot {
+    fn drop(&mut self) {
+        static DROP_TX: LazyLock<std::sync::mpsc::Sender<SumTree<SyntaxLayerEntry>>> =
+            LazyLock::new(|| {
+                let (tx, rx) = std::sync::mpsc::channel();
+                std::thread::Builder::new()
+                    .name("SyntaxSnapshot::drop".into())
+                    .spawn(move || while let Ok(_) = rx.recv() {})
+                    .expect("failed to spawn drop thread");
+                tx
+            });
+        // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`.
+        let _ = DROP_TX.send(std::mem::replace(
+            &mut self.layers,
+            SumTree::from_summary(Default::default()),
+        ));
+    }
+}
+
 #[derive(Default)]
 pub struct SyntaxMapCaptures<'a> {
     layers: Vec<SyntaxMapCapturesLayer<'a>>,

crates/language_extension/src/extension_lsp_adapter.rs 🔗

@@ -309,6 +309,7 @@ impl LspAdapter for ExtensionLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         delegate: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;
         let json_options = self

crates/language_model/src/language_model.rs 🔗

@@ -613,6 +613,10 @@ pub trait LanguageModel: Send + Sync {
         false
     }
 
+    fn supports_fast_mode(&self) -> bool {
+        false
+    }
+
     /// Returns the list of supported effort levels that can be used when thinking.
     fn supported_effort_levels(&self) -> Vec<LanguageModelEffortLevel> {
         Vec::new()

crates/language_model/src/model/cloud_model.rs 🔗

@@ -4,6 +4,7 @@ use std::sync::Arc;
 use anyhow::{Context as _, Result};
 use client::Client;
 use cloud_api_client::ClientApiError;
+use cloud_api_types::OrganizationId;
 use cloud_api_types::websocket_protocol::MessageToClient;
 use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
 use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _};
@@ -26,29 +27,46 @@ impl fmt::Display for PaymentRequiredError {
 pub struct LlmApiToken(Arc<RwLock<Option<String>>>);
 
 impl LlmApiToken {
-    pub async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
+    pub async fn acquire(
+        &self,
+        client: &Arc<Client>,
+        organization_id: Option<OrganizationId>,
+    ) -> Result<String> {
         let lock = self.0.upgradable_read().await;
         if let Some(token) = lock.as_ref() {
             Ok(token.to_string())
         } else {
-            Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await
+            Self::fetch(
+                RwLockUpgradableReadGuard::upgrade(lock).await,
+                client,
+                organization_id,
+            )
+            .await
         }
     }
 
-    pub async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
-        Self::fetch(self.0.write().await, client).await
+    pub async fn refresh(
+        &self,
+        client: &Arc<Client>,
+        organization_id: Option<OrganizationId>,
+    ) -> Result<String> {
+        Self::fetch(self.0.write().await, client, organization_id).await
     }
 
     async fn fetch(
         mut lock: RwLockWriteGuard<'_, Option<String>>,
         client: &Arc<Client>,
+        organization_id: Option<OrganizationId>,
     ) -> Result<String> {
         let system_id = client
             .telemetry()
             .system_id()
             .map(|system_id| system_id.to_string());
 
-        let result = client.cloud_client().create_llm_token(system_id).await;
+        let result = client
+            .cloud_client()
+            .create_llm_token(system_id, organization_id)
+            .await;
         match result {
             Ok(response) => {
                 *lock = Some(response.token.0.clone());

crates/language_model/src/request.rs 🔗

@@ -431,6 +431,7 @@ pub struct LanguageModelRequestTool {
     pub name: String,
     pub description: String,
     pub input_schema: serde_json::Value,
+    pub use_input_streaming: bool,
 }
 
 #[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
@@ -452,6 +453,33 @@ pub struct LanguageModelRequest {
     pub temperature: Option<f32>,
     pub thinking_allowed: bool,
     pub thinking_effort: Option<String>,
+    pub speed: Option<Speed>,
+}
+
+#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+pub enum Speed {
+    #[default]
+    Standard,
+    Fast,
+}
+
+impl Speed {
+    pub fn toggle(self) -> Self {
+        match self {
+            Speed::Standard => Speed::Fast,
+            Speed::Fast => Speed::Standard,
+        }
+    }
+}
+
+impl From<Speed> for anthropic::Speed {
+    fn from(speed: Speed) -> Self {
+        match speed {
+            Speed::Standard => anthropic::Speed::Standard,
+            Speed::Fast => anthropic::Speed::Fast,
+        }
+    }
 }
 
 #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]

crates/language_models/src/language_models.rs 🔗

@@ -25,6 +25,7 @@ use crate::provider::open_ai::OpenAiLanguageModelProvider;
 use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider;
 use crate::provider::open_router::OpenRouterLanguageModelProvider;
 use crate::provider::vercel::VercelLanguageModelProvider;
+use crate::provider::vercel_ai_gateway::VercelAiGatewayLanguageModelProvider;
 use crate::provider::x_ai::XAiLanguageModelProvider;
 pub use crate::settings::*;
 
@@ -208,6 +209,13 @@ fn register_language_model_providers(
         Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)),
         cx,
     );
+    registry.register_provider(
+        Arc::new(VercelAiGatewayLanguageModelProvider::new(
+            client.http_client(),
+            cx,
+        )),
+        cx,
+    );
     registry.register_provider(
         Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)),
         cx,

crates/language_models/src/provider/anthropic.rs 🔗

@@ -370,6 +370,7 @@ pub fn into_anthropic_count_tokens_request(
                 name: tool.name,
                 description: tool.description,
                 input_schema: tool.input_schema,
+                eager_input_streaming: tool.use_input_streaming,
             })
             .collect(),
         tool_choice: request.tool_choice.map(|choice| match choice {
@@ -713,6 +714,7 @@ pub fn into_anthropic(
                 name: tool.name,
                 description: tool.description,
                 input_schema: tool.input_schema,
+                eager_input_streaming: tool.use_input_streaming,
             })
             .collect(),
         tool_choice: request.tool_choice.map(|choice| match choice {
@@ -723,6 +725,7 @@ pub fn into_anthropic(
         metadata: None,
         output_config: None,
         stop_sequences: Vec::new(),
+        speed: request.speed.map(From::from),
         temperature: request.temperature.or(Some(default_temperature)),
         top_k: None,
         top_p: None,
@@ -1103,6 +1106,7 @@ mod tests {
             tool_choice: None,
             thinking_allowed: true,
             thinking_effort: None,
+            speed: None,
         };
 
         let anthropic_request = into_anthropic(
@@ -1165,6 +1169,7 @@ mod tests {
             tools: vec![],
             tool_choice: None,
             thinking_allowed: true,
+            speed: None,
         };
         request.messages.push(LanguageModelRequestMessage {
             role: Role::Assistant,

crates/language_models/src/provider/bedrock.rs 🔗

@@ -684,6 +684,10 @@ impl LanguageModel for BedrockModel {
         }
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn telemetry_id(&self) -> String {
         format!("bedrock/{}", self.model.id())
     }
@@ -1237,8 +1241,25 @@ pub fn map_to_language_model_completion_events(
                                     .get_mut(&cb_delta.content_block_index)
                                 {
                                     tool_use.input_json.push_str(tool_output.input());
+                                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                                        &partial_json_fixer::fix_json(&tool_use.input_json),
+                                    ) {
+                                        Some(Ok(LanguageModelCompletionEvent::ToolUse(
+                                            LanguageModelToolUse {
+                                                id: tool_use.id.clone().into(),
+                                                name: tool_use.name.clone().into(),
+                                                is_input_complete: false,
+                                                raw_input: tool_use.input_json.clone(),
+                                                input,
+                                                thought_signature: None,
+                                            },
+                                        )))
+                                    } else {
+                                        None
+                                    }
+                                } else {
+                                    None
                                 }
-                                None
                             }
                             Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking {
                                 ReasoningContentBlockDelta::Text(thoughts) => {

crates/language_models/src/provider/cloud.rs 🔗

@@ -3,7 +3,7 @@ use anthropic::AnthropicModelMode;
 use anyhow::{Context as _, Result, anyhow};
 use chrono::{DateTime, Utc};
 use client::{Client, UserStore, zed_urls};
-use cloud_api_types::Plan;
+use cloud_api_types::{OrganizationId, Plan};
 use cloud_llm_client::{
     CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME,
     CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus,
@@ -43,7 +43,6 @@ use std::task::Poll;
 use std::time::Duration;
 use thiserror::Error;
 use ui::{TintColor, prelude::*};
-use util::{ResultExt as _, maybe};
 
 use crate::provider::anthropic::{
     AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic,
@@ -97,7 +96,7 @@ pub struct State {
     default_model: Option<Arc<cloud_llm_client::LanguageModel>>,
     default_fast_model: Option<Arc<cloud_llm_client::LanguageModel>>,
     recommended_models: Vec<Arc<cloud_llm_client::LanguageModel>>,
-    _fetch_models_task: Task<()>,
+    _user_store_subscription: Subscription,
     _settings_subscription: Subscription,
     _llm_token_subscription: Subscription,
 }
@@ -110,34 +109,41 @@ impl State {
         cx: &mut Context<Self>,
     ) -> Self {
         let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);
-        let mut current_user = user_store.read(cx).watch_current_user();
         Self {
             client: client.clone(),
             llm_api_token: LlmApiToken::default(),
-            user_store,
+            user_store: user_store.clone(),
             status,
             models: Vec::new(),
             default_model: None,
             default_fast_model: None,
             recommended_models: Vec::new(),
-            _fetch_models_task: cx.spawn(async move |this, cx| {
-                maybe!(async move {
-                    let (client, llm_api_token) = this
-                        .read_with(cx, |this, _cx| (client.clone(), this.llm_api_token.clone()))?;
+            _user_store_subscription: cx.subscribe(
+                &user_store,
+                move |this, _user_store, event, cx| match event {
+                    client::user::Event::PrivateUserInfoUpdated => {
+                        let status = *client.status().borrow();
+                        if status.is_signed_out() {
+                            return;
+                        }
 
-                    while current_user.borrow().is_none() {
-                        current_user.next().await;
+                        let client = this.client.clone();
+                        let llm_api_token = this.llm_api_token.clone();
+                        let organization_id = this
+                            .user_store
+                            .read(cx)
+                            .current_organization()
+                            .map(|organization| organization.id.clone());
+                        cx.spawn(async move |this, cx| {
+                            let response =
+                                Self::fetch_models(client, llm_api_token, organization_id).await?;
+                            this.update(cx, |this, cx| this.update_models(response, cx))
+                        })
+                        .detach_and_log_err(cx);
                     }
-
-                    let response =
-                        Self::fetch_models(client.clone(), llm_api_token.clone()).await?;
-                    this.update(cx, |this, cx| this.update_models(response, cx))?;
-                    anyhow::Ok(())
-                })
-                .await
-                .context("failed to fetch Zed models")
-                .log_err();
-            }),
+                    _ => {}
+                },
+            ),
             _settings_subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                 cx.notify();
             }),
@@ -146,9 +152,17 @@ impl State {
                 move |this, _listener, _event, cx| {
                     let client = this.client.clone();
                     let llm_api_token = this.llm_api_token.clone();
+                    let organization_id = this
+                        .user_store
+                        .read(cx)
+                        .current_organization()
+                        .map(|o| o.id.clone());
                     cx.spawn(async move |this, cx| {
-                        llm_api_token.refresh(&client).await?;
-                        let response = Self::fetch_models(client, llm_api_token).await?;
+                        llm_api_token
+                            .refresh(&client, organization_id.clone())
+                            .await?;
+                        let response =
+                            Self::fetch_models(client, llm_api_token, organization_id).await?;
                         this.update(cx, |this, cx| {
                             this.update_models(response, cx);
                         })
@@ -209,9 +223,10 @@ impl State {
     async fn fetch_models(
         client: Arc<Client>,
         llm_api_token: LlmApiToken,
+        organization_id: Option<OrganizationId>,
     ) -> Result<ListModelsResponse> {
         let http_client = &client.http_client();
-        let token = llm_api_token.acquire(&client).await?;
+        let token = llm_api_token.acquire(&client, organization_id).await?;
 
         let request = http_client::Request::builder()
             .method(Method::GET)
@@ -273,11 +288,13 @@ impl CloudLanguageModelProvider {
         &self,
         model: Arc<cloud_llm_client::LanguageModel>,
         llm_api_token: LlmApiToken,
+        user_store: Entity<UserStore>,
     ) -> Arc<dyn LanguageModel> {
         Arc::new(CloudLanguageModel {
             id: LanguageModelId(SharedString::from(model.id.0.clone())),
             model,
             llm_api_token,
+            user_store,
             client: self.client.clone(),
             request_limiter: RateLimiter::new(4),
         })
@@ -306,36 +323,46 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
     }
 
     fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
-        let default_model = self.state.read(cx).default_model.clone()?;
-        let llm_api_token = self.state.read(cx).llm_api_token.clone();
-        Some(self.create_language_model(default_model, llm_api_token))
+        let state = self.state.read(cx);
+        let default_model = state.default_model.clone()?;
+        let llm_api_token = state.llm_api_token.clone();
+        let user_store = state.user_store.clone();
+        Some(self.create_language_model(default_model, llm_api_token, user_store))
     }
 
     fn default_fast_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
-        let default_fast_model = self.state.read(cx).default_fast_model.clone()?;
-        let llm_api_token = self.state.read(cx).llm_api_token.clone();
-        Some(self.create_language_model(default_fast_model, llm_api_token))
+        let state = self.state.read(cx);
+        let default_fast_model = state.default_fast_model.clone()?;
+        let llm_api_token = state.llm_api_token.clone();
+        let user_store = state.user_store.clone();
+        Some(self.create_language_model(default_fast_model, llm_api_token, user_store))
     }
 
     fn recommended_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
-        let llm_api_token = self.state.read(cx).llm_api_token.clone();
-        self.state
-            .read(cx)
+        let state = self.state.read(cx);
+        let llm_api_token = state.llm_api_token.clone();
+        let user_store = state.user_store.clone();
+        state
             .recommended_models
             .iter()
             .cloned()
-            .map(|model| self.create_language_model(model, llm_api_token.clone()))
+            .map(|model| {
+                self.create_language_model(model, llm_api_token.clone(), user_store.clone())
+            })
             .collect()
     }
 
     fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
-        let llm_api_token = self.state.read(cx).llm_api_token.clone();
-        self.state
-            .read(cx)
+        let state = self.state.read(cx);
+        let llm_api_token = state.llm_api_token.clone();
+        let user_store = state.user_store.clone();
+        state
             .models
             .iter()
             .cloned()
-            .map(|model| self.create_language_model(model, llm_api_token.clone()))
+            .map(|model| {
+                self.create_language_model(model, llm_api_token.clone(), user_store.clone())
+            })
             .collect()
     }
 
@@ -367,6 +394,7 @@ pub struct CloudLanguageModel {
     id: LanguageModelId,
     model: Arc<cloud_llm_client::LanguageModel>,
     llm_api_token: LlmApiToken,
+    user_store: Entity<UserStore>,
     client: Arc<Client>,
     request_limiter: RateLimiter,
 }
@@ -380,12 +408,15 @@ impl CloudLanguageModel {
     async fn perform_llm_completion(
         client: Arc<Client>,
         llm_api_token: LlmApiToken,
+        organization_id: Option<OrganizationId>,
         app_version: Option<Version>,
         body: CompletionBody,
     ) -> Result<PerformLlmCompletionResponse> {
         let http_client = &client.http_client();
 
-        let mut token = llm_api_token.acquire(&client).await?;
+        let mut token = llm_api_token
+            .acquire(&client, organization_id.clone())
+            .await?;
         let mut refreshed_token = false;
 
         loop {
@@ -416,7 +447,9 @@ impl CloudLanguageModel {
             }
 
             if !refreshed_token && response.needs_llm_token_refresh() {
-                token = llm_api_token.refresh(&client).await?;
+                token = llm_api_token
+                    .refresh(&client, organization_id.clone())
+                    .await?;
                 refreshed_token = true;
                 continue;
             }
@@ -571,6 +604,10 @@ impl LanguageModel for CloudLanguageModel {
         self.model.supports_thinking
     }
 
+    fn supports_fast_mode(&self) -> bool {
+        self.model.supports_fast_mode
+    }
+
     fn supported_effort_levels(&self) -> Vec<LanguageModelEffortLevel> {
         self.model
             .supported_effort_levels
@@ -666,12 +703,17 @@ impl LanguageModel for CloudLanguageModel {
             cloud_llm_client::LanguageModelProvider::Google => {
                 let client = self.client.clone();
                 let llm_api_token = self.llm_api_token.clone();
+                let organization_id = self
+                    .user_store
+                    .read(cx)
+                    .current_organization()
+                    .map(|o| o.id.clone());
                 let model_id = self.model.id.to_string();
                 let generate_content_request =
                     into_google(request, model_id.clone(), GoogleModelMode::Default);
                 async move {
                     let http_client = &client.http_client();
-                    let token = llm_api_token.acquire(&client).await?;
+                    let token = llm_api_token.acquire(&client, organization_id).await?;
 
                     let request_body = CountTokensBody {
                         provider: cloud_llm_client::LanguageModelProvider::Google,
@@ -732,6 +774,13 @@ impl LanguageModel for CloudLanguageModel {
         let prompt_id = request.prompt_id.clone();
         let intent = request.intent;
         let app_version = Some(cx.update(|cx| AppVersion::global(cx)));
+        let user_store = self.user_store.clone();
+        let organization_id = cx.update(|cx| {
+            user_store
+                .read(cx)
+                .current_organization()
+                .map(|o| o.id.clone())
+        });
         let thinking_allowed = request.thinking_allowed;
         let enable_thinking = thinking_allowed && self.model.supports_thinking;
         let provider_name = provider_name(&self.model.provider);
@@ -763,6 +812,7 @@ impl LanguageModel for CloudLanguageModel {
 
                 let client = self.client.clone();
                 let llm_api_token = self.llm_api_token.clone();
+                let organization_id = organization_id.clone();
                 let future = self.request_limiter.stream(async move {
                     let PerformLlmCompletionResponse {
                         response,
@@ -770,6 +820,7 @@ impl LanguageModel for CloudLanguageModel {
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
+                        organization_id,
                         app_version,
                         CompletionBody {
                             thread_id,
@@ -799,6 +850,7 @@ impl LanguageModel for CloudLanguageModel {
             cloud_llm_client::LanguageModelProvider::OpenAi => {
                 let client = self.client.clone();
                 let llm_api_token = self.llm_api_token.clone();
+                let organization_id = organization_id.clone();
                 let effort = request
                     .thinking_effort
                     .as_ref()
@@ -824,6 +876,7 @@ impl LanguageModel for CloudLanguageModel {
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
+                        organization_id,
                         app_version,
                         CompletionBody {
                             thread_id,
@@ -857,6 +910,7 @@ impl LanguageModel for CloudLanguageModel {
                     None,
                 );
                 let llm_api_token = self.llm_api_token.clone();
+                let organization_id = organization_id.clone();
                 let future = self.request_limiter.stream(async move {
                     let PerformLlmCompletionResponse {
                         response,
@@ -864,6 +918,7 @@ impl LanguageModel for CloudLanguageModel {
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
+                        organization_id,
                         app_version,
                         CompletionBody {
                             thread_id,
@@ -898,6 +953,7 @@ impl LanguageModel for CloudLanguageModel {
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
+                        organization_id,
                         app_version,
                         CompletionBody {
                             thread_id,

crates/language_models/src/provider/copilot_chat.rs 🔗

@@ -246,6 +246,10 @@ impl LanguageModel for CopilotChatLanguageModel {
         self.model.supports_tools()
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_images(&self) -> bool {
         self.model.supports_vision()
     }
@@ -455,6 +459,23 @@ pub fn map_to_language_model_completion_events(
                                     entry.thought_signature = Some(thought_signature);
                                 }
                             }
+
+                            if !entry.id.is_empty() && !entry.name.is_empty() {
+                                if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                                    &partial_json_fixer::fix_json(&entry.arguments),
+                                ) {
+                                    events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                                        LanguageModelToolUse {
+                                            id: entry.id.clone().into(),
+                                            name: entry.name.as_str().into(),
+                                            is_input_complete: false,
+                                            input,
+                                            raw_input: entry.arguments.clone(),
+                                            thought_signature: entry.thought_signature.clone(),
+                                        },
+                                    )));
+                                }
+                            }
                         }
 
                         if let Some(usage) = event.usage {
@@ -727,7 +748,7 @@ impl CopilotResponsesEventMapper {
             }
 
             copilot_responses::StreamEvent::GenericError { error } => vec![Err(
-                LanguageModelCompletionError::Other(anyhow!(format!("{error:?}"))),
+                LanguageModelCompletionError::Other(anyhow!(error.message)),
             )],
 
             copilot_responses::StreamEvent::Created { .. }
@@ -930,6 +951,7 @@ fn into_copilot_responses(
         temperature,
         thinking_allowed: _,
         thinking_effort: _,
+        speed: _,
     } = request;
 
     let mut input_items: Vec<responses::ResponseInputItem> = Vec::new();

crates/language_models/src/provider/deepseek.rs 🔗

@@ -246,6 +246,10 @@ impl LanguageModel for DeepSeekLanguageModel {
         true
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool {
         true
     }
@@ -469,6 +473,23 @@ impl DeepSeekEventMapper {
                         entry.arguments.push_str(&arguments);
                     }
                 }
+
+                if !entry.id.is_empty() && !entry.name.is_empty() {
+                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                        &partial_json_fixer::fix_json(&entry.arguments),
+                    ) {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: entry.id.clone().into(),
+                                name: entry.name.as_str().into(),
+                                is_input_complete: false,
+                                input,
+                                raw_input: entry.arguments.clone(),
+                                thought_signature: None,
+                            },
+                        )));
+                    }
+                }
             }
         }
 

crates/language_models/src/provider/mistral.rs 🔗

@@ -280,6 +280,10 @@ impl LanguageModel for MistralLanguageModel {
         self.model.supports_tools()
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool {
         self.model.supports_tools()
     }
@@ -508,6 +512,13 @@ pub fn into_mistral(
             model: model.id().to_string(),
             messages,
             stream,
+            stream_options: if stream {
+                Some(mistral::StreamOptions {
+                    stream_tool_calls: Some(true),
+                })
+            } else {
+                None
+            },
             max_tokens: max_output_tokens,
             temperature: request.temperature,
             response_format: None,
@@ -616,12 +627,16 @@ impl MistralEventMapper {
             for tool_call in tool_calls {
                 let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
 
-                if let Some(tool_id) = tool_call.id.clone() {
+                if let Some(tool_id) = tool_call.id.clone()
+                    && !tool_id.is_empty()
+                {
                     entry.id = tool_id;
                 }
 
                 if let Some(function) = tool_call.function.as_ref() {
-                    if let Some(name) = function.name.clone() {
+                    if let Some(name) = function.name.clone()
+                        && !name.is_empty()
+                    {
                         entry.name = name;
                     }
 
@@ -629,6 +644,23 @@ impl MistralEventMapper {
                         entry.arguments.push_str(&arguments);
                     }
                 }
+
+                if !entry.id.is_empty() && !entry.name.is_empty() {
+                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                        &partial_json_fixer::fix_json(&entry.arguments),
+                    ) {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: entry.id.clone().into(),
+                                name: entry.name.as_str().into(),
+                                is_input_complete: false,
+                                input,
+                                raw_input: entry.arguments.clone(),
+                                thought_signature: None,
+                            },
+                        )));
+                    }
+                }
             }
         }
 
@@ -883,6 +915,7 @@ mod tests {
             stop: vec![],
             thinking_allowed: true,
             thinking_effort: None,
+            speed: Default::default(),
         };
 
         let (mistral_request, affinity) =
@@ -919,6 +952,7 @@ mod tests {
             stop: vec![],
             thinking_allowed: true,
             thinking_effort: None,
+            speed: None,
         };
 
         let (mistral_request, _) = into_mistral(request, mistral::Model::Pixtral12BLatest, None);

crates/language_models/src/provider/open_ai.rs 🔗

@@ -309,6 +309,7 @@ impl LanguageModel for OpenAiLanguageModel {
             | Model::FivePointOne
             | Model::FivePointTwo
             | Model::FivePointTwoCodex
+            | Model::FivePointThreeCodex
             | Model::O1
             | Model::O3 => true,
             Model::ThreePointFiveTurbo
@@ -327,6 +328,10 @@ impl LanguageModel for OpenAiLanguageModel {
         }
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_thinking(&self) -> bool {
         self.model.reasoning_effort().is_some()
     }
@@ -554,6 +559,7 @@ pub fn into_open_ai_response(
         temperature,
         thinking_allowed: _,
         thinking_effort: _,
+        speed: _,
     } = request;
 
     let mut input_items = Vec::new();
@@ -822,6 +828,23 @@ impl OpenAiEventMapper {
                             entry.arguments.push_str(&arguments);
                         }
                     }
+
+                    if !entry.id.is_empty() && !entry.name.is_empty() {
+                        if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                            &partial_json_fixer::fix_json(&entry.arguments),
+                        ) {
+                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                                LanguageModelToolUse {
+                                    id: entry.id.clone().into(),
+                                    name: entry.name.as_str().into(),
+                                    is_input_complete: false,
+                                    input,
+                                    raw_input: entry.arguments.clone(),
+                                    thought_signature: None,
+                                },
+                            )));
+                        }
+                    }
                 }
             }
         }
@@ -952,6 +975,20 @@ impl OpenAiResponseEventMapper {
             ResponsesStreamEvent::FunctionCallArgumentsDelta { item_id, delta, .. } => {
                 if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) {
                     entry.arguments.push_str(&delta);
+                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                        &partial_json_fixer::fix_json(&entry.arguments),
+                    ) {
+                        return vec![Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: LanguageModelToolUseId::from(entry.call_id.clone()),
+                                name: entry.name.clone(),
+                                is_input_complete: false,
+                                input,
+                                raw_input: entry.arguments.clone(),
+                                thought_signature: None,
+                            },
+                        ))];
+                    }
                 }
                 Vec::new()
             }
@@ -1032,9 +1069,9 @@ impl OpenAiResponseEventMapper {
             }
             ResponsesStreamEvent::Error { error }
             | ResponsesStreamEvent::GenericError { error } => {
-                vec![Err(LanguageModelCompletionError::Other(anyhow!(format!(
-                    "{error:?}"
-                ))))]
+                vec![Err(LanguageModelCompletionError::Other(anyhow!(
+                    error.message
+                )))]
             }
             ResponsesStreamEvent::OutputTextDone { .. } => Vec::new(),
             ResponsesStreamEvent::OutputItemDone { .. }
@@ -1180,8 +1217,11 @@ pub fn count_open_ai_tokens(
             | Model::FiveCodex
             | Model::FiveMini
             | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
-            // GPT-5.1, 5.2, and 5.2-codex don't have dedicated tiktoken support; use gpt-5 tokenizer
-            Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex => {
+            // GPT-5.1, 5.2, 5.2-codex, and 5.3-codex don't have dedicated tiktoken support; use gpt-5 tokenizer
+            Model::FivePointOne
+            | Model::FivePointTwo
+            | Model::FivePointTwoCodex
+            | Model::FivePointThreeCodex => {
                 tiktoken_rs::num_tokens_from_messages("gpt-5", &messages)
             }
         }
@@ -1431,6 +1471,7 @@ mod tests {
             temperature: None,
             thinking_allowed: true,
             thinking_effort: None,
+            speed: None,
         };
 
         // Validate that all models are supported by tiktoken-rs
@@ -1562,12 +1603,14 @@ mod tests {
                 name: "get_weather".into(),
                 description: "Fetches the weather".into(),
                 input_schema: json!({ "type": "object" }),
+                use_input_streaming: false,
             }],
             tool_choice: Some(LanguageModelToolChoice::Any),
             stop: vec!["<STOP>".into()],
             temperature: None,
             thinking_allowed: false,
             thinking_effort: None,
+            speed: None,
         };
 
         let response = into_open_ai_response(
@@ -1662,19 +1705,30 @@ mod tests {
         ];
 
         let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 3);
+        // First event is the partial tool use (from FunctionCallArgumentsDelta)
         assert!(matches!(
             mapped[0],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                is_input_complete: false,
+                ..
+            })
+        ));
+        // Second event is the complete tool use (from FunctionCallArgumentsDone)
+        assert!(matches!(
+            mapped[1],
             LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
                 ref id,
                 ref name,
                 ref raw_input,
+                is_input_complete: true,
                 ..
             }) if id.to_string() == "call_123"
                 && name.as_ref() == "get_weather"
                 && raw_input == "{\"city\":\"Boston\"}"
         ));
         assert!(matches!(
-            mapped[1],
+            mapped[2],
             LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
         ));
     }
@@ -1870,13 +1924,27 @@ mod tests {
         ];
 
         let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 3);
+        // First event is the partial tool use (from FunctionCallArgumentsDelta)
         assert!(matches!(
             mapped[0],
-            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. })
-            if raw_input == "{\"city\":\"Boston\"}"
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                is_input_complete: false,
+                ..
+            })
         ));
+        // Second event is the complete tool use (from the Incomplete response output)
         assert!(matches!(
             mapped[1],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                ref raw_input,
+                is_input_complete: true,
+                ..
+            })
+            if raw_input == "{\"city\":\"Boston\"}"
+        ));
+        assert!(matches!(
+            mapped[2],
             LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
         ));
     }
@@ -1968,4 +2036,80 @@ mod tests {
             LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
         ));
     }
+
+    #[test]
+    fn responses_stream_emits_partial_tool_use_events() {
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: ResponseOutputItem::FunctionCall(ResponseFunctionToolCall {
+                    id: Some("item_fn".to_string()),
+                    status: Some("in_progress".to_string()),
+                    name: Some("get_weather".to_string()),
+                    call_id: Some("call_abc".to_string()),
+                    arguments: String::new(),
+                }),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDelta {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                delta: "{\"city\":\"Bos".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDelta {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                delta: "ton\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "{\"city\":\"Boston\"}".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        let mapped = map_response_events(events);
+        // Two partial events + one complete event + Stop
+        assert!(mapped.len() >= 3);
+
+        // The last complete ToolUse event should have is_input_complete: true
+        let complete_tool_use = mapped.iter().find(|e| {
+            matches!(
+                e,
+                LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                    is_input_complete: true,
+                    ..
+                })
+            )
+        });
+        assert!(
+            complete_tool_use.is_some(),
+            "should have a complete tool use event"
+        );
+
+        // All ToolUse events before the final one should have is_input_complete: false
+        let tool_uses: Vec<_> = mapped
+            .iter()
+            .filter(|e| matches!(e, LanguageModelCompletionEvent::ToolUse(_)))
+            .collect();
+        assert!(
+            tool_uses.len() >= 2,
+            "should have at least one partial and one complete event"
+        );
+
+        let last = tool_uses.last().unwrap();
+        assert!(matches!(
+            last,
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                is_input_complete: true,
+                ..
+            })
+        ));
+    }
 }

crates/language_models/src/provider/open_router.rs 🔗

@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
 use collections::HashMap;
 use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
 use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task};
@@ -314,6 +314,10 @@ impl LanguageModel for OpenRouterLanguageModel {
         self.model.supports_tool_calls()
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_thinking(&self) -> bool {
         matches!(self.model.mode, OpenRouterModelMode::Thinking { .. })
     }
@@ -591,14 +595,21 @@ impl OpenRouterEventMapper {
         &mut self,
         event: ResponseStreamEvent,
     ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+        let mut events = Vec::new();
+
+        if let Some(usage) = event.usage {
+            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
+                input_tokens: usage.prompt_tokens,
+                output_tokens: usage.completion_tokens,
+                cache_creation_input_tokens: 0,
+                cache_read_input_tokens: 0,
+            })));
+        }
+
         let Some(choice) = event.choices.first() else {
-            return vec![Err(LanguageModelCompletionError::from(anyhow!(
-                "Response contained no choices"
-            )))];
+            return events;
         };
 
-        let mut events = Vec::new();
-
         if let Some(details) = choice.delta.reasoning_details.clone() {
             // Emit reasoning_details immediately
             events.push(Ok(LanguageModelCompletionEvent::ReasoningDetails(
@@ -643,16 +654,24 @@ impl OpenRouterEventMapper {
                         entry.thought_signature = Some(signature);
                     }
                 }
-            }
-        }
 
-        if let Some(usage) = event.usage {
-            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
-                input_tokens: usage.prompt_tokens,
-                output_tokens: usage.completion_tokens,
-                cache_creation_input_tokens: 0,
-                cache_read_input_tokens: 0,
-            })));
+                if !entry.id.is_empty() && !entry.name.is_empty() {
+                    if let Ok(input) = serde_json::from_str::<serde_json::Value>(
+                        &partial_json_fixer::fix_json(&entry.arguments),
+                    ) {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
+                                id: entry.id.clone().into(),
+                                name: entry.name.as_str().into(),
+                                is_input_complete: false,
+                                input,
+                                raw_input: entry.arguments.clone(),
+                                thought_signature: entry.thought_signature.clone(),
+                            },
+                        )));
+                    }
+                }
+            }
         }
 
         match choice.finish_reason.as_deref() {
@@ -891,7 +910,7 @@ mod tests {
             ResponseStreamEvent {
                 id: Some("response_123".into()),
                 created: 1234567890,
-                model: "google/gemini-3-pro-preview".into(),
+                model: "google/gemini-3.1-pro-preview".into(),
                 choices: vec![ChoiceDelta {
                     index: 0,
                     delta: ResponseMessageDelta {
@@ -916,7 +935,7 @@ mod tests {
             ResponseStreamEvent {
                 id: Some("response_123".into()),
                 created: 1234567890,
-                model: "google/gemini-3-pro-preview".into(),
+                model: "google/gemini-3.1-pro-preview".into(),
                 choices: vec![ChoiceDelta {
                     index: 0,
                     delta: ResponseMessageDelta {
@@ -942,7 +961,7 @@ mod tests {
             ResponseStreamEvent {
                 id: Some("response_123".into()),
                 created: 1234567890,
-                model: "google/gemini-3-pro-preview".into(),
+                model: "google/gemini-3.1-pro-preview".into(),
                 choices: vec![ChoiceDelta {
                     index: 0,
                     delta: ResponseMessageDelta {
@@ -969,7 +988,7 @@ mod tests {
             ResponseStreamEvent {
                 id: Some("response_123".into()),
                 created: 1234567890,
-                model: "google/gemini-3-pro-preview".into(),
+                model: "google/gemini-3.1-pro-preview".into(),
                 choices: vec![ChoiceDelta {
                     index: 0,
                     delta: ResponseMessageDelta {
@@ -1055,6 +1074,32 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_usage_only_chunk_with_empty_choices_does_not_error() {
+        let mut mapper = OpenRouterEventMapper::new();
+
+        let events = mapper.map_event(ResponseStreamEvent {
+            id: Some("response_123".into()),
+            created: 1234567890,
+            model: "google/gemini-3-flash-preview".into(),
+            choices: Vec::new(),
+            usage: Some(open_router::Usage {
+                prompt_tokens: 12,
+                completion_tokens: 7,
+                total_tokens: 19,
+            }),
+        });
+
+        assert_eq!(events.len(), 1);
+        match events.into_iter().next().unwrap() {
+            Ok(LanguageModelCompletionEvent::UsageUpdate(usage)) => {
+                assert_eq!(usage.input_tokens, 12);
+                assert_eq!(usage.output_tokens, 7);
+            }
+            other => panic!("Expected usage update event, got: {other:?}"),
+        }
+    }
+
     #[gpui::test]
     async fn test_agent_prevents_empty_reasoning_details_overwrite() {
         // This test verifies that the agent layer prevents empty reasoning_details

crates/language_models/src/provider/vercel.rs 🔗

@@ -248,6 +248,10 @@ impl LanguageModel for VercelLanguageModel {
         true
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
         match choice {
             LanguageModelToolChoice::Auto

crates/language_models/src/provider/vercel_ai_gateway.rs 🔗

@@ -0,0 +1,710 @@
+use anyhow::Result;
+use collections::BTreeMap;
+use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture};
+use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
+use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http};
+use language_model::{
+    ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
+    LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
+    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
+    LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter,
+    env_var,
+};
+use open_ai::ResponseStreamEvent;
+use serde::Deserialize;
+pub use settings::OpenAiCompatibleModelCapabilities as ModelCapabilities;
+pub use settings::VercelAiGatewayAvailableModel as AvailableModel;
+use settings::{Settings, SettingsStore};
+use std::sync::{Arc, LazyLock};
+use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
+use ui_input::InputField;
+use util::ResultExt;
+
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel_ai_gateway"); // Stable id referenced from settings/telemetry.
+const PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("Vercel AI Gateway"); // Human-readable name shown in the UI.
+
+const API_URL: &str = "https://ai-gateway.vercel.sh/v1"; // Default endpoint; overridable via settings.
+const API_KEY_ENV_VAR_NAME: &str = "VERCEL_AI_GATEWAY_API_KEY"; // Env var consulted before the keychain.
+static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME); // Lazily read once per process.
+
+#[derive(Default, Clone, Debug, PartialEq)]
+pub struct VercelAiGatewaySettings {
+    pub api_url: String, // Empty string means "use the default API_URL".
+    pub available_models: Vec<AvailableModel>, // User-declared models merged with fetched ones.
+}
+
+pub struct VercelAiGatewayLanguageModelProvider {
+    http_client: Arc<dyn HttpClient>, // Shared client used for both model listing and completions.
+    state: Entity<State>, // Auth + fetched-model state, observed by the configuration view.
+}
+
+pub struct State {
+    api_key_state: ApiKeyState, // Tracks the key per api_url (env var or stored credential).
+    http_client: Arc<dyn HttpClient>,
+    available_models: Vec<AvailableModel>, // Models fetched from the gateway; cleared on sign-out.
+    fetch_models_task: Option<Task<Result<(), LanguageModelCompletionError>>>, // Keeps the in-flight fetch alive.
+}
+
+impl State {
+    fn is_authenticated(&self) -> bool { // True once a key is available for the current URL.
+        self.api_key_state.has_key()
+    }
+
+    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+        let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
+        self.api_key_state
+            .store(api_url, api_key, |this| &mut this.api_key_state, cx) // `None` clears the stored credential.
+    }
+
+    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+        let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
+        let task = self
+            .api_key_state
+            .load_if_needed(api_url, |this| &mut this.api_key_state, cx);
+
+        cx.spawn(async move |this, cx| {
+            let result = task.await;
+            this.update(cx, |this, cx| this.restart_fetch_models_task(cx)) // Refresh model list after auth settles.
+                .ok();
+            result
+        })
+    }
+
+    fn fetch_models(
+        &mut self,
+        cx: &mut Context<Self>,
+    ) -> Task<Result<(), LanguageModelCompletionError>> {
+        let http_client = self.http_client.clone();
+        let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
+        let api_key = self.api_key_state.key(&api_url);
+        cx.spawn(async move |this, cx| {
+            let models = list_models(http_client.as_ref(), &api_url, api_key.as_deref()).await?;
+            this.update(cx, |this, cx| {
+                this.available_models = models;
+                cx.notify();
+            })
+            .map_err(LanguageModelCompletionError::Other)?; // Entity may be dropped; surface as generic error.
+            Ok(())
+        })
+    }
+
+    fn restart_fetch_models_task(&mut self, cx: &mut Context<Self>) {
+        if self.is_authenticated() {
+            let task = self.fetch_models(cx);
+            self.fetch_models_task.replace(task); // Dropping the old task cancels any stale fetch.
+        } else {
+            self.available_models = Vec::new(); // Signed out: forget fetched models.
+        }
+    }
+}
+
+impl VercelAiGatewayLanguageModelProvider {
+    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self { // Re-authenticates whenever provider settings change.
+        let state = cx.new(|cx| {
+            cx.observe_global::<SettingsStore>({
+                let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone();
+                move |this: &mut State, cx| {
+                    let current_settings = VercelAiGatewayLanguageModelProvider::settings(cx);
+                    if current_settings != &last_settings { // Only react to actual changes, not every settings event.
+                        last_settings = current_settings.clone();
+                        this.authenticate(cx).detach();
+                        cx.notify();
+                    }
+                }
+            })
+            .detach();
+            State {
+                api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+                http_client: http_client.clone(),
+                available_models: Vec::new(),
+                fetch_models_task: None,
+            }
+        });
+
+        Self { http_client, state }
+    }
+
+    fn settings(cx: &App) -> &VercelAiGatewaySettings {
+        &crate::AllLanguageModelSettings::get_global(cx).vercel_ai_gateway
+    }
+
+    fn api_url(cx: &App) -> SharedString { // Falls back to the default endpoint when unset.
+        let api_url = &Self::settings(cx).api_url;
+        if api_url.is_empty() {
+            API_URL.into()
+        } else {
+            SharedString::new(api_url.as_str())
+        }
+    }
+
+    fn default_available_model() -> AvailableModel { // Shown even before the model list is fetched.
+        AvailableModel {
+            name: "openai/gpt-5.3-codex".to_string(),
+            display_name: Some("GPT 5.3 Codex".to_string()),
+            max_tokens: 400_000,
+            max_output_tokens: Some(128_000),
+            max_completion_tokens: None,
+            capabilities: ModelCapabilities::default(),
+        }
+    }
+
+    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
+        Arc::new(VercelAiGatewayLanguageModel {
+            id: LanguageModelId::from(model.name.clone()),
+            model,
+            state: self.state.clone(),
+            http_client: self.http_client.clone(),
+            request_limiter: RateLimiter::new(4), // At most 4 concurrent completion requests.
+        })
+    }
+}
+
+impl LanguageModelProviderState for VercelAiGatewayLanguageModelProvider {
+    type ObservableEntity = State; // Lets the model picker re-render when auth/models change.
+
+    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
+        Some(self.state.clone())
+    }
+}
+
+impl LanguageModelProvider for VercelAiGatewayLanguageModelProvider {
+    fn id(&self) -> LanguageModelProviderId {
+        PROVIDER_ID
+    }
+
+    fn name(&self) -> LanguageModelProviderName {
+        PROVIDER_NAME
+    }
+
+    fn icon(&self) -> IconOrSvg {
+        IconOrSvg::Icon(IconName::AiVercel)
+    }
+
+    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
+        Some(self.create_language_model(Self::default_available_model()))
+    }
+
+    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
+        None // No dedicated "fast" model for this gateway.
+    }
+
+    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
+        let mut models = BTreeMap::default(); // BTreeMap: dedupe by name, keep alphabetical order.
+
+        let default_model = Self::default_available_model();
+        models.insert(default_model.name.clone(), default_model);
+
+        for model in self.state.read(cx).available_models.clone() { // Fetched models override the default.
+            models.insert(model.name.clone(), model);
+        }
+
+        for model in &Self::settings(cx).available_models { // Settings take highest precedence.
+            models.insert(model.name.clone(), model.clone());
+        }
+
+        models
+            .into_values()
+            .map(|model| self.create_language_model(model))
+            .collect()
+    }
+
+    fn is_authenticated(&self, cx: &App) -> bool {
+        self.state.read(cx).is_authenticated()
+    }
+
+    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
+        self.state.update(cx, |state, cx| state.authenticate(cx))
+    }
+
+    fn configuration_view(
+        &self,
+        _target_agent: language_model::ConfigurationViewTargetAgent,
+        window: &mut Window,
+        cx: &mut App,
+    ) -> AnyView {
+        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
+            .into()
+    }
+
+    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
+        self.state
+            .update(cx, |state, cx| state.set_api_key(None, cx)) // Clearing the key also drops fetched models.
+    }
+}
+
+pub struct VercelAiGatewayLanguageModel {
+    id: LanguageModelId, // Derived from the gateway model name.
+    model: AvailableModel,
+    state: Entity<State>, // Read for the API key at request time.
+    http_client: Arc<dyn HttpClient>,
+    request_limiter: RateLimiter,
+}
+
+impl VercelAiGatewayLanguageModel {
+    fn stream_open_ai( // Streams an OpenAI-compatible completion through the gateway, rate-limited.
+        &self,
+        request: open_ai::Request,
+        cx: &AsyncApp,
+    ) -> BoxFuture<
+        'static,
+        Result<
+            futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>,
+            LanguageModelCompletionError,
+        >,
+    > {
+        let http_client = self.http_client.clone();
+        let (api_key, api_url) = self.state.read_with(cx, |state, cx| { // Snapshot key + URL before going async.
+            let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
+            (state.api_key_state.key(&api_url), api_url)
+        });
+
+        let future = self.request_limiter.stream(async move {
+            let provider = PROVIDER_NAME;
+            let Some(api_key) = api_key else { // Fail fast if the user never configured a key.
+                return Err(LanguageModelCompletionError::NoApiKey { provider });
+            };
+            let request = open_ai::stream_completion(
+                http_client.as_ref(),
+                provider.0.as_str(),
+                &api_url,
+                &api_key,
+                request,
+            );
+            let response = request.await.map_err(map_open_ai_error)?; // Translate transport errors into provider errors.
+            Ok(response)
+        });
+
+        async move { Ok(future.await?.boxed()) }.boxed()
+    }
+}
+
+fn map_open_ai_error(error: open_ai::RequestError) -> LanguageModelCompletionError { // Converts HTTP-layer failures into provider errors.
+    match error {
+        open_ai::RequestError::HttpResponseError {
+            status_code,
+            body,
+            headers,
+            ..
+        } => {
+            let retry_after = headers
+                .get(http::header::RETRY_AFTER)
+                .and_then(|value| value.to_str().ok()?.parse::<u64>().ok()) // Only integer-seconds form; HTTP-date variant is ignored.
+                .map(std::time::Duration::from_secs);
+
+            LanguageModelCompletionError::from_http_status(
+                PROVIDER_NAME,
+                status_code,
+                extract_error_message(&body),
+                retry_after,
+            )
+        }
+        open_ai::RequestError::Other(error) => LanguageModelCompletionError::Other(error),
+    }
+}
+
+fn extract_error_message(body: &str) -> String { // Pulls a human-readable message out of a JSON error body, else returns it verbatim.
+    let json = match serde_json::from_str::<serde_json::Value>(body) {
+        Ok(json) => json,
+        Err(_) => return body.to_string(), // Not JSON: show the raw body.
+    };
+
+    let message = json
+        .get("error")
+        .and_then(|value| {
+            value
+                .get("message") // {"error": {"message": "..."}}
+                .and_then(serde_json::Value::as_str)
+                .or_else(|| value.as_str()) // {"error": "..."}
+        })
+        .or_else(|| json.get("message").and_then(serde_json::Value::as_str)) // {"message": "..."}
+        .map(ToString::to_string)
+        .unwrap_or_else(|| body.to_string()),
+
+    clean_error_message(&message)
+}
+
+fn clean_error_message(message: &str) -> String { // Rewrites known gateway auth errors into actionable guidance.
+    let lower = message.to_lowercase();
+
+    if lower.contains("vercel_oidc_token") && lower.contains("oidc token") { // User supplied an OIDC token instead of a vck_ key.
+        return "Authentication failed for Vercel AI Gateway. Use a Vercel AI Gateway key (vck_...).\nCreate or manage keys in Vercel AI Gateway console.\nIf this persists, regenerate the key and update it in Vercel AI Gateway provider settings in Zed.".to_string();
+    }
+
+    if lower.contains("invalid api key") || lower.contains("invalid_api_key") {
+        return "Authentication failed for Vercel AI Gateway. Check that your Vercel AI Gateway key starts with vck_ and is active.".to_string();
+    }
+
+    message.to_string() // Anything else passes through unchanged.
+}
+
+fn has_tag(tags: &[String], expected: &str) -> bool { // Case-insensitive, whitespace-tolerant tag membership test.
+    tags.iter()
+        .any(|tag| tag.trim().eq_ignore_ascii_case(expected))
+}
+
+impl LanguageModel for VercelAiGatewayLanguageModel {
+    fn id(&self) -> LanguageModelId {
+        self.id.clone()
+    }
+
+    fn name(&self) -> LanguageModelName { // Prefer the display name; fall back to the raw gateway id.
+        LanguageModelName::from(
+            self.model
+                .display_name
+                .clone()
+                .unwrap_or_else(|| self.model.name.clone()),
+        )
+    }
+
+    fn provider_id(&self) -> LanguageModelProviderId {
+        PROVIDER_ID
+    }
+
+    fn provider_name(&self) -> LanguageModelProviderName {
+        PROVIDER_NAME
+    }
+
+    fn supports_tools(&self) -> bool {
+        self.model.capabilities.tools
+    }
+
+    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
+        LanguageModelToolSchemaFormat::JsonSchemaSubset // Gateway routes to many backends; use the conservative subset.
+    }
+
+    fn supports_images(&self) -> bool {
+        self.model.capabilities.images
+    }
+
+    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
+        match choice {
+            // Forcing tool use (Auto/Any) only makes sense when the model supports tools at all.
+            LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => self.model.capabilities.tools,
+            LanguageModelToolChoice::None => true,
+        }
+    }
+
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
+    fn supports_split_token_display(&self) -> bool {
+        true
+    }
+
+    fn telemetry_id(&self) -> String {
+        format!("vercel_ai_gateway/{}", self.model.name)
+    }
+
+    fn max_token_count(&self) -> u64 {
+        self.model.max_tokens
+    }
+
+    fn max_output_tokens(&self) -> Option<u64> {
+        self.model.max_output_tokens
+    }
+
+    fn count_tokens( // Approximate count via tiktoken; exact tokenizer varies per routed backend.
+        &self,
+        request: LanguageModelRequest,
+        cx: &App,
+    ) -> BoxFuture<'static, Result<u64>> {
+        let max_token_count = self.max_token_count();
+        cx.background_spawn(async move {
+            let messages = crate::provider::open_ai::collect_tiktoken_messages(request);
+            let model = if max_token_count >= 100_000 { // Large-context models approximate better with the gpt-4o tokenizer.
+                "gpt-4o"
+            } else {
+                "gpt-4"
+            };
+            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
+        })
+        .boxed()
+    }
+
+    fn stream_completion(
+        &self,
+        request: LanguageModelRequest,
+        cx: &AsyncApp,
+    ) -> BoxFuture<
+        'static,
+        Result<
+            futures::stream::BoxStream<
+                'static,
+                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
+            >,
+            LanguageModelCompletionError,
+        >,
+    > {
+        let request = crate::provider::open_ai::into_open_ai( // Reuse the OpenAI request/stream mapping wholesale.
+            request,
+            &self.model.name,
+            self.model.capabilities.parallel_tool_calls,
+            self.model.capabilities.prompt_cache_key,
+            self.max_output_tokens(),
+            None,
+        );
+        let completions = self.stream_open_ai(request, cx);
+        async move {
+            let mapper = crate::provider::open_ai::OpenAiEventMapper::new();
+            Ok(mapper.map_stream(completions.await?).boxed())
+        }
+        .boxed()
+    }
+}
+
+#[derive(Deserialize)]
+struct ModelsResponse { // Shape of GET /v1/models — assumed from usage; TODO confirm against gateway API docs.
+    data: Vec<ApiModel>,
+}
+
+#[derive(Deserialize)]
+struct ApiModel { // One model entry from the gateway listing; most fields are optional in the payload.
+    id: String,
+    name: Option<String>, // Display name, when provided.
+    context_window: Option<u64>,
+    max_tokens: Option<u64>, // Presumably max output tokens — verify against gateway docs.
+    #[serde(default)]
+    r#type: Option<String>, // e.g. "language"; non-language entries are filtered out.
+    #[serde(default)]
+    supported_parameters: Vec<String>,
+    #[serde(default)]
+    tags: Vec<String>,
+    architecture: Option<ApiModelArchitecture>,
+}
+
+#[derive(Deserialize)]
+struct ApiModelArchitecture { // Used only to detect image-input support.
+    #[serde(default)]
+    input_modalities: Vec<String>,
+}
+
+async fn list_models( // Fetches the gateway's model catalog and maps it to AvailableModel entries.
+    client: &dyn HttpClient,
+    api_url: &str,
+    api_key: Option<&str>,
+) -> Result<Vec<AvailableModel>, LanguageModelCompletionError> {
+    let uri = format!("{api_url}/models?include_mappings=true"); // include_mappings presumably expands aliases — confirm with gateway docs.
+    let mut request_builder = HttpRequest::builder()
+        .method(Method::GET)
+        .uri(uri)
+        .header("Accept", "application/json");
+    if let Some(api_key) = api_key { // Listing may work unauthenticated; key is attached when present.
+        request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key));
+    }
+    let request = request_builder
+        .body(AsyncBody::default())
+        .map_err(|error| LanguageModelCompletionError::BuildRequestBody {
+            provider: PROVIDER_NAME,
+            error,
+        })?;
+    let mut response =
+        client
+            .send(request)
+            .await
+            .map_err(|error| LanguageModelCompletionError::HttpSend {
+                provider: PROVIDER_NAME,
+                error,
+            })?;
+
+    let mut body = String::new();
+    response
+        .body_mut()
+        .read_to_string(&mut body)
+        .await
+        .map_err(|error| LanguageModelCompletionError::ApiReadResponseError {
+            provider: PROVIDER_NAME,
+            error,
+        })?;
+
+    if !response.status().is_success() { // Body is read first so error responses can be surfaced too.
+        return Err(LanguageModelCompletionError::from_http_status(
+            PROVIDER_NAME,
+            response.status(),
+            extract_error_message(&body),
+            None,
+        ));
+    }
+
+    let response: ModelsResponse = serde_json::from_str(&body).map_err(|error| {
+        LanguageModelCompletionError::DeserializeResponse {
+            provider: PROVIDER_NAME,
+            error,
+        }
+    })?;
+
+    let mut models = Vec::new();
+    for model in response.data {
+        if let Some(model_type) = model.r#type.as_deref() // Skip non-language models (embeddings, images, ...).
+            && model_type != "language"
+        {
+            continue;
+        }
+        let supports_tools = model // Capabilities come from either supported_parameters or tags.
+            .supported_parameters
+            .iter()
+            .any(|parameter| parameter == "tools")
+            || has_tag(&model.tags, "tool-use")
+            || has_tag(&model.tags, "tools");
+        let supports_images = model.architecture.is_some_and(|architecture| {
+            architecture
+                .input_modalities
+                .iter()
+                .any(|modality| modality == "image")
+        }) || has_tag(&model.tags, "vision")
+            || has_tag(&model.tags, "image-input");
+        let parallel_tool_calls = model
+            .supported_parameters
+            .iter()
+            .any(|parameter| parameter == "parallel_tool_calls");
+        let prompt_cache_key = model
+            .supported_parameters
+            .iter()
+            .any(|parameter| parameter == "prompt_cache_key" || parameter == "cache_control");
+        models.push(AvailableModel {
+            name: model.id.clone(),
+            display_name: model.name.or(Some(model.id)), // Fall back to the raw id as display name.
+            max_tokens: model.context_window.or(model.max_tokens).unwrap_or(128_000), // 128k default when the API reports nothing.
+            max_output_tokens: model.max_tokens,
+            max_completion_tokens: None,
+            capabilities: ModelCapabilities {
+                tools: supports_tools,
+                images: supports_images,
+                parallel_tool_calls,
+                prompt_cache_key,
+                chat_completions: true,
+            },
+        });
+    }
+
+    Ok(models)
+}
+
+struct ConfigurationView {
+    api_key_editor: Entity<InputField>, // Text field where the user pastes the vck_ key.
+    state: Entity<State>,
+    load_credentials_task: Option<Task<()>>, // Some while the initial credential load is in flight.
+}
+
+impl ConfigurationView {
+    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
+        let api_key_editor =
+            cx.new(|cx| InputField::new(window, cx, "vck_000000000000000000000000000")); // Placeholder shows the expected key format.
+
+        cx.observe(&state, |_, _, cx| cx.notify()).detach(); // Re-render whenever auth state changes.
+
+        let load_credentials_task = Some(cx.spawn_in(window, {
+            let state = state.clone();
+            async move |this, cx| {
+                // Kick off authentication; errors surface through the observed state entity.
+                let task = state.update(cx, |state, cx| state.authenticate(cx));
+                let _ = task.await;
+                this.update(cx, |this, cx| {
+                    this.load_credentials_task = None; // Hides the "Loading credentials..." placeholder.
+                    cx.notify();
+                })
+                .log_err();
+            }
+        }));
+
+        Self {
+            api_key_editor,
+            state,
+            load_credentials_task,
+        }
+    }
+
+    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
+        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
+        if api_key.is_empty() { // Ignore confirm on an empty field.
+            return;
+        }
+
+        self.api_key_editor
+            .update(cx, |editor, cx| editor.set_text("", window, cx)); // Don't leave the secret visible in the editor.
+
+        let state = self.state.clone();
+        cx.spawn_in(window, async move |_, cx| {
+            state
+                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
+                .await
+        })
+        .detach_and_log_err(cx);
+    }
+
+    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        self.api_key_editor
+            .update(cx, |editor, cx| editor.set_text("", window, cx));
+
+        let state = self.state.clone();
+        cx.spawn_in(window, async move |_, cx| {
+            state
+                .update(cx, |state, cx| state.set_api_key(None, cx))
+                .await
+        })
+        .detach_and_log_err(cx);
+    }
+
+    fn should_render_editor(&self, cx: &Context<Self>) -> bool {
+        !self.state.read(cx).is_authenticated()
+    }
+}
+
+impl Render for ConfigurationView {
+    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement { // Three states: loading, key entry, configured card.
+        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
+        let configured_card_label = if env_var_set {
+            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
+        } else {
+            let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
+            if api_url == API_URL { // Mention the URL only when it's a custom endpoint.
+                "API key configured".to_string()
+            } else {
+                format!("API key configured for {}", api_url)
+            }
+        };
+
+        if self.load_credentials_task.is_some() {
+            div().child(Label::new("Loading credentials...")).into_any()
+        } else if self.should_render_editor(cx) {
+            v_flex()
+                .size_full()
+                .on_action(cx.listener(Self::save_api_key)) // Enter in the field triggers save_api_key.
+                .child(Label::new(
+                    "To use Zed's agent with Vercel AI Gateway, you need to add an API key. Follow these steps:",
+                ))
+                .child(
+                    List::new()
+                        .child(
+                            ListBulletItem::new("")
+                                .child(Label::new("Create an API key in"))
+                                .child(ButtonLink::new(
+                                    "Vercel AI Gateway's console",
+                                    "https://vercel.com/d?to=%2F%5Bteam%5D%2F%7E%2Fai%2Fapi-keys&title=Go+to+AI+Gateway",
+                                )),
+                        )
+                        .child(ListBulletItem::new(
+                            "Paste your API key below and hit enter to start using the assistant",
+                        )),
+                )
+                .child(self.api_key_editor.clone())
+                .child(
+                    Label::new(format!(
+                        "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed.",
+                    ))
+                    .size(LabelSize::Small)
+                    .color(Color::Muted),
+                )
+                .into_any_element()
+        } else {
+            ConfiguredApiCard::new(configured_card_label)
+                .disabled(env_var_set) // Env-var keys can't be reset from the UI.
+                .when(env_var_set, |this| {
+                    this.tooltip_label(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."))
+                })
+                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
+                .into_any_element()
+        }
+    }
+}

crates/language_models/src/provider/x_ai.rs 🔗

@@ -257,6 +257,10 @@ impl LanguageModel for XAiLanguageModel {
         self.model.supports_images()
     }
 
+    fn supports_streaming_tools(&self) -> bool {
+        true
+    }
+
     fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
         match choice {
             LanguageModelToolChoice::Auto
@@ -265,8 +269,7 @@ impl LanguageModel for XAiLanguageModel {
         }
     }
     fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
-        let model_id = self.model.id().trim().to_lowercase();
-        if model_id.eq(x_ai::Model::Grok4.id()) || model_id.eq(x_ai::Model::GrokCodeFast1.id()) {
+        if self.model.requires_json_schema_subset() {
             LanguageModelToolSchemaFormat::JsonSchemaSubset
         } else {
             LanguageModelToolSchemaFormat::JsonSchema

crates/language_models/src/settings.rs 🔗

@@ -8,7 +8,7 @@ use crate::provider::{
     deepseek::DeepSeekSettings, google::GoogleSettings, lmstudio::LmStudioSettings,
     mistral::MistralSettings, ollama::OllamaSettings, open_ai::OpenAiSettings,
     open_ai_compatible::OpenAiCompatibleSettings, open_router::OpenRouterSettings,
-    vercel::VercelSettings, x_ai::XAiSettings,
+    vercel::VercelSettings, vercel_ai_gateway::VercelAiGatewaySettings, x_ai::XAiSettings,
 };
 
 #[derive(Debug, RegisterSetting)]
@@ -24,6 +24,7 @@ pub struct AllLanguageModelSettings {
     pub openai: OpenAiSettings,
     pub openai_compatible: HashMap<Arc<str>, OpenAiCompatibleSettings>,
     pub vercel: VercelSettings,
+    pub vercel_ai_gateway: VercelAiGatewaySettings,
     pub x_ai: XAiSettings,
     pub zed_dot_dev: ZedDotDevSettings,
 }
@@ -44,6 +45,7 @@ impl settings::Settings for AllLanguageModelSettings {
         let openai = language_models.openai.unwrap();
         let openai_compatible = language_models.openai_compatible.unwrap();
         let vercel = language_models.vercel.unwrap();
+        let vercel_ai_gateway = language_models.vercel_ai_gateway.unwrap();
         let x_ai = language_models.x_ai.unwrap();
         let zed_dot_dev = language_models.zed_dot_dev.unwrap();
         Self {
@@ -107,6 +109,10 @@ impl settings::Settings for AllLanguageModelSettings {
                 api_url: vercel.api_url.unwrap(),
                 available_models: vercel.available_models.unwrap_or_default(),
             },
+            vercel_ai_gateway: VercelAiGatewaySettings {
+                api_url: vercel_ai_gateway.api_url.unwrap(),
+                available_models: vercel_ai_gateway.available_models.unwrap_or_default(),
+            },
             x_ai: XAiSettings {
                 api_url: x_ai.api_url.unwrap(),
                 available_models: x_ai.available_models.unwrap_or_default(),

crates/language_selector/src/language_selector.rs 🔗

@@ -71,11 +71,16 @@ impl LanguageSelector {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
+        let current_language_name = buffer
+            .read(cx)
+            .language()
+            .map(|language| language.name().as_ref().to_string());
         let delegate = LanguageSelectorDelegate::new(
             cx.entity().downgrade(),
             buffer,
             project,
             language_registry,
+            current_language_name,
         );
 
         let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
@@ -109,6 +114,7 @@ pub struct LanguageSelectorDelegate {
     candidates: Vec<StringMatchCandidate>,
     matches: Vec<StringMatch>,
     selected_index: usize,
+    current_language_candidate_index: Option<usize>,
 }
 
 impl LanguageSelectorDelegate {
@@ -117,6 +123,7 @@ impl LanguageSelectorDelegate {
         buffer: Entity<Buffer>,
         project: Entity<Project>,
         language_registry: Arc<LanguageRegistry>,
+        current_language_name: Option<String>,
     ) -> Self {
         let candidates = language_registry
             .language_names()
@@ -132,6 +139,12 @@ impl LanguageSelectorDelegate {
             .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name.as_ref()))
             .collect::<Vec<_>>();
 
+        let current_language_candidate_index = current_language_name.as_ref().and_then(|name| {
+            candidates
+                .iter()
+                .position(|candidate| candidate.string == *name)
+        });
+
         Self {
             language_selector,
             buffer,
@@ -139,7 +152,8 @@ impl LanguageSelectorDelegate {
             language_registry,
             candidates,
             matches: vec![],
-            selected_index: 0,
+            selected_index: current_language_candidate_index.unwrap_or(0),
+            current_language_candidate_index,
         }
     }
 
@@ -239,8 +253,9 @@ impl PickerDelegate for LanguageSelectorDelegate {
     ) -> gpui::Task<()> {
         let background = cx.background_executor().clone();
         let candidates = self.candidates.clone();
+        let query_is_empty = query.is_empty();
         cx.spawn_in(window, async move |this, cx| {
-            let matches = if query.is_empty() {
+            let matches = if query_is_empty {
                 candidates
                     .into_iter()
                     .enumerate()
@@ -264,12 +279,21 @@ impl PickerDelegate for LanguageSelectorDelegate {
                 .await
             };
 
-            this.update(cx, |this, cx| {
+            this.update_in(cx, |this, window, cx| {
                 let delegate = &mut this.delegate;
                 delegate.matches = matches;
                 delegate.selected_index = delegate
                     .selected_index
                     .min(delegate.matches.len().saturating_sub(1));
+
+                if query_is_empty {
+                    if let Some(index) = delegate
+                        .current_language_candidate_index
+                        .and_then(|ci| delegate.matches.iter().position(|m| m.candidate_id == ci))
+                    {
+                        this.set_selected_index(index, None, false, window, cx);
+                    }
+                }
                 cx.notify();
             })
             .log_err();
@@ -295,3 +319,255 @@ impl PickerDelegate for LanguageSelectorDelegate {
         )
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use editor::Editor;
+    use gpui::{TestAppContext, VisualTestContext};
+    use language::{Language, LanguageConfig};
+    use project::{Project, ProjectPath};
+    use serde_json::json;
+    use std::sync::Arc;
+    use util::{path, rel_path::rel_path};
+    use workspace::{AppState, MultiWorkspace, Workspace};
+
+    fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
+        cx.update(|cx| {
+            let app_state = AppState::test(cx);
+            settings::init(cx);
+            super::init(cx);
+            editor::init(cx);
+            app_state
+        })
+    }
+
+    fn register_test_languages(project: &Entity<Project>, cx: &mut VisualTestContext) {
+        project.read_with(cx, |project, _| {
+            let language_registry = project.languages();
+            language_registry.add(Arc::new(Language::new(
+                LanguageConfig {
+                    name: "Rust".into(),
+                    matcher: LanguageMatcher {
+                        path_suffixes: vec!["rs".to_string()],
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                },
+                None,
+            )));
+            language_registry.add(Arc::new(Language::new(
+                LanguageConfig {
+                    name: "TypeScript".into(),
+                    matcher: LanguageMatcher {
+                        path_suffixes: vec!["ts".to_string()],
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                },
+                None,
+            )));
+        });
+    }
+
+    async fn open_file_editor(
+        workspace: &Entity<Workspace>,
+        project: &Entity<Project>,
+        file_path: &str,
+        cx: &mut VisualTestContext,
+    ) -> Entity<Editor> {
+        let worktree_id = project.update(cx, |project, cx| {
+            project
+                .worktrees(cx)
+                .next()
+                .expect("project should have a worktree")
+                .read(cx)
+                .id()
+        });
+        let project_path = ProjectPath {
+            worktree_id,
+            path: rel_path(file_path).into(),
+        };
+        let opened_item = workspace
+            .update_in(cx, |workspace, window, cx| {
+                workspace.open_path(project_path, None, true, window, cx)
+            })
+            .await
+            .expect("file should open");
+
+        cx.update(|_, cx| {
+            opened_item
+                .act_as::<Editor>(cx)
+                .expect("opened item should be an editor")
+        })
+    }
+
+    async fn open_empty_editor(
+        workspace: &Entity<Workspace>,
+        project: &Entity<Project>,
+        cx: &mut VisualTestContext,
+    ) -> Entity<Editor> {
+        let create_buffer = project.update(cx, |project, cx| project.create_buffer(None, true, cx));
+        let buffer = create_buffer.await.expect("empty buffer should be created");
+        let editor = cx.new_window_entity(|window, cx| {
+            Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx)
+        });
+        workspace.update_in(cx, |workspace, window, cx| {
+            workspace.add_item_to_center(Box::new(editor.clone()), window, cx);
+        });
+        // Ensure the buffer has no language after the editor is created
+        buffer.update(cx, |buffer, cx| {
+            buffer.set_language(None, cx);
+        });
+        editor
+    }
+
+    async fn set_editor_language(
+        project: &Entity<Project>,
+        editor: &Entity<Editor>,
+        language_name: &str,
+        cx: &mut VisualTestContext,
+    ) {
+        let language = project
+            .read_with(cx, |project, _| {
+                project.languages().language_for_name(language_name)
+            })
+            .await
+            .expect("language should exist in registry");
+        editor.update(cx, move |editor, cx| {
+            let (_, buffer, _) = editor
+                .active_excerpt(cx)
+                .expect("editor should have an active excerpt");
+            buffer.update(cx, |buffer, cx| {
+                buffer.set_language(Some(language), cx);
+            });
+        });
+    }
+
+    fn active_picker(
+        workspace: &Entity<Workspace>,
+        cx: &mut VisualTestContext,
+    ) -> Entity<Picker<LanguageSelectorDelegate>> {
+        workspace.update(cx, |workspace, cx| {
+            workspace
+                .active_modal::<LanguageSelector>(cx)
+                .expect("language selector should be open")
+                .read(cx)
+                .picker
+                .clone()
+        })
+    }
+
+    fn open_selector(
+        workspace: &Entity<Workspace>,
+        cx: &mut VisualTestContext,
+    ) -> Entity<Picker<LanguageSelectorDelegate>> {
+        cx.dispatch_action(Toggle);
+        cx.run_until_parked();
+        active_picker(workspace, cx)
+    }
+
+    fn close_selector(workspace: &Entity<Workspace>, cx: &mut VisualTestContext) {
+        cx.dispatch_action(Toggle);
+        cx.run_until_parked();
+        workspace.read_with(cx, |workspace, cx| {
+            assert!(
+                workspace.active_modal::<LanguageSelector>(cx).is_none(),
+                "language selector should be closed"
+            );
+        });
+    }
+
+    fn assert_selected_language_for_editor(
+        workspace: &Entity<Workspace>,
+        editor: &Entity<Editor>,
+        expected_language_name: Option<&str>,
+        cx: &mut VisualTestContext,
+    ) {
+        workspace.update_in(cx, |workspace, window, cx| {
+            let was_activated = workspace.activate_item(editor, true, true, window, cx);
+            assert!(
+                was_activated,
+                "editor should be activated before opening the modal"
+            );
+        });
+        cx.run_until_parked();
+
+        let picker = open_selector(workspace, cx);
+        picker.read_with(cx, |picker, _| {
+            let selected_match = picker
+                .delegate
+                .matches
+                .get(picker.delegate.selected_index)
+                .expect("selected index should point to a match");
+            let selected_candidate = picker
+                .delegate
+                .candidates
+                .get(selected_match.candidate_id)
+                .expect("selected match should map to a candidate");
+
+            if let Some(expected_language_name) = expected_language_name {
+                let current_language_candidate_index = picker
+                    .delegate
+                    .current_language_candidate_index
+                    .expect("current language should map to a candidate");
+                assert_eq!(
+                    selected_match.candidate_id,
+                    current_language_candidate_index
+                );
+                assert_eq!(selected_candidate.string, expected_language_name);
+            } else {
+                assert!(picker.delegate.current_language_candidate_index.is_none());
+                assert_eq!(picker.delegate.selected_index, 0);
+            }
+        });
+        close_selector(workspace, cx);
+    }
+
+    #[gpui::test]
+    async fn test_language_selector_selects_current_language_per_active_editor(
+        cx: &mut TestAppContext,
+    ) {
+        let app_state = init_test(cx);
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(
+                path!("/test"),
+                json!({
+                    "rust_file.rs": "fn main() {}\n",
+                    "typescript_file.ts": "const value = 1;\n",
+                }),
+            )
+            .await;
+
+        let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+        let workspace =
+            multi_workspace.read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone());
+        register_test_languages(&project, cx);
+
+        let rust_editor = open_file_editor(&workspace, &project, "rust_file.rs", cx).await;
+        let typescript_editor =
+            open_file_editor(&workspace, &project, "typescript_file.ts", cx).await;
+        let empty_editor = open_empty_editor(&workspace, &project, cx).await;
+
+        set_editor_language(&project, &rust_editor, "Rust", cx).await;
+        set_editor_language(&project, &typescript_editor, "TypeScript", cx).await;
+        cx.run_until_parked();
+
+        assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx);
+        assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx);
+        // Ensure the empty editor's buffer has no language before asserting
+        let (_, buffer, _) = empty_editor.read_with(cx, |editor, cx| {
+            editor
+                .active_excerpt(cx)
+                .expect("editor should have an active excerpt")
+        });
+        buffer.update(cx, |buffer, cx| {
+            buffer.set_language(None, cx);
+        });
+        assert_selected_language_for_editor(&workspace, &empty_editor, None, cx);
+    }
+}

crates/language_tools/src/highlights_tree_view.rs 🔗

@@ -8,6 +8,7 @@ use gpui::{
     MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled,
     Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list,
 };
+use language::ToOffset;
 use menu::{SelectNext, SelectPrevious};
 use std::{mem, ops::Range};
 use theme::ActiveTheme;
@@ -37,6 +38,8 @@ actions!(
         ToggleTextHighlights,
         /// Toggles showing semantic token highlights.
         ToggleSemanticTokens,
+        /// Toggles showing syntax token highlights.
+        ToggleSyntaxTokens,
     ]
 );
 
@@ -61,9 +64,14 @@ pub fn init(cx: &mut App) {
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub enum HighlightCategory {
     Text(HighlightKey),
+    SyntaxToken {
+        capture_name: SharedString,
+        theme_key: Option<SharedString>,
+    },
     SemanticToken {
         token_type: Option<SharedString>,
         token_modifiers: Option<SharedString>,
+        theme_key: Option<SharedString>,
     },
 }
 
@@ -71,22 +79,34 @@ impl HighlightCategory {
     fn label(&self) -> SharedString {
         match self {
             HighlightCategory::Text(key) => format!("text: {key:?}").into(),
+            HighlightCategory::SyntaxToken {
+                capture_name,
+                theme_key: Some(theme_key),
+            } => format!("syntax: {capture_name} \u{2192} {theme_key}").into(),
+            HighlightCategory::SyntaxToken {
+                capture_name,
+                theme_key: None,
+            } => format!("syntax: {capture_name}").into(),
             HighlightCategory::SemanticToken {
-                token_type: Some(token_type),
-                token_modifiers: Some(modifiers),
-            } => format!("semantic token: {token_type} [{modifiers}]").into(),
-            HighlightCategory::SemanticToken {
-                token_type: Some(token_type),
-                token_modifiers: None,
-            } => format!("semantic token: {token_type}").into(),
-            HighlightCategory::SemanticToken {
-                token_type: None,
-                token_modifiers: Some(modifiers),
-            } => format!("semantic token [{modifiers}]").into(),
-            HighlightCategory::SemanticToken {
-                token_type: None,
-                token_modifiers: None,
-            } => "semantic token".into(),
+                token_type,
+                token_modifiers,
+                theme_key,
+            } => {
+                let label = match (token_type, token_modifiers) {
+                    (Some(token_type), Some(modifiers)) => {
+                        format!("semantic token: {token_type} [{modifiers}]")
+                    }
+                    (Some(token_type), None) => format!("semantic token: {token_type}"),
+                    (None, Some(modifiers)) => format!("semantic token [{modifiers}]"),
+                    (None, None) => "semantic token".to_string(),
+                };
+
+                if let Some(theme_key) = theme_key {
+                    format!("{label} \u{2192} {theme_key}").into()
+                } else {
+                    label.into()
+                }
+            }
         }
     }
 }
@@ -124,6 +144,7 @@ pub struct HighlightsTreeView {
     display_items: Vec<DisplayItem>,
     is_singleton: bool,
     show_text_highlights: bool,
+    show_syntax_tokens: bool,
     show_semantic_tokens: bool,
     skip_next_scroll: bool,
 }
@@ -157,6 +178,7 @@ impl HighlightsTreeView {
             display_items: Vec::new(),
             is_singleton: true,
             show_text_highlights: true,
+            show_syntax_tokens: true,
             show_semantic_tokens: true,
             skip_next_scroll: false,
         };
@@ -280,6 +302,7 @@ impl HighlightsTreeView {
 
         let mut entries = Vec::new();
 
+        let semantic_theme = cx.theme().syntax().clone();
         display_map.update(cx, |display_map, cx| {
             for (key, text_highlights) in display_map.all_text_highlights() {
                 for range in &text_highlights.1 {
@@ -323,6 +346,32 @@ impl HighlightsTreeView {
                         ) else {
                             continue;
                         };
+
+                        let theme_key =
+                            stylizer
+                                .rules_for_token(token.token_type)
+                                .and_then(|rules| {
+                                    rules
+                                        .iter()
+                                        .filter(|rule| {
+                                            rule.token_modifiers.iter().all(|modifier| {
+                                                stylizer
+                                                    .has_modifier(token.token_modifiers, modifier)
+                                            })
+                                        })
+                                        .fold(None, |theme_key, rule| {
+                                            rule.style
+                                                .iter()
+                                                .find(|style_name| {
+                                                    semantic_theme.get_opt(style_name).is_some()
+                                                })
+                                                .map(|style_name| {
+                                                    SharedString::from(style_name.clone())
+                                                })
+                                                .or(theme_key)
+                                        })
+                                });
+
                         entries.push(HighlightEntry {
                             excerpt_id,
                             range,
@@ -333,6 +382,7 @@ impl HighlightsTreeView {
                                 token_modifiers: stylizer
                                     .token_modifiers(token.token_modifiers)
                                     .map(SharedString::from),
+                                theme_key,
                             },
                             sort_key,
                         });
@@ -341,6 +391,64 @@ impl HighlightsTreeView {
             });
         });
 
+        let syntax_theme = cx.theme().syntax().clone();
+        for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() {
+            let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot);
+            let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot);
+            let range = start_offset..end_offset;
+
+            let captures = buffer_snapshot.captures(range, |grammar| {
+                grammar.highlights_config.as_ref().map(|c| &c.query)
+            });
+            let grammars: Vec<_> = captures.grammars().to_vec();
+            let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect();
+
+            for capture in captures {
+                let highlight_id = highlight_maps[capture.grammar_index].get(capture.index);
+                let Some(style) = highlight_id.style(&syntax_theme) else {
+                    continue;
+                };
+
+                let theme_key = highlight_id
+                    .name(&syntax_theme)
+                    .map(|theme_key| SharedString::from(theme_key.to_string()));
+
+                let capture_name = grammars[capture.grammar_index]
+                    .highlights_config
+                    .as_ref()
+                    .and_then(|config| config.query.capture_names().get(capture.index as usize))
+                    .map(|capture_name| SharedString::from((*capture_name).to_string()))
+                    .unwrap_or_else(|| SharedString::from("unknown"));
+
+                let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte());
+                let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte());
+
+                let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor);
+                let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor);
+
+                let (start, end) = match (start, end) {
+                    (Some(s), Some(e)) => (s, e),
+                    _ => continue,
+                };
+
+                let range = start..end;
+                let (range_display, sort_key) =
+                    format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton);
+
+                entries.push(HighlightEntry {
+                    excerpt_id,
+                    range,
+                    range_display,
+                    style,
+                    category: HighlightCategory::SyntaxToken {
+                        capture_name,
+                        theme_key,
+                    },
+                    sort_key,
+                });
+            }
+        }
+
         entries.sort_by(|a, b| {
             a.sort_key
                 .cmp(&b.sort_key)
@@ -387,6 +495,7 @@ impl HighlightsTreeView {
     fn should_show_entry(&self, entry: &HighlightEntry) -> bool {
         match entry.category {
             HighlightCategory::Text(_) => self.show_text_highlights,
+            HighlightCategory::SyntaxToken { .. } => self.show_syntax_tokens,
             HighlightCategory::SemanticToken { .. } => self.show_semantic_tokens,
         }
     }
@@ -695,14 +804,14 @@ impl Render for HighlightsTreeView {
                                     this.child(Label::new("All highlights are filtered out"))
                                         .child(
                                             Label::new(
-                                                "Enable text or semantic highlights in the toolbar",
+                                                "Enable text, syntax, or semantic highlights in the toolbar",
                                             )
                                             .size(LabelSize::Small),
                                         )
                                 } else {
                                     this.child(Label::new("No highlights found")).child(
                                         Label::new(
-                                            "The editor has no text or semantic token highlights",
+                                            "The editor has no text, syntax, or semantic token highlights",
                                         )
                                         .size(LabelSize::Small),
                                     )
@@ -762,6 +871,7 @@ impl Item for HighlightsTreeView {
         Task::ready(Some(cx.new(|cx| {
             let mut clone = Self::new(self.workspace_handle.clone(), None, window, cx);
             clone.show_text_highlights = self.show_text_highlights;
+            clone.show_syntax_tokens = self.show_syntax_tokens;
             clone.show_semantic_tokens = self.show_semantic_tokens;
             clone.skip_next_scroll = false;
             if let Some(editor) = &self.editor {
@@ -810,14 +920,18 @@ impl HighlightsTreeToolbarItemView {
     }
 
     fn render_settings_button(&self, cx: &Context<Self>) -> PopoverMenu<ContextMenu> {
-        let (show_text, show_semantic) = self
+        let (show_text, show_syntax, show_semantic) = self
             .tree_view
             .as_ref()
             .map(|view| {
                 let v = view.read(cx);
-                (v.show_text_highlights, v.show_semantic_tokens)
+                (
+                    v.show_text_highlights,
+                    v.show_syntax_tokens,
+                    v.show_semantic_tokens,
+                )
             })
-            .unwrap_or((true, true));
+            .unwrap_or((true, true, true));
 
         let tree_view = self.tree_view.as_ref().map(|v| v.downgrade());
 
@@ -833,6 +947,7 @@ impl HighlightsTreeToolbarItemView {
             .with_handle(self.toggle_settings_handle.clone())
             .menu(move |window, cx| {
                 let tree_view_for_text = tree_view.clone();
+                let tree_view_for_syntax = tree_view.clone();
                 let tree_view_for_semantic = tree_view.clone();
 
                 let menu = ContextMenu::build(window, cx, move |menu, _, _| {
@@ -860,6 +975,30 @@ impl HighlightsTreeToolbarItemView {
                             }
                         },
                     )
+                    .toggleable_entry(
+                        "Syntax Tokens",
+                        show_syntax,
+                        IconPosition::Start,
+                        Some(ToggleSyntaxTokens.boxed_clone()),
+                        {
+                            let tree_view = tree_view_for_syntax.clone();
+                            move |_, cx| {
+                                if let Some(view) = tree_view.as_ref() {
+                                    view.update(cx, |view, cx| {
+                                        view.show_syntax_tokens = !view.show_syntax_tokens;
+                                        let snapshot = view.editor.as_ref().map(|s| {
+                                            s.editor.read(cx).buffer().read(cx).snapshot(cx)
+                                        });
+                                        if let Some(snapshot) = snapshot {
+                                            view.rebuild_display_items(&snapshot, cx);
+                                        }
+                                        cx.notify();
+                                    })
+                                    .ok();
+                                }
+                            }
+                        },
+                    )
                     .toggleable_entry(
                         "Semantic Tokens",
                         show_semantic,

crates/language_tools/src/lsp_button.rs 🔗

@@ -333,13 +333,7 @@ impl LanguageServerState {
                 })
                 .unwrap_or((None, None, None));
 
-            let truncated_message = message.as_ref().and_then(|message| {
-                message
-                    .lines()
-                    .filter(|line| !line.trim().is_empty())
-                    .map(SharedString::new)
-                    .next()
-            });
+            let server_message = message.clone();
 
             let submenu_server_name = server_info.name.clone();
             let submenu_server_info = server_info.clone();
@@ -549,9 +543,9 @@ impl LanguageServerState {
                         submenu = submenu.separator().custom_row({
                             let binary_path = binary_path.clone();
                             let server_version = server_version.clone();
-                            let truncated_message = truncated_message.clone();
+                            let server_message = server_message.clone();
                             let process_memory_cache = process_memory_cache.clone();
-                            move |_, _| {
+                            move |_, cx| {
                                 let memory_usage = process_id.map(|pid| {
                                     process_memory_cache.borrow_mut().get_memory_usage(pid)
                                 });
@@ -567,63 +561,63 @@ impl LanguageServerState {
                                     }
                                 });
 
-                                let metadata_label =
-                                    match (&server_version, &memory_label, &truncated_message) {
-                                        (None, None, None) => None,
-                                        (Some(version), None, None) => {
-                                            Some(format!("v{}", version.as_ref()))
-                                        }
-                                        (None, Some(memory), None) => Some(memory.clone()),
-                                        (Some(version), Some(memory), None) => {
-                                            Some(format!("v{} • {}", version.as_ref(), memory))
-                                        }
-                                        (None, None, Some(message)) => Some(message.to_string()),
-                                        (Some(version), None, Some(message)) => Some(format!(
-                                            "v{}\n\n{}",
-                                            version.as_ref(),
-                                            message.as_ref()
-                                        )),
-                                        (None, Some(memory), Some(message)) => {
-                                            Some(format!("{}\n\n{}", memory, message.as_ref()))
-                                        }
-                                        (Some(version), Some(memory), Some(message)) => {
-                                            Some(format!(
-                                                "v{} • {}\n\n{}",
-                                                version.as_ref(),
-                                                memory,
-                                                message.as_ref()
-                                            ))
-                                        }
-                                    };
+                                let version_label =
+                                    server_version.as_ref().map(|v| format!("v{}", v.as_ref()));
+
+                                let separator_color =
+                                    cx.theme().colors().icon_disabled.opacity(0.8);
 
-                                h_flex()
+                                v_flex()
                                     .id("metadata-container")
-                                    .ml_neg_1()
                                     .gap_1()
-                                    .max_w(rems(164.))
-                                    .child(
-                                        Icon::new(IconName::Circle)
-                                            .color(status_color)
-                                            .size(IconSize::Small),
-                                    )
+                                    .when_some(server_message.as_ref(), |this, _| {
+                                        this.w(rems_from_px(240.))
+                                    })
                                     .child(
-                                        Label::new(status_label)
-                                            .size(LabelSize::Small)
-                                            .color(Color::Muted),
-                                    )
-                                    .when_some(metadata_label.as_ref(), |submenu, metadata| {
-                                        submenu
+                                        h_flex()
+                                            .ml_neg_1()
+                                            .gap_1()
                                             .child(
-                                                Icon::new(IconName::Dash)
-                                                    .color(Color::Disabled)
-                                                    .size(IconSize::XSmall),
+                                                Icon::new(IconName::Circle)
+                                                    .color(status_color)
+                                                    .size(IconSize::Small),
                                             )
                                             .child(
-                                                Label::new(metadata)
+                                                Label::new(status_label)
                                                     .size(LabelSize::Small)
-                                                    .color(Color::Muted)
-                                                    .truncate(),
+                                                    .color(Color::Muted),
                                             )
+                                            .when_some(version_label.as_ref(), |row, version| {
+                                                row.child(
+                                                    Icon::new(IconName::Dash)
+                                                        .color(Color::Custom(separator_color))
+                                                        .size(IconSize::XSmall),
+                                                )
+                                                .child(
+                                                    Label::new(version)
+                                                        .size(LabelSize::Small)
+                                                        .color(Color::Muted),
+                                                )
+                                            })
+                                            .when_some(memory_label.as_ref(), |row, memory| {
+                                                row.child(
+                                                    Icon::new(IconName::Dash)
+                                                        .color(Color::Custom(separator_color))
+                                                        .size(IconSize::XSmall),
+                                                )
+                                                .child(
+                                                    Label::new(memory)
+                                                        .size(LabelSize::Small)
+                                                        .color(Color::Muted),
+                                                )
+                                            }),
+                                    )
+                                    .when_some(server_message.clone(), |container, message| {
+                                        container.child(
+                                            Label::new(message)
+                                                .color(Color::Muted)
+                                                .size(LabelSize::Small),
+                                        )
                                     })
                                     .when_some(binary_path.clone(), |el, path| {
                                         el.tooltip(Tooltip::text(path))

crates/languages/src/bash/brackets.scm 🔗

@@ -1,12 +1,62 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("`" @open "`" @close) (#set! rainbow.exclude))
-(("do" @open "done" @close) (#set! newline.only) (#set! rainbow.exclude))
-((case_statement ("in" @open "esac" @close)) (#set! newline.only) (#set! rainbow.exclude))
-((if_statement (elif_clause ("then" @open)) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude))
-((if_statement (else_clause ("else" @open)) "fi" @close) (#set! newline.only) (#set! rainbow.exclude))
-((if_statement ("then" @open) (elif_clause ("elif" @close))) (#set! newline.only) (#set! rainbow.exclude))
-((if_statement ("then" @open) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude))
-((if_statement ("then" @open "fi" @close)) (#set! newline.only) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("`" @open
+  "`" @close)
+  (#set! rainbow.exclude))
+
+(("do" @open
+  "done" @close)
+  (#set! newline.only)
+  (#set! rainbow.exclude))
+
+((case_statement
+  ("in" @open
+    "esac" @close))
+  (#set! newline.only)
+  (#set! rainbow.exclude))
+
+((if_statement
+  (elif_clause
+    "then" @open)
+  (else_clause
+    "else" @close))
+  (#set! newline.only)
+  (#set! rainbow.exclude))
+
+((if_statement
+  (else_clause
+    "else" @open)
+  "fi" @close)
+  (#set! newline.only)
+  (#set! rainbow.exclude))
+
+((if_statement
+  "then" @open
+  (elif_clause
+    "elif" @close))
+  (#set! newline.only)
+  (#set! rainbow.exclude))
+
+((if_statement
+  "then" @open
+  (else_clause
+    "else" @close))
+  (#set! newline.only)
+  (#set! rainbow.exclude))
+
+((if_statement
+  ("then" @open
+    "fi" @close))
+  (#set! newline.only)
+  (#set! rainbow.exclude))

crates/languages/src/bash/highlights.scm 🔗

@@ -43,13 +43,17 @@
   (comment) @keyword.directive)
   (#match? @keyword.directive "^#![ \t]*/"))
 
-(function_definition name: (word) @function)
-(command_name (word) @function)
+(function_definition
+  name: (word) @function)
+
+(command_name
+  (word) @function)
 
 (command
   argument: [
     (word) @variable.parameter
-    (_ (word) @variable.parameter)
+    (_
+      (word) @variable.parameter)
   ])
 
 [
@@ -65,7 +69,6 @@
   (expansion)
 ] @embedded
 
-
 [
   "$"
   "&&"
@@ -89,9 +92,7 @@
 
 (test_operator) @keyword.operator
 
-[
-  ";"
-] @punctuation.delimiter
+";" @punctuation.delimiter
 
 [
   "("
@@ -104,6 +105,7 @@
 
 (simple_expansion
   "$" @punctuation.special)
+
 (expansion
   "${" @punctuation.special
   "}" @punctuation.special) @embedded
@@ -112,10 +114,11 @@
   "$(" @punctuation.special
   ")" @punctuation.special)
 
-(
-  (command (_) @constant)
-  (#match? @constant "^-")
-)
+((command
+  (_) @constant)
+  (#match? @constant "^-"))
+
+(case_item
+  value: (_) @string.regex)
 
-(case_item value: (_) @string.regex)
 (special_variable_name) @variable.special

crates/languages/src/bash/indents.scm 🔗

@@ -1,12 +1,27 @@
-(_ "[" "]" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent
 
 (function_definition) @start.function
+
 (if_statement) @start.if
+
 (elif_clause) @start.elif
+
 (else_clause) @start.else
+
 (for_statement) @start.for
+
 (while_statement) @start.while
+
 (case_statement) @start.case
+
 (case_item) @start.case_item

crates/languages/src/bash/runnables.scm 🔗

@@ -1,5 +1,5 @@
 ; Run bash scripts
-(
-  (program . (_) @run) @_bash-script
-  (#set! tag bash-script)
-)
+((program
+  .
+  (_) @run) @_bash-script
+  (#set! tag bash-script))

crates/languages/src/c/brackets.scm 🔗

@@ -1,5 +1,16 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))

crates/languages/src/c/highlights.scm 🔗

@@ -116,19 +116,23 @@
 (identifier) @variable
 
 ((identifier) @constant
- (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
+  (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
 
 (call_expression
   function: (identifier) @function)
+
 (call_expression
   function: (field_expression
     field: (field_identifier) @function))
+
 (function_declarator
   declarator: (identifier) @function)
+
 (preproc_function_def
   name: (identifier) @function.special)
 
 (field_identifier) @property
+
 (statement_identifier) @label
 
 [
@@ -139,6 +143,7 @@
 
 ; GNU __attribute__
 (attribute_specifier) @attribute
+
 (attribute_specifier
   (argument_list
     (identifier) @attribute))
@@ -146,5 +151,6 @@
 ; C23 [[attributes]]
 (attribute
   prefix: (identifier) @attribute)
+
 (attribute
   name: (identifier) @attribute)

crates/languages/src/c/imports.scm 🔗

@@ -1,7 +1,7 @@
 (preproc_include
-    path: [
-        (
-            (system_lib_string) @source @wildcard
-            (#strip! @source "[<>]"))
-        (string_literal (string_content) @source @wildcard)
-    ]) @import
+  path: [
+    ((system_lib_string) @source @wildcard
+      (#strip! @source "[<>]"))
+    (string_literal
+      (string_content) @source @wildcard)
+  ]) @import

crates/languages/src/c/indents.scm 🔗

@@ -9,15 +9,25 @@
   (else_clause)
 ] @indent
 
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent
 
 ((comment) @indent
- (#match? @indent "^/\\*"))
+  (#match? @indent "^/\\*"))
 
 (if_statement) @start.if
+
 (for_statement) @start.for
+
 (while_statement) @start.while
+
 (do_statement) @start.do
+
 (switch_statement) @start.switch
+
 (else_clause) @start.else

crates/languages/src/c/injections.scm 🔗

@@ -1,6 +1,5 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 ((comment) @injection.content
   (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)")
@@ -8,9 +7,9 @@
   (#set! injection.include-children))
 
 (preproc_def
-    value: (preproc_arg) @injection.content
-    (#set! injection.language "c"))
+  value: (preproc_arg) @injection.content
+  (#set! injection.language "c"))
 
 (preproc_function_def
-    value: (preproc_arg) @injection.content
-    (#set! injection.language "c"))
+  value: (preproc_arg) @injection.content
+  (#set! injection.language "c"))

crates/languages/src/c/outline.scm 🔗

@@ -1,91 +1,89 @@
 (preproc_def
-    "#define" @context
-    name: (_) @name) @item
+  "#define" @context
+  name: (_) @name) @item
 
 (preproc_function_def
-    "#define" @context
-    name: (_) @name
-    parameters: (preproc_params
-        "(" @context
-        ")" @context)) @item
+  "#define" @context
+  name: (_) @name
+  parameters: (preproc_params
+    "(" @context
+    ")" @context)) @item
 
 (struct_specifier
-    "struct" @context
-    name: (_) @name) @item
+  "struct" @context
+  name: (_) @name) @item
 
 (union_specifier
-    "union" @context
-    name: (_) @name) @item
+  "union" @context
+  name: (_) @name) @item
 
 (enum_specifier
-    "enum" @context
-    name: (_) @name) @item
+  "enum" @context
+  name: (_) @name) @item
 
 (enumerator
-    name: (_) @name) @item
+  name: (_) @name) @item
 
 (field_declaration
-    type: (_) @context
-    declarator: (field_identifier) @name) @item
+  type: (_) @context
+  declarator: (field_identifier) @name) @item
 
 (type_definition
-    "typedef" @context
-    declarator: (_) @name) @item
+  "typedef" @context
+  declarator: (_) @name) @item
 
 (declaration
-    (type_qualifier)? @context
-    type: (_)? @context
-    declarator: [
-        (function_declarator
-            declarator: (_) @name
-            parameters: (parameter_list
-                "(" @context
-                ")" @context))
-        (pointer_declarator
-            "*" @context
-            declarator: (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-        (pointer_declarator
-            "*" @context
-            declarator: (pointer_declarator
-                "*" @context
-                declarator: (function_declarator
-                    declarator: (_) @name
-                    parameters: (parameter_list
-                        "(" @context
-                        ")" @context))))
-    ]
-) @item
+  (type_qualifier)? @context
+  type: (_)? @context
+  declarator: [
+    (function_declarator
+      declarator: (_) @name
+      parameters: (parameter_list
+        "(" @context
+        ")" @context))
+    (pointer_declarator
+      "*" @context
+      declarator: (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+    (pointer_declarator
+      "*" @context
+      declarator: (pointer_declarator
+        "*" @context
+        declarator: (function_declarator
+          declarator: (_) @name
+          parameters: (parameter_list
+            "(" @context
+            ")" @context))))
+  ]) @item
 
 (function_definition
-    (type_qualifier)? @context
-    type: (_)? @context
-    declarator: [
-        (function_declarator
-            declarator: (_) @name
-            parameters: (parameter_list
-                "(" @context
-                ")" @context))
-        (pointer_declarator
-            "*" @context
-            declarator: (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-        (pointer_declarator
-            "*" @context
-            declarator: (pointer_declarator
-                "*" @context
-                declarator: (function_declarator
-                    declarator: (_) @name
-                    parameters: (parameter_list
-                        "(" @context
-                        ")" @context))))
-    ]
-) @item
+  (type_qualifier)? @context
+  type: (_)? @context
+  declarator: [
+    (function_declarator
+      declarator: (_) @name
+      parameters: (parameter_list
+        "(" @context
+        ")" @context))
+    (pointer_declarator
+      "*" @context
+      declarator: (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+    (pointer_declarator
+      "*" @context
+      declarator: (pointer_declarator
+        "*" @context
+        declarator: (function_declarator
+          declarator: (_) @name
+          parameters: (parameter_list
+            "(" @context
+            ")" @context))))
+  ]) @item
 
 (comment) @annotation

crates/languages/src/c/runnables.scm 🔗

@@ -1,10 +1,6 @@
 ; Tag the main function
-(
-  (function_definition
-    declarator: (function_declarator
-      declarator: (identifier) @run
-    )
-  ) @_c-main
+((function_definition
+  declarator: (function_declarator
+    declarator: (identifier) @run)) @_c-main
   (#eq? @run "main")
-  (#set! tag c-main)
-)
+  (#set! tag c-main))

crates/languages/src/c/textobjects.scm 🔗

@@ -1,31 +1,34 @@
 (declaration
-    declarator: (function_declarator)) @function.around
+  declarator: (function_declarator)) @function.around
 
 (function_definition
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}" )) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (preproc_function_def
-    value: (_) @function.inside) @function.around
+  value: (_) @function.inside) @function.around
 
 (comment) @comment.around
 
 (struct_specifier
-    body: (_
-        "{"
-        (_)* @class.inside
-        "}")) @class.around
+  body: (_
+    "{"
+    (_)* @class.inside
+    "}")) @class.around
 
 (enum_specifier
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}")) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (union_specifier
-    body: (_
-        "{"
-        (_)* @class.inside
-        "}")) @class.around
+  body: (_
+    "{"
+    (_)* @class.inside
+    "}")) @class.around

crates/languages/src/cpp/brackets.scm 🔗

@@ -1,6 +1,19 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-("<" @open ">" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+("<" @open
+  ">" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))

crates/languages/src/cpp/config.toml 🔗

@@ -1,6 +1,6 @@
 name = "C++"
 grammar = "cpp"
-path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"]
+path_suffixes = ["cc", "hh", "cpp", "cppm", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"]
 line_comments = ["// ", "/// ", "//! "]
 first_line_pattern = '^//.*-\*-\s*C\+\+\s*-\*-'
 decrease_indent_patterns = [

crates/languages/src/cpp/highlights.scm 🔗

@@ -1,13 +1,15 @@
 (identifier) @variable
+
 (field_identifier) @property
+
 (namespace_identifier) @namespace
 
 (concept_definition
-    name: (identifier) @concept)
+  name: (identifier) @concept)
 
 (requires_clause
-    constraint: (template_type
-        name: (type_identifier) @concept))
+  constraint: (template_type
+    name: (type_identifier) @concept))
 
 (module_name
   (identifier) @module)
@@ -83,18 +85,23 @@
 (operator_name
   "<=>" @operator.spaceship)
 
-(destructor_name (identifier) @function)
+(destructor_name
+  (identifier) @function)
 
 ((namespace_identifier) @type
- (#match? @type "^[A-Z]"))
+  (#match? @type "^[A-Z]"))
 
 (auto) @type
+
 (type_identifier) @type
+
 type: (primitive_type) @type.builtin
+
 (sized_type_specifier) @type.builtin
 
 ; GNU __attribute__
 (attribute_specifier) @attribute
+
 (attribute_specifier
   (argument_list
     (identifier) @attribute))
@@ -102,15 +109,18 @@ type: (primitive_type) @type.builtin
 ; C++11 [[attributes]]
 (attribute
   prefix: (identifier) @attribute)
+
 (attribute
   name: (identifier) @attribute)
 
 ((identifier) @constant.builtin
- (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$"))
+  (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$"))
 
 (statement_identifier) @label
+
 (this) @variable.builtin
-("static_assert") @function.builtin
+
+"static_assert" @function.builtin
 
 [
   "alignas"
@@ -197,7 +207,7 @@ type: (primitive_type) @type.builtin
 
 [
   (null)
-  ("nullptr")
+  "nullptr"
 ] @constant.builtin
 
 (number_literal) @number
@@ -285,5 +295,8 @@ type: (primitive_type) @type.builtin
 (binary_expression
   operator: "<=>" @operator.spaceship)
 
-(conditional_expression ":" @operator)
-(user_defined_literal (literal_suffix) @operator)
+(conditional_expression
+  ":" @operator)
+
+(user_defined_literal
+  (literal_suffix) @operator)

crates/languages/src/cpp/imports.scm 🔗

@@ -1,5 +1,6 @@
 (preproc_include
-    path: [
-        ((system_lib_string) @source @wildcard)
-        (string_literal (string_content) @source @wildcard)
-    ]) @import
+  path: [
+    (system_lib_string) @source @wildcard
+    (string_literal
+      (string_content) @source @wildcard)
+  ]) @import

crates/languages/src/cpp/indents.scm 🔗

@@ -1,23 +1,33 @@
 [
-    (field_expression)
-    (assignment_expression)
-    (init_declarator)
-    (if_statement)
-    (for_statement)
-    (while_statement)
-    (do_statement)
-    (else_clause)
+  (field_expression)
+  (assignment_expression)
+  (init_declarator)
+  (if_statement)
+  (for_statement)
+  (while_statement)
+  (do_statement)
+  (else_clause)
 ] @indent
 
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent
 
 ((comment) @indent
- (#match? @indent "^/\\*"))
+  (#match? @indent "^/\\*"))
 
 (if_statement) @start.if
+
 (for_statement) @start.for
+
 (while_statement) @start.while
+
 (do_statement) @start.do
+
 (switch_statement) @start.switch
+
 (else_clause) @start.else

crates/languages/src/cpp/injections.scm 🔗

@@ -1,6 +1,5 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 ((comment) @injection.content
   (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)")
@@ -8,12 +7,12 @@
   (#set! injection.include-children))
 
 (preproc_def
-    value: (preproc_arg) @injection.content
-    (#set! injection.language "c++"))
+  value: (preproc_arg) @injection.content
+  (#set! injection.language "c++"))
 
 (preproc_function_def
-    value: (preproc_arg) @injection.content
-    (#set! injection.language "c++"))
+  value: (preproc_arg) @injection.content
+  (#set! injection.language "c++"))
 
 (raw_string_literal
   delimiter: (raw_string_delimiter) @injection.language

crates/languages/src/cpp/outline.scm 🔗

@@ -1,186 +1,195 @@
 (preproc_def
-    "#define" @context
-    name: (_) @name) @item
+  "#define" @context
+  name: (_) @name) @item
 
 (preproc_function_def
-    "#define" @context
-    name: (_) @name
-    parameters: (preproc_params
-        "(" @context
-        ")" @context)) @item
+  "#define" @context
+  name: (_) @name
+  parameters: (preproc_params
+    "(" @context
+    ")" @context)) @item
 
 (namespace_definition
-    "inline"? @context
-    "namespace" @context
-    name: (_) @name) @item
+  "inline"? @context
+  "namespace" @context
+  name: (_) @name) @item
 
 (type_definition
-    "typedef" @context
-    declarator: (_) @name) @item
+  "typedef" @context
+  declarator: (_) @name) @item
 
 (struct_specifier
-    "struct" @context
-    name: (_) @name) @item
+  "struct" @context
+  name: (_) @name) @item
 
 (class_specifier
-    "class" @context
-    name: (_) @name) @item
+  "class" @context
+  name: (_) @name) @item
 
 (enum_specifier
-    "enum" @context
-    [
-        "class"
-        "struct"
-    ]? @context
-    name: (_) @name) @item
+  "enum" @context
+  [
+    "class"
+    "struct"
+  ]? @context
+  name: (_) @name) @item
 
 (union_specifier
-    "union" @context
-    name: (_) @name) @item
+  "union" @context
+  name: (_) @name) @item
 
 (enumerator
-    name: (_) @name) @item
+  name: (_) @name) @item
 
 (concept_definition
-    "concept" @context
-    name: (_) @name) @item
+  "concept" @context
+  name: (_) @name) @item
 
 (declaration
-    [
-        (storage_class_specifier)
-        (type_qualifier)
-    ]* @context
-    type: (_) @context
-    declarator: [
-        ; The declaration may define multiple variables, using @item on the
-        ; declarator so that they get distinct ranges.
-        (init_declarator
-            declarator: (_) @item @name)
-        (identifier) @item @name
-    ] @item)
+  [
+    (storage_class_specifier)
+    (type_qualifier)
+  ]* @context
+  type: (_) @context
+  declarator: [
+    ; The declaration may define multiple variables, using @item on the
+    ; declarator so that they get distinct ranges.
+    (init_declarator
+      declarator: (_) @item @name)
+    (identifier) @item @name
+  ] @item)
 
 (function_definition
-    [
-        (storage_class_specifier)
-        (type_qualifier)
-    ]* @context
-    type: (_)? @context
-    declarator: [
-        (function_declarator
-            declarator: (_) @name
-            parameters: (parameter_list
-                "(" @context
-                ")" @context))
-        (pointer_declarator
-            "*" @context
-            declarator: (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-        (pointer_declarator
-            "*" @context
-            declarator: (pointer_declarator
-                "*" @context
-                declarator: (function_declarator
-                    declarator: (_) @name
-                    parameters: (parameter_list
-                        "(" @context
-                        ")" @context))))
-        (reference_declarator
-            ["&" "&&"] @context
-            (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-    ]
-    (type_qualifier)? @context) @item
+  [
+    (storage_class_specifier)
+    (type_qualifier)
+  ]* @context
+  type: (_)? @context
+  declarator: [
+    (function_declarator
+      declarator: (_) @name
+      parameters: (parameter_list
+        "(" @context
+        ")" @context))
+    (pointer_declarator
+      "*" @context
+      declarator: (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+    (pointer_declarator
+      "*" @context
+      declarator: (pointer_declarator
+        "*" @context
+        declarator: (function_declarator
+          declarator: (_) @name
+          parameters: (parameter_list
+            "(" @context
+            ")" @context))))
+    (reference_declarator
+      [
+        "&"
+        "&&"
+      ] @context
+      (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+  ]
+  (type_qualifier)? @context) @item
 
 (declaration
-    [
-        (storage_class_specifier)
-        (type_qualifier)
-    ]* @context
-    type: (_)? @context
-    declarator: [
-        (field_identifier) @name
-        (pointer_declarator
-            "*" @context
-            declarator: (field_identifier) @name)
-        (function_declarator
-            declarator: (_) @name
-            parameters: (parameter_list
-                "(" @context
-                ")" @context))
-        (pointer_declarator
-            "*" @context
-            declarator: (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-        (pointer_declarator
-            "*" @context
-            declarator: (pointer_declarator
-                "*" @context
-                declarator: (function_declarator
-                    declarator: (_) @name
-                    parameters: (parameter_list
-                        "(" @context
-                        ")" @context))))
-        (reference_declarator
-            ["&" "&&"] @context
-            (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-    ]
-    (type_qualifier)? @context) @item
+  [
+    (storage_class_specifier)
+    (type_qualifier)
+  ]* @context
+  type: (_)? @context
+  declarator: [
+    (field_identifier) @name
+    (pointer_declarator
+      "*" @context
+      declarator: (field_identifier) @name)
+    (function_declarator
+      declarator: (_) @name
+      parameters: (parameter_list
+        "(" @context
+        ")" @context))
+    (pointer_declarator
+      "*" @context
+      declarator: (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+    (pointer_declarator
+      "*" @context
+      declarator: (pointer_declarator
+        "*" @context
+        declarator: (function_declarator
+          declarator: (_) @name
+          parameters: (parameter_list
+            "(" @context
+            ")" @context))))
+    (reference_declarator
+      [
+        "&"
+        "&&"
+      ] @context
+      (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+  ]
+  (type_qualifier)? @context) @item
 
 (field_declaration
-    [
-        (storage_class_specifier)
-        (type_qualifier)
-    ]* @context
-    type: (_) @context
-    declarator: [
-        (field_identifier) @name
-        (pointer_declarator
-            "*" @context
-            declarator: (field_identifier) @name)
-        (function_declarator
-            declarator: (_) @name
-            parameters: (parameter_list
-                "(" @context
-                ")" @context))
-        (pointer_declarator
-            "*" @context
-            declarator: (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
-        (pointer_declarator
-            "*" @context
-            declarator: (pointer_declarator
-                "*" @context
-                declarator: (function_declarator
-                    declarator: (_) @name
-                    parameters: (parameter_list
-                        "(" @context
-                        ")" @context))))
-        (reference_declarator
-            ["&" "&&"] @context
-            (function_declarator
-                declarator: (_) @name
-                parameters: (parameter_list
-                    "(" @context
-                    ")" @context)))
+  [
+    (storage_class_specifier)
+    (type_qualifier)
+  ]* @context
+  type: (_) @context
+  declarator: [
+    (field_identifier) @name
+    (pointer_declarator
+      "*" @context
+      declarator: (field_identifier) @name)
+    (function_declarator
+      declarator: (_) @name
+      parameters: (parameter_list
+        "(" @context
+        ")" @context))
+    (pointer_declarator
+      "*" @context
+      declarator: (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
+    (pointer_declarator
+      "*" @context
+      declarator: (pointer_declarator
+        "*" @context
+        declarator: (function_declarator
+          declarator: (_) @name
+          parameters: (parameter_list
+            "(" @context
+            ")" @context))))
+    (reference_declarator
+      [
+        "&"
+        "&&"
+      ] @context
+      (function_declarator
+        declarator: (_) @name
+        parameters: (parameter_list
+          "(" @context
+          ")" @context)))
     ; Fields declarations may define multiple fields, and so @item is on the
     ; declarator so they each get distinct ranges.
-    ] @item
-    (type_qualifier)? @context)
+  ] @item
+  (type_qualifier)? @context)
 
 (comment) @annotation

crates/languages/src/cpp/textobjects.scm 🔗

@@ -1,37 +1,44 @@
 (declaration
-    declarator: (function_declarator)) @function.around
+  declarator: (function_declarator)) @function.around
 
 (function_definition
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}" )) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (preproc_function_def
-    value: (_) @function.inside) @function.around
+  value: (_) @function.inside) @function.around
 
 (comment) @comment.around
 
 (struct_specifier
-    body: (_
-        "{"
-        (_)* @class.inside
-        "}")) @class.around
+  body: (_
+    "{"
+    (_)* @class.inside
+    "}")) @class.around
 
 (enum_specifier
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}")) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (union_specifier
-    body: (_
-        "{"
-        (_)* @class.inside
-        "}")) @class.around
+  body: (_
+    "{"
+    (_)* @class.inside
+    "}")) @class.around
 
 (class_specifier
   body: (_
-      "{"
-      [(_) ":"? ";"?]* @class.inside
-      "}"?)) @class.around
+    "{"
+    [
+      (_)
+      ":"?
+      ";"?
+    ]* @class.inside
+    "}"?)) @class.around

crates/languages/src/css.rs 🔗

@@ -134,6 +134,7 @@ impl LspAdapter for CssLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         Ok(Some(json!({
             "provideFormatter": true

crates/languages/src/css/brackets.scm 🔗

@@ -1,5 +1,16 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))

crates/languages/src/css/highlights.scm 🔗

@@ -30,14 +30,24 @@
 ] @keyword.operator
 
 (id_name) @selector.id
+
 (class_name) @selector.class
 
 (namespace_name) @namespace
-(namespace_selector (tag_name) @namespace "|")
+
+(namespace_selector
+  (tag_name) @namespace
+  "|")
 
 (attribute_name) @attribute
-(pseudo_element_selector "::" (tag_name) @selector.pseudo)
-(pseudo_class_selector ":" (class_name) @selector.pseudo)
+
+(pseudo_element_selector
+  "::"
+  (tag_name) @selector.pseudo)
+
+(pseudo_class_selector
+  ":"
+  (class_name) @selector.pseudo)
 
 [
   (feature_name)
@@ -58,13 +68,11 @@
 (parenthesized_query
   (keyword_query) @property)
 
-(
-  [
-    (property_name)
-    (plain_value)
-  ] @variable
-  (#match? @variable "^--")
-)
+([
+  (property_name)
+  (plain_value)
+] @variable
+  (#match? @variable "^--"))
 
 [
   "@media"
@@ -80,6 +88,7 @@
 ] @keyword
 
 (string_value) @string
+
 (color_value) @string.special
 
 [
@@ -97,7 +106,8 @@
   ";"
 ] @punctuation.delimiter
 
-(id_selector "#" @punctuation.delimiter)
+(id_selector
+  "#" @punctuation.delimiter)
 
 [
   "{"

crates/languages/src/css/outline.scm 🔗

@@ -1,18 +1,16 @@
 (stylesheet
-    (import_statement
-        "@import" @context
-        ((string_value) @name)) @item)
-
+  (import_statement
+    "@import" @context
+    (string_value) @name) @item)
 
 (rule_set
-    (selectors
-      .
-      (_) @name
-      ("," @name (_) @name)*
-    )) @item
+  (selectors
+    .
+    (_) @name
+    ("," @name
+      (_) @name)*)) @item
 
 (media_statement
-    "@media" @context
-    (_) @name
-    (block)
-) @item
+  "@media" @context
+  (_) @name
+  (block)) @item

crates/languages/src/css/textobjects.scm 🔗

@@ -1,30 +1,31 @@
 (comment) @comment.around
 
 (rule_set
-    (block (
-        "{"
-        (_)* @function.inside
-        "}" ))) @function.around
+  (block
+    ("{"
+      (_)* @function.inside
+      "}"))) @function.around
+
 (keyframe_block
-    (block (
-        "{"
-        (_)* @function.inside
-        "}" ))) @function.around
+  (block
+    ("{"
+      (_)* @function.inside
+      "}"))) @function.around
 
 (media_statement
-    (block (
-        "{"
-        (_)* @class.inside
-        "}" ))) @class.around
+  (block
+    ("{"
+      (_)* @class.inside
+      "}"))) @class.around
 
 (supports_statement
-    (block (
-        "{"
-        (_)* @class.inside
-        "}" ))) @class.around
+  (block
+    ("{"
+      (_)* @class.inside
+      "}"))) @class.around
 
 (keyframes_statement
-    (keyframe_block_list (
-        "{"
-        (_)* @class.inside
-        "}" ))) @class.around
+  (keyframe_block_list
+    ("{"
+      (_)* @class.inside
+      "}"))) @class.around

crates/languages/src/diff/highlights.scm 🔗

@@ -4,14 +4,14 @@
   (addition)
   (new_file)
 ] @string
-;; TODO: This should eventually be `@diff.plus` with a fallback of `@string`
 
+; TODO: This should eventually be `@diff.plus` with a fallback of `@string`
 [
   (deletion)
   (old_file)
 ] @keyword
-;; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword`
 
+; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword`
 (commit) @constant
 
 (location) @attribute
@@ -22,7 +22,7 @@
 
 (mode) @number
 
-([
+[
   ".."
   "+"
   "++"
@@ -32,7 +32,7 @@
   "--"
   "---"
   "----"
-] @punctuation.special)
+] @punctuation.special
 
 [
   (binary_change)

crates/languages/src/gitcommit/highlights.scm 🔗

@@ -1,18 +1,36 @@
 (subject) @markup.heading
+
 (path) @string.special.path
+
 (branch) @string.special.symbol
+
 (commit) @constant
+
 (item) @markup.link.url
+
 (header) @tag
+
 (comment) @comment
 
-(change kind: "new file" @diff.plus)
-(change kind: "deleted" @diff.minus)
-(change kind: "modified" @diff.delta)
-(change kind: "renamed" @diff.delta.moved)
+(change
+  kind: "new file" @diff.plus)
+
+(change
+  kind: "deleted" @diff.minus)
+
+(change
+  kind: "modified" @diff.delta)
+
+(change
+  kind: "renamed" @diff.delta.moved)
 
 (trailer
   key: (trailer_key) @variable.other.member
   value: (trailer_value) @string)
 
-[":" "=" "->" (scissors)] @punctuation.delimiter
+[
+  ":"
+  "="
+  "->"
+  (scissors)
+] @punctuation.delimiter

crates/languages/src/gitcommit/injections.scm 🔗

@@ -1,9 +1,8 @@
 ((comment) @content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 ((scissors) @content
- (#set! "language" "diff"))
+  (#set! "language" "diff"))
 
 ((rebase_command) @content
- (#set! "language" "git_rebase"))
+  (#set! "language" "git_rebase"))

crates/languages/src/go.rs 🔗

@@ -8,8 +8,10 @@ pub use language::*;
 use language::{LanguageToolchainStore, LspAdapterDelegate, LspInstaller};
 use lsp::{LanguageServerBinary, LanguageServerName};
 
+use project::lsp_store::language_server_settings;
 use regex::Regex;
-use serde_json::json;
+use serde_json::{Value, json};
+use settings::SemanticTokenRules;
 use smol::fs;
 use std::{
     borrow::Cow,
@@ -24,7 +26,17 @@ use std::{
     },
 };
 use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName};
-use util::{ResultExt, fs::remove_matching, maybe};
+use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into};
+
+use crate::LanguageDir;
+
+pub(crate) fn semantic_token_rules() -> SemanticTokenRules {
+    let content = LanguageDir::get("go/semantic_token_rules.json")
+        .expect("missing go/semantic_token_rules.json");
+    let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules");
+    settings::parse_json_with_comments::<SemanticTokenRules>(json)
+        .expect("failed to parse go semantic_token_rules.json")
+}
 
 fn server_binary_arguments() -> Vec<OsString> {
     vec!["-mode=stdio".into()]
@@ -192,9 +204,10 @@ impl LspAdapter for GoLspAdapter {
 
     async fn initialization_options(
         self: Arc<Self>,
-        _: &Arc<dyn LspAdapterDelegate>,
+        delegate: &Arc<dyn LspAdapterDelegate>,
+        cx: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
-        Ok(Some(json!({
+        let mut default_config = json!({
             "usePlaceholders": false,
             "hints": {
                 "assignVariableTypes": true,
@@ -205,7 +218,33 @@ impl LspAdapter for GoLspAdapter {
                 "parameterNames": true,
                 "rangeVariableTypes": true
             }
-        })))
+        });
+
+        let project_initialization_options = cx.update(|cx| {
+            language_server_settings(delegate.as_ref(), &self.name(), cx)
+                .and_then(|s| s.initialization_options.clone())
+        });
+
+        if let Some(override_options) = project_initialization_options {
+            merge_json_value_into(override_options, &mut default_config);
+        }
+
+        Ok(Some(default_config))
+    }
+
+    async fn workspace_configuration(
+        self: Arc<Self>,
+        delegate: &Arc<dyn LspAdapterDelegate>,
+        _: Option<Toolchain>,
+        _: Option<lsp::Uri>,
+        cx: &mut AsyncApp,
+    ) -> Result<Value> {
+        Ok(cx
+            .update(|cx| {
+                language_server_settings(delegate.as_ref(), &self.name(), cx)
+                    .and_then(|settings| settings.settings.clone())
+            })
+            .unwrap_or_default())
     }
 
     async fn label_for_completion(

crates/languages/src/go/brackets.scm 🔗

@@ -1,6 +1,19 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("`" @open "`" @close) (#set! rainbow.exclude))
-((rune_literal) @open @close (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("`" @open
+  "`" @close)
+  (#set! rainbow.exclude))
+
+((rune_literal) @open @close
+  (#set! rainbow.exclude))

crates/languages/src/go/debugger.scm 🔗

@@ -1,26 +1,44 @@
-(parameter_declaration (identifier) @debug-variable)
+(parameter_declaration
+  (identifier) @debug-variable)
 
-(short_var_declaration (expression_list (identifier) @debug-variable))
+(short_var_declaration
+  (expression_list
+    (identifier) @debug-variable))
 
-(var_declaration (var_spec (identifier) @debug-variable))
+(var_declaration
+  (var_spec
+    (identifier) @debug-variable))
 
-(const_declaration (const_spec (identifier) @debug-variable))
+(const_declaration
+  (const_spec
+    (identifier) @debug-variable))
 
-(assignment_statement (expression_list (identifier) @debug-variable))
+(assignment_statement
+  (expression_list
+    (identifier) @debug-variable))
 
-(binary_expression (identifier) @debug-variable
+(binary_expression
+  (identifier) @debug-variable
   (#not-match? @debug-variable "^[A-Z]"))
 
-(call_expression (argument_list (identifier) @debug-variable
-  (#not-match? @debug-variable "^[A-Z]")))
+(call_expression
+  (argument_list
+    (identifier) @debug-variable
+    (#not-match? @debug-variable "^[A-Z]")))
 
-(return_statement (expression_list (identifier) @debug-variable
-  (#not-match? @debug-variable "^[A-Z]")))
+(return_statement
+  (expression_list
+    (identifier) @debug-variable
+    (#not-match? @debug-variable "^[A-Z]")))
 
-(range_clause (expression_list (identifier) @debug-variable))
+(range_clause
+  (expression_list
+    (identifier) @debug-variable))
 
-(parenthesized_expression (identifier) @debug-variable
+(parenthesized_expression
+  (identifier) @debug-variable
   (#not-match? @debug-variable "^[A-Z]"))
 
 (block) @debug-scope
+
 (function_declaration) @debug-scope

crates/languages/src/go/highlights.scm 🔗

@@ -1,10 +1,12 @@
 (identifier) @variable
 
 (type_identifier) @type
+
 (type_spec
   name: (type_identifier) @type.definition)
 
 (field_identifier) @property
+
 (package_identifier) @namespace
 
 (label_name) @label
@@ -26,6 +28,7 @@
 
 (method_declaration
   name: (field_identifier) @function.method)
+
 (method_elem
   name: (field_identifier) @function.method)
 
@@ -144,8 +147,7 @@
 
 ; Go directives
 ((comment) @preproc
- (#match? @preproc "^//go:"))
+  (#match? @preproc "^//go:"))
 
 ((comment) @preproc
- (#match? @preproc "^// \\+build"))
-
+  (#match? @preproc "^// \\+build"))

crates/languages/src/go/imports.scm 🔗

@@ -1,14 +1,12 @@
 (import_spec
-    name: [
-        (dot)
-        (package_identifier)
-    ]
-    path: (interpreted_string_literal
-        (interpreted_string_literal_content) @namespace)
-) @wildcard @import
+  name: [
+    (dot)
+    (package_identifier)
+  ]
+  path: (interpreted_string_literal
+    (interpreted_string_literal_content) @namespace)) @wildcard @import
 
 (import_spec
-    !name
-    path: (interpreted_string_literal
-        (interpreted_string_literal_content) @namespace)
-) @wildcard @import
+  !name
+  path: (interpreted_string_literal
+    (interpreted_string_literal_content) @namespace)) @wildcard @import

crates/languages/src/go/indents.scm 🔗

@@ -1,9 +1,17 @@
 [
-    (assignment_statement)
-    (call_expression)
-    (selector_expression)
+  (assignment_statement)
+  (call_expression)
+  (selector_expression)
 ] @indent
 
-(_ "[" "]" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent

crates/languages/src/go/injections.scm 🔗

@@ -1,7 +1,6 @@
 ; Refer to https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/go/injections.scm#L4C1-L16C41
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 (call_expression
   (selector_expression) @_function
@@ -14,722 +13,718 @@
       (raw_string_literal)
       (interpreted_string_literal)
     ] @injection.content
-    (#set! injection.language "regex")
-    ))
+    (#set! injection.language "regex")))
 
 ; INJECT SQL
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
   (#match? @_comment "^\\/\\*\\s*sql\\s*\\*\\/$")
-  (#set! injection.language "sql")
-)
+  (#set! injection.language "sql"))
 
 ; INJECT JSON
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") ; /* json */ or /*json*/
-    (#set! injection.language "json")
-)
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/")
+  ; /* json */ or /*json*/
+  (#set! injection.language "json"))
 
 ; INJECT YAML
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") ; /* yaml */ or /*yaml*/
-    (#set! injection.language "yaml")
-)
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/")
+  ; /* yaml */ or /*yaml*/
+  (#set! injection.language "yaml"))
 
 ; INJECT XML
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") ; /* xml */ or /*xml*/
-    (#set! injection.language "xml")
-)
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/")
+  ; /* xml */ or /*xml*/
+  (#set! injection.language "xml"))
 
 ; INJECT HTML
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") ; /* html */ or /*html*/
-    (#set! injection.language "html")
-)
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/")
+  ; /* html */ or /*html*/
+  (#set! injection.language "html"))
 
 ; INJECT JS
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") ; /* js */ or /*js*/
-    (#set! injection.language "javascript")
-)
-
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/")
+  ; /* js */ or /*js*/
+  (#set! injection.language "javascript"))
 
 ; INJECT CSS
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") ; /* css */ or /*css*/
-    (#set! injection.language "css")
-)
-
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/")
+  ; /* css */ or /*css*/
+  (#set! injection.language "css"))
 
 ; INJECT LUA
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") ; /* lua */ or /*lua*/
-    (#set! injection.language "lua")
-)
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/")
+  ; /* lua */ or /*lua*/
+  (#set! injection.language "lua"))
 
 ; INJECT BASH
-(
-    [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (composite_literal
+    body: (literal_value
+      (keyed_element
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
+        value: (literal_element
+          [
+            (interpreted_string_literal
+              (interpreted_string_literal_content) @injection.content)
+            (raw_string_literal
+              (raw_string_literal_content) @injection.content)
+          ]))))
+  (expression_statement
+    (call_expression
+      (argument_list
         (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (composite_literal
-            body: (literal_value
-            (keyed_element
-            (comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))))
-
-        (expression_statement
-            (call_expression
-            (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )))
-    ]
-    (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") ; /* bash */ or /*bash*/
-    (#set! injection.language "bash")
-)
+        [
+          (interpreted_string_literal
+            (interpreted_string_literal_content) @injection.content)
+          (raw_string_literal
+            (raw_string_literal_content) @injection.content)
+        ])))
+]
+  (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/")
+  ; /* bash */ or /*bash*/
+  (#set! injection.language "bash"))
 
 ; INJECT CSV
-(
+([
+  (const_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (var_spec
+    name: (identifier)
+    "="
+    (comment) @_comment
+    value: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (assignment_statement
+    left: (expression_list)
+    "="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (short_var_declaration
+    left: (expression_list)
+    ":="
+    (comment) @_comment
+    right: (expression_list
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  ((comment) @_comment
+    value: (literal_element
+      [
+        (interpreted_string_literal
+          (interpreted_string_literal_content) @injection.content)
+        (raw_string_literal
+          (raw_string_literal_content) @injection.content)
+      ]))
+  (argument_list
+    (comment) @_comment
     [
-        (const_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (var_spec
-            name: (identifier)
-            "="
-            (comment) @_comment
-            value: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (assignment_statement
-        left: (expression_list)
-        "="
-        (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (short_var_declaration
-        left: (expression_list)
-        ":="
-        (comment) @_comment
-        right: (expression_list
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        ((comment) @_comment
-            value: (literal_element
-            [
-                (interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        ))
-
-        (argument_list
-            (comment) @_comment
-            [
-               	(interpreted_string_literal (interpreted_string_literal_content) @injection.content)
-                (raw_string_literal (raw_string_literal_content) @injection.content)
-            ]
-        )
-    ]
-    (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") ; /* csv */ or /*csv */
-    (#set! injection.language "csv")
-)
+      (interpreted_string_literal
+        (interpreted_string_literal_content) @injection.content)
+      (raw_string_literal
+        (raw_string_literal_content) @injection.content)
+    ])
+]
+  (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/")
+  ; /* csv */ or /*csv*/
+  (#set! injection.language "csv"))

crates/languages/src/go/outline.scm 🔗

@@ -1,67 +1,61 @@
 (comment) @annotation
 
 (type_declaration
-    "type" @context
-    [
-        (type_spec
-            name: (_) @name) @item
-        (
-            "("
-            (type_spec
-                name: (_) @name) @item
-            ")"
-      )
-    ]
-)
+  "type" @context
+  [
+    (type_spec
+      name: (_) @name) @item
+    ("("
+      (type_spec
+        name: (_) @name) @item
+      ")")
+  ])
 
 (function_declaration
-    "func" @context
-    name: (identifier) @name
-    parameters: (parameter_list
-      "("
-      ")")) @item
+  "func" @context
+  name: (identifier) @name
+  parameters: (parameter_list
+    "("
+    ")")) @item
 
 (method_declaration
-    "func" @context
-    receiver: (parameter_list
-        "(" @context
-        (parameter_declaration
-            name: (_) @context
-            type: (_) @context)
-        ")" @context)
-    name: (field_identifier) @name
-    parameters: (parameter_list
-      "("
-      ")")) @item
+  "func" @context
+  receiver: (parameter_list
+    "(" @context
+    (parameter_declaration
+      name: (_) @context
+      type: (_) @context)
+    ")" @context)
+  name: (field_identifier) @name
+  parameters: (parameter_list
+    "("
+    ")")) @item
 
 (const_declaration
-    "const" @context
-    (const_spec
-        name: (identifier) @name) @item)
+  "const" @context
+  (const_spec
+    name: (identifier) @name) @item)
 
 (source_file
-    (var_declaration
-        "var" @context
-        [
-            ; The declaration may define multiple variables, and so @item is on
-            ; the identifier so they get distinct ranges.
-            (var_spec
-                name: (identifier) @name @item)
-            (var_spec_list
-                (var_spec
-                    name: (identifier) @name @item)
-            )
-        ]
-     )
-)
+  (var_declaration
+    "var" @context
+    [
+      ; The declaration may define multiple variables, and so @item is on
+      ; the identifier so they get distinct ranges.
+      (var_spec
+        name: (identifier) @name @item)
+      (var_spec_list
+        (var_spec
+          name: (identifier) @name @item))
+    ]))
 
 (method_elem
-    name: (_) @name
-    parameters: (parameter_list
-      "(" @context
-      ")" @context)) @item
+  name: (_) @name
+  parameters: (parameter_list
+    "(" @context
+    ")" @context)) @item
 
 ; Fields declarations may define multiple fields, and so @item is on the
 ; declarator so they each get distinct ranges.
 (field_declaration
-    name: (_) @name @item)
+  name: (_) @name @item)

crates/languages/src/go/runnables.scm 🔗

@@ -1,170 +1,118 @@
 ; Functions names start with `Test`
-(
-  (
-    (function_declaration name: (_) @run
-      (#match? @run "^Test.*")
-      (#not-match? @run "^TestMain$"))
-  ) @_
-  (#set! tag go-test)
-)
+(((function_declaration
+  name: (_) @run
+  (#match? @run "^Test.*")
+  (#not-match? @run "^TestMain$"))) @_
+  (#set! tag go-test))
 
 ; Suite test methods (testify/suite)
-(
-    (method_declaration
-      receiver: (parameter_list
-        (parameter_declaration
-            type: [
-                (pointer_type (type_identifier) @_suite_name)
-                (type_identifier) @_suite_name
-            ]
-        )
-      )
-      name: (field_identifier) @run @_subtest_name
-      (#match? @_subtest_name "^Test.*")
-      (#match? @_suite_name ".*Suite")
-    ) @_
-    (#set! tag go-testify-suite)
-)
+((method_declaration
+  receiver: (parameter_list
+    (parameter_declaration
+      type: [
+        (pointer_type
+          (type_identifier) @_suite_name)
+        (type_identifier) @_suite_name
+      ]))
+  name: (field_identifier) @run @_subtest_name
+  (#match? @_subtest_name "^Test.*")
+  (#match? @_suite_name ".*Suite")) @_
+  (#set! tag go-testify-suite))
 
 ; `go:generate` comments
-(
-    ((comment) @_comment @run
-    (#match? @_comment "^//go:generate"))
-    (#set! tag go-generate)
-)
+(((comment) @_comment @run
+  (#match? @_comment "^//go:generate"))
+  (#set! tag go-generate))
 
 ; `t.Run`
-(
-  (
-    (call_expression
-      function: (
-        selector_expression
-        field: _ @run @_name
-        (#eq? @_name "Run")
-      )
-      arguments: (
-        argument_list
-        .
-        [
-          (interpreted_string_literal)
-          (raw_string_literal)
-        ] @_subtest_name
-        .
-        (func_literal
-          parameters: (
-            parameter_list
-            (parameter_declaration
-              name: (identifier) @_param_name
-              type: (pointer_type
-                (qualified_type
-                  package: (package_identifier) @_pkg
-                  name: (type_identifier) @_type
-                  (#eq? @_pkg "testing")
-                  (#eq? @_type "T")
-                )
-              )
-            )
-          )
-        ) @_second_argument
-      )
-    )
-  ) @_
-  (#set! tag go-subtest)
-)
+(((call_expression
+  function: (selector_expression
+    field: _ @run @_name
+    (#eq? @_name "Run"))
+  arguments: (argument_list
+    .
+    [
+      (interpreted_string_literal)
+      (raw_string_literal)
+    ] @_subtest_name
+    .
+    (func_literal
+      parameters: (parameter_list
+        (parameter_declaration
+          name: (identifier) @_param_name
+          type: (pointer_type
+            (qualified_type
+              package: (package_identifier) @_pkg
+              name: (type_identifier) @_type
+              (#eq? @_pkg "testing")
+              (#eq? @_type "T")))))) @_second_argument))) @_
+  (#set! tag go-subtest))
 
 ; Functions names start with `Example`
-(
-  (
-    (function_declaration name: (_) @run @_name
-      (#match? @_name "^Example.*"))
-  ) @_
-  (#set! tag go-example)
-)
+(((function_declaration
+  name: (_) @run @_name
+  (#match? @_name "^Example.*"))) @_
+  (#set! tag go-example))
 
 ; Functions names start with `Benchmark`
-(
-  (
-    (function_declaration name: (_) @run @_name
-      (#match? @_name "^Benchmark.*"))
-  ) @_
-  (#set! tag go-benchmark)
-)
+(((function_declaration
+  name: (_) @run @_name
+  (#match? @_name "^Benchmark.*"))) @_
+  (#set! tag go-benchmark))
 
 ; Functions names start with `Fuzz`
-(
-  (
-    (function_declaration name: (_) @run @_name
-      (#match? @_name "^Fuzz"))
-  ) @_
-  (#set! tag go-fuzz)
-)
+(((function_declaration
+  name: (_) @run @_name
+  (#match? @_name "^Fuzz"))) @_
+  (#set! tag go-fuzz))
 
 ; go run
-(
-  (
-    (function_declaration name: (_) @run
-      (#eq? @run "main"))
-  ) @_
-  (#set! tag go-main)
-)
+(((function_declaration
+  name: (_) @run
+  (#eq? @run "main"))) @_
+  (#set! tag go-main))
 
 ; Table test cases - slice and map with explicit variable
-(
-  (short_var_declaration
-    left: (expression_list (identifier) @_collection_var)
-    right: (expression_list
-      (composite_literal
-        type: [
-          (slice_type)
-          (map_type
-            key: (type_identifier) @_key_type
-            (#eq? @_key_type "string")
-          )
-        ]
-        body: (literal_value
-          [
+((short_var_declaration
+  left: (expression_list
+    (identifier) @_collection_var)
+  right: (expression_list
+    (composite_literal
+      type: [
+        (slice_type)
+        (map_type
+          key: (type_identifier) @_key_type
+          (#eq? @_key_type "string"))
+      ]
+      body: (literal_value
+        [
+          (literal_element
+            (literal_value
+              (keyed_element
+                (literal_element
+                  (identifier) @_field_name)
+                (literal_element
+                  [
+                    (interpreted_string_literal) @run @_table_test_case_name
+                    (raw_string_literal) @run @_table_test_case_name
+                  ]))))
+          (keyed_element
             (literal_element
-              (literal_value
-                (keyed_element
-                  (literal_element
-                    (identifier) @_field_name
-                  )
-                  (literal_element
-                    [
-                      (interpreted_string_literal) @run @_table_test_case_name
-                      (raw_string_literal) @run @_table_test_case_name
-                    ]
-                  )
-                )
-              )
-            )
-            (keyed_element
-              (literal_element
-                [
-                  (interpreted_string_literal) @run @_table_test_case_name
-                  (raw_string_literal) @run @_table_test_case_name
-                ]
-              )
-            )
-          ]
-        )
-      )
-    )
-  )
+              [
+                (interpreted_string_literal) @run @_table_test_case_name
+                (raw_string_literal) @run @_table_test_case_name
+              ]))
+        ]))))
   (for_statement
     (range_clause
       left: (expression_list
         [
-          (
-            (identifier)
-            (identifier) @_loop_var_inner
-          )
+          ((identifier)
+            (identifier) @_loop_var_inner)
           (identifier) @_loop_var_outer
-        ]
-      )
+        ])
       right: (identifier) @_range_var
-      (#eq? @_range_var @_collection_var)
-    )
+      (#eq? @_range_var @_collection_var))
     body: (block
       (statement_list
         (expression_statement
@@ -172,8 +120,7 @@
             function: (selector_expression
               operand: (identifier)
               field: (field_identifier) @_run_method
-              (#eq? @_run_method "Run")
-            )
+              (#eq? @_run_method "Run"))
             arguments: (argument_list
               .
               [
@@ -181,8 +128,7 @@
                   operand: (identifier) @_tc_var
                   (#eq? @_tc_var @_loop_var_inner)
                   field: (field_identifier) @_field_check
-                  (#eq? @_field_check @_field_name)
-                )
+                  (#eq? @_field_check @_field_name))
                 (identifier) @_arg_var
                 (#eq? @_arg_var @_loop_var_outer)
               ]
@@ -195,113 +141,72 @@
                         package: (package_identifier) @_pkg
                         name: (type_identifier) @_type
                         (#eq? @_pkg "testing")
-                        (#eq? @_type "T")
-                      )
-                    )
-                  )
-                )
-              )
-            )
-          )
-        )
-      )
-    )
-  ) @_
-  (#set! tag go-table-test-case)
-)
+                        (#eq? @_type "T")))))))))))) @_
+  (#set! tag go-table-test-case))
 
 ; Table test cases - slice and map declared right inside the loop without
 ; explicit variable
-(
-  (for_statement
-    (range_clause
-      left: (expression_list
+((for_statement
+  (range_clause
+    left: (expression_list
+      [
+        ((identifier)
+          (identifier) @_loop_var_inner)
+        (identifier) @_loop_var_outer
+      ])
+    right: (composite_literal
+      type: [
+        (slice_type)
+        (map_type
+          key: (type_identifier) @_key_type
+          (#eq? @_key_type "string"))
+      ]
+      body: (literal_value
         [
-          (
-            (identifier)
-            (identifier) @_loop_var_inner
-          )
-          (identifier) @_loop_var_outer
-        ]
-      )
-      right: (composite_literal
-        type: [
-          (slice_type)
-          (map_type
-            key: (type_identifier) @_key_type
-            (#eq? @_key_type "string")
-          )
-        ]
-        body: (literal_value
-          [
+          (literal_element
+            (literal_value
+              (keyed_element
+                (literal_element
+                  (identifier) @_field_name)
+                (literal_element
+                  [
+                    (interpreted_string_literal) @run @_table_test_case_name
+                    (raw_string_literal) @run @_table_test_case_name
+                  ]))))
+          (keyed_element
             (literal_element
-              (literal_value
-                (keyed_element
-                  (literal_element
-                    (identifier) @_field_name
-                  )
-                  (literal_element
-                    [
-                      (interpreted_string_literal) @run @_table_test_case_name
-                      (raw_string_literal) @run @_table_test_case_name
-                    ]
-                  )
-                )
-              )
-            )
-            (keyed_element
-              (literal_element
-                [
-                  (interpreted_string_literal) @run @_table_test_case_name
-                  (raw_string_literal) @run @_table_test_case_name
-                ]
-              )
-            )
-          ]
-        )
-      )
-    )
-    body: (block
-      (statement_list
-        (expression_statement
-          (call_expression
-            function: (selector_expression
-              operand: (identifier)
-              field: (field_identifier) @_run_method
-              (#eq? @_run_method "Run")
-            )
-            arguments: (argument_list
-              .
               [
-                (selector_expression
-                  operand: (identifier) @_tc_var
-                  (#eq? @_tc_var @_loop_var_inner)
-                  field: (field_identifier) @_field_check
-                  (#eq? @_field_check @_field_name)
-                )
-                (identifier) @_arg_var
-                (#eq? @_arg_var @_loop_var_outer)
-              ]
-              .
-              (func_literal
-                parameters: (parameter_list
-                  (parameter_declaration
-                    type: (pointer_type
-                      (qualified_type
-                        package: (package_identifier) @_pkg
-                        name: (type_identifier) @_type
-                        (#eq? @_pkg "testing")
-                        (#eq? @_type "T")
-                      )
-                    )
-                  )
-                )
-              )
-            )
-          )
-        )
-      )
-    )
-  ) @_
-  (#set! tag go-table-test-case-without-explicit-variable)
-)
+                (interpreted_string_literal) @run @_table_test_case_name
+                (raw_string_literal) @run @_table_test_case_name
+              ]))
+        ])))
+  body: (block
+    (statement_list
+      (expression_statement
+        (call_expression
+          function: (selector_expression
+            operand: (identifier)
+            field: (field_identifier) @_run_method
+            (#eq? @_run_method "Run"))
+          arguments: (argument_list
+            .
+            [
+              (selector_expression
+                operand: (identifier) @_tc_var
+                (#eq? @_tc_var @_loop_var_inner)
+                field: (field_identifier) @_field_check
+                (#eq? @_field_check @_field_name))
+              (identifier) @_arg_var
+              (#eq? @_arg_var @_loop_var_outer)
+            ]
+            .
+            (func_literal
+              parameters: (parameter_list
+                (parameter_declaration
+                  type: (pointer_type
+                    (qualified_type
+                      package: (package_identifier) @_pkg
+                      name: (type_identifier) @_type
+                      (#eq? @_pkg "testing")
+                      (#eq? @_type "T")))))))))))) @_
+  (#set! tag go-table-test-case-without-explicit-variable))

crates/languages/src/go/textobjects.scm 🔗

@@ -1,24 +1,27 @@
 (function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (method_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (type_declaration
-    (type_spec (struct_type (field_declaration_list (
-        "{"
-        (_)* @class.inside
-        "}")?)))) @class.around
+  (type_spec
+    (struct_type
+      (field_declaration_list
+        ("{"
+          (_)* @class.inside
+          "}")?)))) @class.around
 
 (type_declaration
-    (type_spec (interface_type
-        (_)* @class.inside))) @class.around
+  (type_spec
+    (interface_type
+      (_)* @class.inside))) @class.around
 
 (type_declaration) @class.around
 

crates/languages/src/gomod/structure.scm 🔗

@@ -1,35 +1,29 @@
 (require_directive
   "require" @structure.anchor
-  ("(") @structure.open
-  (")") @structure.close
-)
+  "(" @structure.open
+  ")" @structure.close)
 
 (exclude_directive
   "exclude" @structure.anchor
-  ("(") @structure.open
-  (")") @structure.close
-)
+  "(" @structure.open
+  ")" @structure.close)
 
 (module_directive
   "module" @structure.anchor
-  ("(") @structure.open
-  (")") @structure.close
-)
+  "(" @structure.open
+  ")" @structure.close)
 
 (replace_directive
   "replace" @structure.anchor
-  ("(") @structure.open
-  (")") @structure.close
-)
+  "(" @structure.open
+  ")" @structure.close)
 
 (retract_directive
   "retract" @structure.anchor
-  ("(") @structure.open
-  (")") @structure.close
-)
+  "(" @structure.open
+  ")" @structure.close)
 
 (ignore_directive
   "ignore" @structure.anchor
-  ("(") @structure.open
-  (")") @structure.close
-)
+  "(" @structure.open
+  ")" @structure.close)

crates/languages/src/javascript/brackets.scm 🔗

@@ -1,9 +1,29 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-("<" @open ">" @close)
-("<" @open "/>" @close)
-("</" @open ">" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
-(("`" @open "`" @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+("<" @open
+  ">" @close)
+
+("<" @open
+  "/>" @close)
+
+("</" @open
+  ">" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))
+
+(("`" @open
+  "`" @close)
+  (#set! rainbow.exclude))

crates/languages/src/javascript/debugger.scm 🔗

@@ -1,23 +1,51 @@
-(lexical_declaration (variable_declarator name: (identifier) @debug-variable))
+(lexical_declaration
+  (variable_declarator
+    name: (identifier) @debug-variable))
 
-(for_in_statement left: (identifier) @debug-variable)
-(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable)))
+(for_in_statement
+  left: (identifier) @debug-variable)
 
-(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_statement
+  initializer: (lexical_declaration
+    (variable_declarator
+      name: (identifier) @debug-variable)))
 
-(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_expression
+  left: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_expression
+  right: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(unary_expression
+  argument: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(update_expression
+  argument: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(return_statement
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(parenthesized_expression
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(array
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(pair
+  value: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(member_expression
+  object: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
 (statement_block) @debug-scope
+
 (program) @debug-scope

crates/languages/src/javascript/highlights.scm 🔗

@@ -1,56 +1,33 @@
 ; Variables
-
 (identifier) @variable
 
 (call_expression
   function: (member_expression
     object: (identifier) @type
-    (#any-of?
-      @type
-      "Promise"
-      "Array"
-      "Object"
-      "Map"
-      "Set"
-      "WeakMap"
-      "WeakSet"
-      "Date"
-      "Error"
-      "TypeError"
-      "RangeError"
-      "SyntaxError"
-      "ReferenceError"
-      "EvalError"
-      "URIError"
-      "RegExp"
-      "Function"
-      "Number"
-      "String"
-      "Boolean"
-      "Symbol"
-      "BigInt"
-      "Proxy"
-      "ArrayBuffer"
-      "DataView"
-    )
-  )
-)
+    (#any-of? @type
+      "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError"
+      "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function"
+      "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView")))
 
 ; Properties
-
 (property_identifier) @property
+
 (shorthand_property_identifier) @property
+
 (shorthand_property_identifier_pattern) @property
+
 (private_property_identifier) @property
 
 ; Function and method calls
-
 (call_expression
   function: (identifier) @function)
 
 (call_expression
   function: (member_expression
-      property: [(property_identifier) (private_property_identifier)] @function.method))
+    property: [
+      (property_identifier)
+      (private_property_identifier)
+    ] @function.method))
 
 (new_expression
   constructor: (identifier) @type)
@@ -59,36 +36,58 @@
   module: (identifier) @type)
 
 ; Function and method definitions
-
 (function_expression
   name: (identifier) @function)
+
 (function_declaration
   name: (identifier) @function)
+
 (method_definition
-  name: [(property_identifier) (private_property_identifier)] @function.method)
+  name: [
+    (property_identifier)
+    (private_property_identifier)
+  ] @function.method)
+
 (method_definition
-    name: (property_identifier) @constructor
-    (#eq? @constructor "constructor"))
+  name: (property_identifier) @constructor
+  (#eq? @constructor "constructor"))
 
 (pair
-  key: [(property_identifier) (private_property_identifier)] @function.method
-  value: [(function_expression) (arrow_function)])
+  key: [
+    (property_identifier)
+    (private_property_identifier)
+  ] @function.method
+  value: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (assignment_expression
   left: (member_expression
-    property: [(property_identifier) (private_property_identifier)] @function.method)
-  right: [(function_expression) (arrow_function)])
+    property: [
+      (property_identifier)
+      (private_property_identifier)
+    ] @function.method)
+  right: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (variable_declarator
   name: (identifier) @function
-  value: [(function_expression) (arrow_function)])
+  value: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (assignment_expression
   left: (identifier) @function
-  right: [(function_expression) (arrow_function)])
+  right: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 ; Parameters
-
 (required_parameter
   (identifier) @variable.parameter)
 
@@ -120,25 +119,26 @@
 
 ; Special identifiers
 ;
+(type_identifier) @type
+
+(predefined_type) @type.builtin
+
 (class_declaration
   (type_identifier) @type.class)
 
 (extends_clause
   value: (identifier) @type.class)
 
-(type_identifier) @type
-(predefined_type) @type.builtin
-
 ([
   (identifier)
   (shorthand_property_identifier)
   (shorthand_property_identifier_pattern)
- ] @constant
- (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
+] @constant
+  (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
 
 ; Literals
-
 (this) @variable.special
+
 (super) @variable.special
 
 [
@@ -163,11 +163,12 @@
 (escape_sequence) @string.escape
 
 (regex) @string.regex
+
 (regex_flags) @keyword.operator.regex
+
 (number) @number
 
 ; Tokens
-
 [
   ";"
   "?."
@@ -224,7 +225,8 @@
   "..."
 ] @operator
 
-(regex "/" @string.regex)
+(regex
+  "/" @string.regex)
 
 [
   "("
@@ -233,14 +235,13 @@
   "]"
   "{"
   "}"
-]  @punctuation.bracket
+] @punctuation.bracket
 
 (ternary_expression
   [
     "?"
     ":"
-  ] @operator
-)
+  ] @operator)
 
 [
   "abstract"
@@ -310,7 +311,8 @@
   "yield"
 ] @keyword.control
 
-(switch_default "default" @keyword.control)
+(switch_default
+  "default" @keyword.control)
 
 (template_substitution
   "${" @punctuation.special
@@ -320,7 +322,8 @@
   "<" @punctuation.bracket
   ">" @punctuation.bracket)
 
-(decorator "@" @punctuation.special)
+(decorator
+  "@" @punctuation.special)
 
 ; JSX elements
 (jsx_opening_element
@@ -328,36 +331,61 @@
     (identifier) @type
     (member_expression
       object: (identifier) @type
-      property: (property_identifier) @type
-    )
-  ]
-)
+      property: (property_identifier) @type)
+  ])
+
 (jsx_closing_element
   [
     (identifier) @type
     (member_expression
       object: (identifier) @type
-      property: (property_identifier) @type
-    )
-  ]
-)
+      property: (property_identifier) @type)
+  ])
+
 (jsx_self_closing_element
   [
     (identifier) @type
     (member_expression
       object: (identifier) @type
-      property: (property_identifier) @type
-    )
-  ]
-)
-
-(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
-(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
-(jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
-(jsx_attribute (property_identifier) @attribute.jsx)
-(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx)
-(jsx_closing_element (["</" ">"]) @punctuation.bracket.jsx)
-(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx)
-(jsx_attribute "=" @punctuation.delimiter.jsx)
+      property: (property_identifier) @type)
+  ])
+
+(jsx_opening_element
+  (identifier) @tag.jsx
+  (#match? @tag.jsx "^[a-z][^.]*$"))
+
+(jsx_closing_element
+  (identifier) @tag.jsx
+  (#match? @tag.jsx "^[a-z][^.]*$"))
+
+(jsx_self_closing_element
+  (identifier) @tag.jsx
+  (#match? @tag.jsx "^[a-z][^.]*$"))
+
+(jsx_attribute
+  (property_identifier) @attribute.jsx)
+
+(jsx_opening_element
+  ([
+    "<"
+    ">"
+  ]) @punctuation.bracket.jsx)
+
+(jsx_closing_element
+  ([
+    "</"
+    ">"
+  ]) @punctuation.bracket.jsx)
+
+(jsx_self_closing_element
+  ([
+    "<"
+    "/>"
+  ]) @punctuation.bracket.jsx)
+
+(jsx_attribute
+  "=" @punctuation.delimiter.jsx)
+
 (jsx_text) @text.jsx
+
 (html_character_reference) @string.special

crates/languages/src/javascript/imports.scm 🔗

@@ -1,14 +1,16 @@
 (import_statement
-    import_clause: (import_clause
-        [
-            (identifier) @name
-            (named_imports
-                (import_specifier
-                    name: (_) @name
-                    alias: (_)? @alias))
-        ])
-    source: (string (string_fragment) @source)) @import
+  import_clause: (import_clause
+    [
+      (identifier) @name
+      (named_imports
+        (import_specifier
+          name: (_) @name
+          alias: (_)? @alias))
+    ])
+  source: (string
+    (string_fragment) @source)) @import
 
 (import_statement
-    !import_clause
-    source: (string (string_fragment) @source @wildcard)) @import
+  !import_clause
+  source: (string
+    (string_fragment) @source @wildcard)) @import

crates/languages/src/javascript/indents.scm 🔗

@@ -1,20 +1,32 @@
 [
-    (call_expression)
-    (assignment_expression)
-    (member_expression)
-    (lexical_declaration)
-    (variable_declaration)
-    (assignment_expression)
-    (if_statement)
-    (for_statement)
+  (call_expression)
+  (assignment_expression)
+  (member_expression)
+  (lexical_declaration)
+  (variable_declaration)
+  (assignment_expression)
+  (if_statement)
+  (for_statement)
 ] @indent
 
-(_ "[" "]" @end) @indent
-(_ "<" ">" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
 
-(jsx_opening_element ">" @end) @indent
+(_
+  "<"
+  ">" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent
+
+(jsx_opening_element
+  ">" @end) @indent
 
 (jsx_element
   (jsx_opening_element) @start

crates/languages/src/javascript/injections.scm 🔗

@@ -1,6 +1,5 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 (((comment) @_jsdoc_comment
   (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content
@@ -10,119 +9,136 @@
   (#set! injection.language "regex"))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "css")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "css")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
   function: (member_expression
-    object: (identifier) @_obj (#eq? @_obj "styled")
+    object: (identifier) @_obj
+    (#eq? @_obj "styled")
     property: (property_identifier))
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
   function: (call_expression
-    function: (identifier) @_name (#eq? @_name "styled"))
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+    function: (identifier) @_name
+    (#eq? @_name "styled"))
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "html")
+  function: (identifier) @_name
+  (#eq? @_name "html")
   arguments: (template_string) @injection.content
-                              (#set! injection.language "html")
-)
+  (#set! injection.language "html"))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "js")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "javascript"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "js")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "javascript")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "json")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "json"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "json")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "json")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "sql")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "sql"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "sql")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "sql")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "ts")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "typescript"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "ts")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "typescript")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^ya?ml$")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "yaml"))
-)
+  function: (identifier) @_name
+  (#match? @_name "^ya?ml$")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "yaml")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^g(raph)?ql$")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "graphql"))
-)
+  function: (identifier) @_name
+  (#match? @_name "^g(raph)?ql$")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "graphql")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^g(raph)?ql$")
-  arguments: (arguments (template_string (string_fragment) @injection.content
-                              (#set! injection.language "graphql")))
-)
+  function: (identifier) @_name
+  (#match? @_name "^g(raph)?ql$")
+  arguments: (arguments
+    (template_string
+      (string_fragment) @injection.content
+      (#set! injection.language "graphql"))))
 
 (call_expression
-  function: (identifier) @_name(#match? @_name "^iso$")
-  arguments: (arguments (template_string (string_fragment) @injection.content
-                              (#set! injection.language "isograph")))
-)
+  function: (identifier) @_name
+  (#match? @_name "^iso$")
+  arguments: (arguments
+    (template_string
+      (string_fragment) @injection.content
+      (#set! injection.language "isograph"))))
 
 ; Parse the contents of strings and tagged template
 ; literals with leading ECMAScript comments:
 ; '/* html */' or '/*html*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/")
-  (#set! injection.language "html")
-)
+  (#set! injection.language "html"))
 
 ; '/* sql */' or '/*sql*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/")
-  (#set! injection.language "sql")
-)
+  (#set! injection.language "sql"))
 
 ; '/* gql */' or '/*gql*/'
 ; '/* graphql */' or '/*graphql*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/")
-  (#set! injection.language "graphql")
-)
+  (#set! injection.language "graphql"))
 
 ; '/* css */' or '/*css*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/")
-  (#set! injection.language "css")
-)
+  (#set! injection.language "css"))

crates/languages/src/javascript/outline.scm 🔗

@@ -1,223 +1,269 @@
 (internal_module
-    "namespace" @context
-    name: (_) @name) @item
+  "namespace" @context
+  name: (_) @name) @item
 
 (enum_declaration
-    "enum" @context
-    name: (_) @name) @item
+  "enum" @context
+  name: (_) @name) @item
 
 (function_declaration
-    "async"? @context
-    "function" @context
-    name: (_) @name
-    parameters: (formal_parameters
-      "(" @context
-      ")" @context)) @item
+  "async"? @context
+  "function" @context
+  name: (_) @name
+  parameters: (formal_parameters
+    "(" @context
+    ")" @context)) @item
 
 (generator_function_declaration
-    "async"? @context
-    "function" @context
-    "*" @context
-    name: (_) @name
-    parameters: (formal_parameters
-      "(" @context
-      ")" @context)) @item
+  "async"? @context
+  "function" @context
+  "*" @context
+  name: (_) @name
+  parameters: (formal_parameters
+    "(" @context
+    ")" @context)) @item
 
 (interface_declaration
-    "interface" @context
-    name: (_) @name) @item
+  "interface" @context
+  name: (_) @name) @item
 
 (program
-    (export_statement
-        (lexical_declaration
-            ["let" "const"] @context
-            (variable_declarator
-                name: (identifier) @name) @item)))
+  (export_statement
+    (lexical_declaration
+      [
+        "let"
+        "const"
+      ] @context
+      (variable_declarator
+        name: (identifier) @name) @item)))
 
 ; Exported array destructuring
 (program
-    (export_statement
-        (lexical_declaration
-            ["let" "const"] @context
-            (variable_declarator
-                name: (array_pattern
-                    [
-                        (identifier) @name @item
-                        (assignment_pattern left: (identifier) @name @item)
-                        (rest_pattern (identifier) @name @item)
-                    ])))))
+  (export_statement
+    (lexical_declaration
+      [
+        "let"
+        "const"
+      ] @context
+      (variable_declarator
+        name: (array_pattern
+          [
+            (identifier) @name @item
+            (assignment_pattern
+              left: (identifier) @name @item)
+            (rest_pattern
+              (identifier) @name @item)
+          ])))))
 
 ; Exported object destructuring
 (program
-    (export_statement
-        (lexical_declaration
-            ["let" "const"] @context
-            (variable_declarator
-                name: (object_pattern
-                    [(shorthand_property_identifier_pattern) @name @item
-                     (pair_pattern
-                         value: (identifier) @name @item)
-                     (pair_pattern
-                         value: (assignment_pattern left: (identifier) @name @item))
-                     (rest_pattern (identifier) @name @item)])))))
+  (export_statement
+    (lexical_declaration
+      [
+        "let"
+        "const"
+      ] @context
+      (variable_declarator
+        name: (object_pattern
+          [
+            (shorthand_property_identifier_pattern) @name @item
+            (pair_pattern
+              value: (identifier) @name @item)
+            (pair_pattern
+              value: (assignment_pattern
+                left: (identifier) @name @item))
+            (rest_pattern
+              (identifier) @name @item)
+          ])))))
 
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Top-level array destructuring
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Top-level object destructuring
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern
-                     value: (identifier) @name @item)
-                 (pair_pattern
-                     value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (class_declaration
-    "class" @context
-    name: (_) @name) @item
+  "class" @context
+  name: (_) @name) @item
 
 ; Method definitions in classes (not in object literals)
 (class_body
-    (method_definition
-        [
-            "get"
-            "set"
-            "async"
-            "*"
-            "readonly"
-            "static"
-            (override_modifier)
-            (accessibility_modifier)
-        ]* @context
-        name: (_) @name
-        parameters: (formal_parameters
-          "(" @context
-          ")" @context)) @item)
+  (method_definition
+    [
+      "get"
+      "set"
+      "async"
+      "*"
+      "readonly"
+      "static"
+      (override_modifier)
+      (accessibility_modifier)
+    ]* @context
+    name: (_) @name
+    parameters: (formal_parameters
+      "(" @context
+      ")" @context)) @item)
 
 ; Object literal methods
 (variable_declarator
-    value: (object
-        (method_definition
-            [
-                "get"
-                "set"
-                "async"
-                "*"
-            ]* @context
-            name: (_) @name
-            parameters: (formal_parameters
-              "(" @context
-              ")" @context)) @item))
+  value: (object
+    (method_definition
+      [
+        "get"
+        "set"
+        "async"
+        "*"
+      ]* @context
+      name: (_) @name
+      parameters: (formal_parameters
+        "(" @context
+        ")" @context)) @item))
 
 (public_field_definition
-    [
-        "declare"
-        "readonly"
-        "abstract"
-        "static"
-        (accessibility_modifier)
-    ]* @context
-    name: (_) @name) @item
+  [
+    "declare"
+    "readonly"
+    "abstract"
+    "static"
+    (accessibility_modifier)
+  ]* @context
+  name: (_) @name) @item
 
 ; Add support for (node:test, bun:test and Jest) runnable
-(
-    (call_expression
-        function: [
-            (identifier) @_name
-            (member_expression
-                object: [
-                    (identifier) @_name
-                    (member_expression object: (identifier) @_name)
-                ]
-            )
-        ] @context
-        (#any-of? @_name "it" "test" "describe" "context" "suite")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @name)
-                (identifier) @name
-            ]
-        )
-    )
-) @item
+((call_expression
+  function: [
+    (identifier) @_name
+    (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ])
+  ] @context
+  (#any-of? @_name "it" "test" "describe" "context" "suite")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @name)
+      (identifier) @name
+    ]))) @item
 
 ; Add support for parameterized tests
-(
-    (call_expression
-        function: (call_expression
-            function: (member_expression
-                object: [(identifier) @_name (member_expression object: (identifier) @_name)]
-                property: (property_identifier) @_property
-            )
-            (#any-of? @_name "it" "test" "describe" "context" "suite")
-            (#eq? @_property "each")
-        )
-        arguments: (
-            arguments . [
-                (string (string_fragment) @name)
-                (identifier) @name
-            ]
-        )
-    )
-) @item
+((call_expression
+  function: (call_expression
+    function: (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ]
+      property: (property_identifier) @_property)
+    (#any-of? @_name "it" "test" "describe" "context" "suite")
+    (#eq? @_property "each"))
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @name)
+      (identifier) @name
+    ]))) @item
 
 ; Object properties
 (pair
-    key: [
-        (property_identifier) @name
-        (string (string_fragment) @name)
-        (number) @name
-        (computed_property_name) @name
-    ]) @item
+  key: [
+    (property_identifier) @name
+    (string
+      (string_fragment) @name)
+    (number) @name
+    (computed_property_name) @name
+  ]) @item
 
 ; Nested variables in function bodies
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Nested array destructuring in functions
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Nested object destructuring in functions
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern value: (identifier) @name @item)
-                 (pair_pattern value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (comment) @annotation

crates/languages/src/javascript/runnables.scm 🔗

@@ -1,46 +1,42 @@
 ; Add support for (node:test, bun:test and Jest) runnable
 ; Function expression that has `it`, `test` or `describe` as the function name
-(
-    (call_expression
-        function: [
-            (identifier) @_name
-            (member_expression
-                object: [
-                    (identifier) @_name
-                    (member_expression object: (identifier) @_name)
-                ]
-            )
-        ]
-        (#any-of? @_name "it" "test" "describe" "context" "suite")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run)
-                (identifier) @run
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: [
+    (identifier) @_name
+    (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ])
+  ]
+  (#any-of? @_name "it" "test" "describe" "context" "suite")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run)
+      (identifier) @run
+    ])) @_js-test
+  (#set! tag js-test))
 
 ; Add support for parameterized tests
-(
-    (call_expression
-        function: (call_expression
-            function: (member_expression
-                object: [(identifier) @_name (member_expression object: (identifier) @_name)]
-                property: (property_identifier) @_property
-            )
-            (#any-of? @_name "it" "test" "describe" "context" "suite")
-            (#eq? @_property "each")
-        )
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run)
-                (identifier) @run
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: (call_expression
+    function: (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ]
+      property: (property_identifier) @_property)
+    (#any-of? @_name "it" "test" "describe" "context" "suite")
+    (#eq? @_property "each"))
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run)
+      (identifier) @run
+    ])) @_js-test
+  (#set! tag js-test))

crates/languages/src/javascript/textobjects.scm 🔗

@@ -1,85 +1,91 @@
 (comment)+ @comment.around
 
 (function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (method_definition
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (function_expression
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 ((arrow_function
-    body: (statement_block
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
- (#not-has-parent? @function.around variable_declarator))
+  body: (statement_block
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
+  (#not-has-parent? @function.around variable_declarator))
 
 ; Arrow function in variable declaration - capture the full declaration
 ([
-    (lexical_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (statement_block
-                    "{"
-                    (_)* @function.inside
-                    "}"))))
-    (variable_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (statement_block
-                    "{"
-                    (_)* @function.inside
-                    "}"))))
+  (lexical_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (statement_block
+          "{"
+          (_)* @function.inside
+          "}"))))
+  (variable_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (statement_block
+          "{"
+          (_)* @function.inside
+          "}"))))
 ]) @function.around
 
 ; Arrow function in variable declaration (captures body for expression-bodied arrows)
 ([
-    (lexical_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (_) @function.inside)))
-    (variable_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (_) @function.inside)))
+  (lexical_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (_) @function.inside)))
+  (variable_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (_) @function.inside)))
 ]) @function.around
 
 ; Catch-all for arrow functions in other contexts (callbacks, etc.)
 ((arrow_function
-    body: (_) @function.inside) @function.around
- (#not-has-parent? @function.around variable_declarator))
+  body: (_) @function.inside) @function.around
+  (#not-has-parent? @function.around variable_declarator))
 
 (generator_function
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (generator_function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (class_declaration
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (class
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around

crates/languages/src/json.rs 🔗

@@ -245,6 +245,7 @@ impl LspAdapter for JsonLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         Ok(Some(json!({
             "provideFormatter": true

crates/languages/src/json/brackets.scm 🔗

@@ -1,3 +1,9 @@
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))

crates/languages/src/json/config.toml 🔗

@@ -1,6 +1,6 @@
 name = "JSON"
 grammar = "json"
-path_suffixes = ["json", "flake.lock", "geojson"]
+path_suffixes = ["json", "flake.lock", "geojson", "prettierrc"]
 line_comments = ["// "]
 autoclose_before = ",]}"
 brackets = [

crates/languages/src/json/redactions.scm 🔗

@@ -1,4 +1,11 @@
-(pair value: (number) @redact)
-(pair value: (string) @redact)
-(array (number) @redact)
-(array (string) @redact)
+(pair
+  value: (number) @redact)
+
+(pair
+  value: (string) @redact)
+
+(array
+  (number) @redact)
+
+(array
+  (string) @redact)

crates/languages/src/json/runnables.scm 🔗

@@ -1,21 +1,13 @@
 ; Add support `package.json` and `composer.json` script runnable
-
-(
-    (document
-        (object
-            (pair
-                key: (string
-                    (string_content) @_name
-                    (#eq? @_name "scripts")
-                )
-                value: (object
-                    (pair
-                        key: (string (string_content) @run @script)
-                    )
-                )
-            )
-        )
-    )
-    (#set! tag package-script)
-    (#set! tag composer-script)
-)
+((document
+  (object
+    (pair
+      key: (string
+        (string_content) @_name
+        (#eq? @_name "scripts"))
+      value: (object
+        (pair
+          key: (string
+            (string_content) @run @script))))))
+  (#set! tag package-script)
+  (#set! tag composer-script))

crates/languages/src/jsonc/brackets.scm 🔗

@@ -1,3 +1,9 @@
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))

crates/languages/src/jsonc/redactions.scm 🔗

@@ -1,4 +1,11 @@
-(pair value: (number) @redact)
-(pair value: (string) @redact)
-(array (number) @redact)
-(array (string) @redact)
+(pair
+  value: (number) @redact)
+
+(pair
+  value: (string) @redact)
+
+(array
+  (number) @redact)
+
+(array
+  (string) @redact)

crates/languages/src/lib.rs 🔗

@@ -141,6 +141,7 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
             name: "go",
             adapters: vec![go_lsp_adapter.clone()],
             context: Some(go_context_provider.clone()),
+            semantic_token_rules: Some(go::semantic_token_rules()),
             ..Default::default()
         },
         LanguageInfo {
@@ -179,7 +180,13 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
         },
         LanguageInfo {
             name: "python",
-            adapters: vec![basedpyright_lsp_adapter, ruff_lsp_adapter],
+            adapters: vec![
+                basedpyright_lsp_adapter,
+                ruff_lsp_adapter,
+                ty_lsp_adapter,
+                py_lsp_adapter,
+                python_lsp_adapter,
+            ],
             context: Some(python_context_provider),
             toolchain: Some(python_toolchain_provider),
             manifest_name: Some(SharedString::new_static("pyproject.toml").into()),
@@ -281,9 +288,6 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
         typescript_lsp_adapter,
     );
 
-    languages.register_available_lsp_adapter(python_lsp_adapter.name(), python_lsp_adapter);
-    languages.register_available_lsp_adapter(py_lsp_adapter.name(), py_lsp_adapter);
-    languages.register_available_lsp_adapter(ty_lsp_adapter.name(), ty_lsp_adapter);
     // Register Tailwind for the existing languages that should have it by default.
     //
     // This can be driven by the `language_servers` setting once we have a way for
@@ -368,8 +372,8 @@ fn register_language(
 ) {
     let config = load_config(name);
     if let Some(rules) = &semantic_token_rules {
-        SettingsStore::update_global(cx, |store, _| {
-            store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone());
+        SettingsStore::update_global(cx, |store, cx| {
+            store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone(), cx);
         });
     }
     for adapter in adapters {

crates/languages/src/markdown-inline/highlights.scm 🔗

@@ -1,6 +1,9 @@
 (emphasis) @emphasis.markup
+
 (strong_emphasis) @emphasis.strong.markup
+
 (code_span) @text.literal.markup
+
 (strikethrough) @strikethrough.markup
 
 [
@@ -13,8 +16,18 @@
   (link_label)
 ] @link_text.markup
 
-(inline_link ["(" ")"] @link_uri.markup)
-(image ["(" ")"] @link_uri.markup)
+(inline_link
+  [
+    "("
+    ")"
+  ] @link_uri.markup)
+
+(image
+  [
+    "("
+    ")"
+  ] @link_uri.markup)
+
 [
   (link_destination)
   (uri_autolink)

crates/languages/src/markdown/brackets.scm 🔗

@@ -1,7 +1,24 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("`" @open "`" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
-(((fenced_code_block_delimiter) @open (fenced_code_block_delimiter) @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("`" @open
+  "`" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))
+
+(((fenced_code_block_delimiter) @open
+  (fenced_code_block_delimiter) @close)
+  (#set! rainbow.exclude))

crates/languages/src/markdown/config.toml 🔗

@@ -1,6 +1,6 @@
 name = "Markdown"
 grammar = "markdown"
-path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"]
+path_suffixes = ["md", "mdx", "mdwn", "mdc", "markdown", "MD"]
 completion_query_characters = ["-"]
 block_comment = { start = "<!--", prefix = "", end = "-->", tab_size = 0 }
 autoclose_before = ";:.,=}])>"

crates/languages/src/markdown/highlights.scm 🔗

@@ -9,7 +9,9 @@
   (setext_heading)
   (thematic_break)
 ] @title.markup
-(setext_heading (paragraph) @title.markup)
+
+(setext_heading
+  (paragraph) @title.markup)
 
 [
   (list_marker_plus)
@@ -20,10 +22,18 @@
 ] @punctuation.list_marker.markup
 
 (block_quote_marker) @punctuation.markup
-(pipe_table_header "|" @punctuation.markup)
-(pipe_table_row "|" @punctuation.markup)
-(pipe_table_delimiter_row "|" @punctuation.markup)
-(pipe_table_delimiter_cell "-" @punctuation.markup)
+
+(pipe_table_header
+  "|" @punctuation.markup)
+
+(pipe_table_row
+  "|" @punctuation.markup)
+
+(pipe_table_delimiter_row
+  "|" @punctuation.markup)
+
+(pipe_table_delimiter_cell
+  "-" @punctuation.markup)
 
 [
   (fenced_code_block_delimiter)
@@ -31,4 +41,5 @@
 ] @punctuation.embedded.markup
 
 (link_reference_definition) @link_text.markup
+
 (link_destination) @link_uri.markup

crates/languages/src/markdown/injections.scm 🔗

@@ -4,11 +4,13 @@
   (code_fence_content) @injection.content)
 
 ((inline) @injection.content
- (#set! injection.language "markdown-inline"))
+  (#set! injection.language "markdown-inline"))
 
 ((html_block) @injection.content
   (#set! injection.language "html"))
 
-((minus_metadata) @injection.content (#set! injection.language "yaml"))
+((minus_metadata) @injection.content
+  (#set! injection.language "yaml"))
 
-((plus_metadata) @injection.content (#set! injection.language "toml"))
+((plus_metadata) @injection.content
+  (#set! injection.language "toml"))

crates/languages/src/python.rs 🔗

@@ -112,6 +112,8 @@ impl FromStr for TestRunner {
 /// Decided to ignore Pyright's sortText() completely and to manually sort all entries
 fn process_pyright_completions(items: &mut [lsp::CompletionItem]) {
     for item in items {
+        let is_named_argument = item.label.ends_with('=');
+
         let is_dunder = item.label.starts_with("__") && item.label.ends_with("__");
 
         let visibility_priority = if is_dunder {
@@ -124,23 +126,35 @@ fn process_pyright_completions(items: &mut [lsp::CompletionItem]) {
             '0' // public
         };
 
+        let is_external = item
+            .detail
+            .as_ref()
+            .is_some_and(|detail| detail == "Auto-import");
+
+        let source_priority = if is_external { '1' } else { '0' };
+
         // Kind priority within same visibility level
         let kind_priority = match item.kind {
-            Some(lsp::CompletionItemKind::ENUM_MEMBER) => '0',
-            Some(lsp::CompletionItemKind::FIELD) => '1',
-            Some(lsp::CompletionItemKind::PROPERTY) => '2',
-            Some(lsp::CompletionItemKind::VARIABLE) => '3',
-            Some(lsp::CompletionItemKind::CONSTANT) => '4',
-            Some(lsp::CompletionItemKind::METHOD) => '5',
-            Some(lsp::CompletionItemKind::FUNCTION) => '5',
-            Some(lsp::CompletionItemKind::CLASS) => '6',
-            Some(lsp::CompletionItemKind::MODULE) => '7',
-            _ => '8',
+            Some(lsp::CompletionItemKind::KEYWORD) => '0',
+            Some(lsp::CompletionItemKind::ENUM_MEMBER) => '1',
+            Some(lsp::CompletionItemKind::FIELD) => '2',
+            Some(lsp::CompletionItemKind::PROPERTY) => '3',
+            Some(lsp::CompletionItemKind::VARIABLE) => '4',
+            Some(lsp::CompletionItemKind::CONSTANT) => '5',
+            Some(lsp::CompletionItemKind::METHOD) => '6',
+            Some(lsp::CompletionItemKind::FUNCTION) => '6',
+            Some(lsp::CompletionItemKind::CLASS) => '7',
+            Some(lsp::CompletionItemKind::MODULE) => '8',
+
+            _ => 'z',
         };
 
+        // Named arguments get higher priority
+        let argument_priority = if is_named_argument { '0' } else { '1' };
+
         item.sort_text = Some(format!(
-            "{}{}{}",
-            visibility_priority, kind_priority, item.label
+            "{}{}{}{}{}",
+            argument_priority, source_priority, visibility_priority, kind_priority, item.label
         ));
     }
 }
@@ -493,6 +507,7 @@ impl LspAdapter for PyrightLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<Value>> {
         // Provide minimal initialization options
         // Virtual environment configuration will be handled through workspace configuration
@@ -1689,7 +1704,14 @@ impl LspAdapter for PyLspAdapter {
         Self::SERVER_NAME
     }
 
-    async fn process_completions(&self, _items: &mut [lsp::CompletionItem]) {}
+    async fn process_completions(&self, items: &mut [lsp::CompletionItem]) {
+        for item in items {
+            let is_named_argument = item.label.ends_with('=');
+            let priority = if is_named_argument { '0' } else { '1' };
+            let sort_text = item.sort_text.take().unwrap_or_else(|| item.label.clone());
+            item.sort_text = Some(format!("{}{}", priority, sort_text));
+        }
+    }
 
     async fn label_for_completion(
         &self,
@@ -1951,6 +1973,7 @@ impl LspAdapter for BasedPyrightLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<Value>> {
         // Provide minimal initialization options
         // Virtual environment configuration will be handled through workspace configuration

crates/languages/src/python/brackets.scm 🔗

@@ -1,4 +1,12 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-(((string_start) @open (string_end) @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(((string_start) @open
+  (string_end) @close)
+  (#set! rainbow.exclude))

crates/languages/src/python/debugger.scm 🔗

@@ -1,43 +1,97 @@
 (identifier) @debug-variable
 (#eq? @debug-variable "self")
 
-(assignment left: (identifier) @debug-variable)
-(assignment left: (pattern_list (identifier) @debug-variable))
-(assignment left: (tuple_pattern (identifier) @debug-variable))
+(assignment
+  left: (identifier) @debug-variable)
 
-(augmented_assignment left: (identifier) @debug-variable)
+(assignment
+  left: (pattern_list
+    (identifier) @debug-variable))
 
-(for_statement left: (identifier) @debug-variable)
-(for_statement left: (pattern_list (identifier) @debug-variable))
-(for_statement left: (tuple_pattern (identifier) @debug-variable))
+(assignment
+  left: (tuple_pattern
+    (identifier) @debug-variable))
 
-(for_in_clause left: (identifier) @debug-variable)
-(for_in_clause left: (pattern_list (identifier) @debug-variable))
-(for_in_clause left: (tuple_pattern (identifier) @debug-variable))
+(augmented_assignment
+  left: (identifier) @debug-variable)
 
-(as_pattern (identifier) @debug-variable)
+(for_statement
+  left: (identifier) @debug-variable)
 
-(binary_operator left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(binary_operator right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(comparison_operator (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_statement
+  left: (pattern_list
+    (identifier) @debug-variable))
 
-(list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(tuple (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(set (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_statement
+  left: (tuple_pattern
+    (identifier) @debug-variable))
 
-(subscript value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_in_clause
+  left: (identifier) @debug-variable)
 
-(attribute object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_in_clause
+  left: (pattern_list
+    (identifier) @debug-variable))
 
-(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_in_clause
+  left: (tuple_pattern
+    (identifier) @debug-variable))
 
-(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(as_pattern
+  (identifier) @debug-variable)
 
-(argument_list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_operator
+  left: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(if_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_operator
+  right: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(while_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(comparison_operator
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(list
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(tuple
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(set
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(subscript
+  value: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(attribute
+  object: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(return_statement
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(parenthesized_expression
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(argument_list
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(if_statement
+  condition: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(while_statement
+  condition: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
 (block) @debug-scope
+
 (module) @debug-scope

crates/languages/src/python/highlights.scm 🔗

@@ -1,6 +1,8 @@
 ; Identifier naming conventions; these "soft conventions" should stay at the top of the file as they're often overridden
 (identifier) @variable
-(attribute attribute: (identifier) @property)
+
+(attribute
+  attribute: (identifier) @property)
 
 ; CamelCase for classes
 ((identifier) @type.class
@@ -10,45 +12,56 @@
 ((identifier) @constant
   (#match? @constant "^_*[A-Z][A-Z0-9_]*$"))
 
-(type (identifier) @type)
-(generic_type (identifier) @type)
+(type
+  (identifier) @type)
+
+(generic_type
+  (identifier) @type)
+
 (comment) @comment
+
 (string) @string
+
 (escape_sequence) @string.escape
 
 ; Type alias
-(type_alias_statement "type" @keyword)
+(type_alias_statement
+  "type" @keyword)
 
 ; TypeVar with constraints in type parameters
 (type
-  (tuple (identifier) @type)
-)
+  (tuple
+    (identifier) @type))
 
 ; Forward references
 (type
-  (string) @type
-)
-
+  (string) @type)
 
 ; Function calls
-
 (call
-  function: (attribute attribute: (identifier) @function.method.call))
+  function: (attribute
+    attribute: (identifier) @function.method.call))
+
 (call
   function: (identifier) @function.call)
 
-(decorator "@" @punctuation.special)
+(decorator
+  "@" @punctuation.special)
+
 (decorator
   "@" @punctuation.special
   [
     (identifier) @function.decorator
-    (attribute attribute: (identifier) @function.decorator)
-    (call function: (identifier) @function.decorator.call)
-    (call (attribute attribute: (identifier) @function.decorator.call))
+    (attribute
+      attribute: (identifier) @function.decorator)
+    (call
+      function: (identifier) @function.decorator.call)
+    (call
+      (attribute
+        attribute: (identifier) @function.decorator.call))
   ])
 
 ; Function and class definitions
-
 (function_definition
   name: (identifier) @function.definition)
 
@@ -69,15 +82,15 @@
 ; Function arguments
 (function_definition
   parameters: (parameters
-  [
-      (identifier) @variable.parameter; Simple parameters
+    [
+      (identifier) @variable.parameter ; Simple parameters
       (typed_parameter
         (identifier) @variable.parameter) ; Typed parameters
       (default_parameter
         name: (identifier) @variable.parameter) ; Default parameters
       (typed_default_parameter
         name: (identifier) @variable.parameter) ; Typed default parameters
-  ]))
+    ]))
 
 ; Keyword arguments
 (call
@@ -86,28 +99,30 @@
       name: (identifier) @function.kwargs)))
 
 ; Class definitions and calling: needs to come after the regex matching above
-
 (class_definition
   name: (identifier) @type.class.definition)
 
 (class_definition
   superclasses: (argument_list
-  (identifier) @type.class.inheritance))
+    (identifier) @type.class.inheritance))
 
 (call
   function: (identifier) @type.class.call
   (#match? @type.class.call "^_*[A-Z][A-Za-z0-9_]*$"))
 
 ; Builtins
-
 ((call
   function: (identifier) @function.builtin)
- (#any-of?
-   @function.builtin
-   "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" "vars" "zip" "__import__"))
+  (#any-of? @function.builtin
+    "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr"
+    "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec"
+    "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id"
+    "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview"
+    "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed"
+    "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type"
+    "vars" "zip" "__import__"))
 
 ; Literals
-
 [
   (true)
   (false)
@@ -124,10 +139,11 @@
 ] @number
 
 ; Self references
-
 [
-  (parameters (identifier) @variable.special)
-  (attribute (identifier) @variable.special)
+  (parameters
+    (identifier) @variable.special)
+  (attribute
+    (identifier) @variable.special)
   (#any-of? @variable.special "self" "cls")
 ]
 
@@ -152,37 +168,57 @@
 
 ; Docstrings.
 ([
-  (expression_statement (assignment))
+  (expression_statement
+    (assignment))
   (type_alias_statement)
 ]
-. (expression_statement (string) @string.doc)+)
+  .
+  (expression_statement
+    (string) @string.doc)+)
 
 (module
-  .(expression_statement (string) @string.doc)+)
+  .
+  (expression_statement
+    (string) @string.doc)+)
 
 (class_definition
-  body: (block .(expression_statement (string) @string.doc)+))
+  body: (block
+    .
+    (expression_statement
+      (string) @string.doc)+))
 
 (function_definition
   "async"?
   "def"
   name: (_)
   (parameters)?
-  body: (block .(expression_statement (string) @string.doc)+))
+  body: (block
+    .
+    (expression_statement
+      (string) @string.doc)+))
 
 (class_definition
   body: (block
-    . (comment) @comment*
-    . (expression_statement (string) @string.doc)+))
+    .
+    (comment) @comment*
+    .
+    (expression_statement
+      (string) @string.doc)+))
 
 (module
-  . (comment) @comment*
-  . (expression_statement (string) @string.doc)+)
+  .
+  (comment) @comment*
+  .
+  (expression_statement
+    (string) @string.doc)+)
 
 (class_definition
   body: (block
-    (expression_statement (assignment))
-    . (expression_statement (string) @string.doc)+))
+    (expression_statement
+      (assignment))
+    .
+    (expression_statement
+      (string) @string.doc)+))
 
 (class_definition
   body: (block
@@ -190,9 +226,11 @@
       name: (identifier) @function.method.constructor
       (#eq? @function.method.constructor "__init__")
       body: (block
-        (expression_statement (assignment))
-        . (expression_statement (string) @string.doc)+))))
-
+        (expression_statement
+          (assignment))
+        .
+        (expression_statement
+          (string) @string.doc)+))))
 
 [
   "-"
@@ -286,18 +324,23 @@
   "lambda"
 ] @keyword.definition
 
-(decorator (identifier) @attribute.builtin
+(decorator
+  (identifier) @attribute.builtin
   (#any-of? @attribute.builtin "classmethod" "staticmethod" "property"))
 
 ; Builtin types as identifiers
 [
   (call
     function: (identifier) @type.builtin)
-  (type (identifier) @type.builtin)
-  (generic_type (identifier) @type.builtin)
+  (type
+    (identifier) @type.builtin)
+  (generic_type
+    (identifier) @type.builtin)
   ; also check if type binary operator left identifier for union types
   (type
     (binary_operator
       left: (identifier) @type.builtin))
-  (#any-of? @type.builtin "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" "object" "range" "set" "slice" "str" "tuple")
+  (#any-of? @type.builtin
+    "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview"
+    "object" "range" "set" "slice" "str" "tuple")
 ]

crates/languages/src/python/imports.scm 🔗

@@ -1,32 +1,38 @@
 (import_statement
-    name: [
-        (dotted_name
-            ((identifier) @namespace ".")*
-            (identifier) @namespace .)
-        (aliased_import
-            name: (dotted_name
-                ((identifier) @namespace ".")*
-                (identifier) @namespace .))
-    ]) @wildcard @import
+  name: [
+    (dotted_name
+      ((identifier) @namespace
+        ".")*
+      (identifier) @namespace .)
+    (aliased_import
+      name: (dotted_name
+        ((identifier) @namespace
+          ".")*
+        (identifier) @namespace .))
+  ]) @wildcard @import
 
 (import_from_statement
-    module_name: [
-        (dotted_name
-            ((identifier) @namespace ".")*
-            (identifier) @namespace .)
-        (relative_import
-            (dotted_name
-                ((identifier) @namespace ".")*
-                (identifier) @namespace .)?)
-    ]
-    (wildcard_import)? @wildcard
-    name: [
-        (dotted_name
-            ((identifier) @namespace ".")*
-            (identifier) @name .)
-        (aliased_import
-            name: (dotted_name
-                ((identifier) @namespace ".")*
-                (identifier) @name .)
-            alias: (identifier) @alias)
-    ]?) @import
+  module_name: [
+    (dotted_name
+      ((identifier) @namespace
+        ".")*
+      (identifier) @namespace .)
+    (relative_import
+      (dotted_name
+        ((identifier) @namespace
+          ".")*
+        (identifier) @namespace .)?)
+  ]
+  (wildcard_import)? @wildcard
+  name: [
+    (dotted_name
+      ((identifier) @namespace
+        ".")*
+      (identifier) @name .)
+    (aliased_import
+      name: (dotted_name
+        ((identifier) @namespace
+          ".")*
+        (identifier) @name .)
+      alias: (identifier) @alias)
+  ]?) @import

crates/languages/src/python/indents.scm 🔗

@@ -1,17 +1,37 @@
-(_ "[" "]" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent
 
 (function_definition) @start.def
+
 (class_definition) @start.class
+
 (if_statement) @start.if
+
 (for_statement) @start.for
+
 (while_statement) @start.while
+
 (with_statement) @start.with
+
 (match_statement) @start.match
+
 (try_statement) @start.try
+
 (elif_clause) @start.elif
+
 (else_clause) @start.else
+
 (except_clause) @start.except
+
 (finally_clause) @start.finally
+
 (case_clause) @start.case

crates/languages/src/python/injections.scm 🔗

@@ -1,34 +1,26 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 ; SQL -----------------------------------------------------------------------------
-(
+([
+  ; function calls
+  (call
     [
-        ; function calls
-        (call
-            [
-                (attribute attribute: (identifier) @function_name)
-                (identifier) @function_name
-            ]
-            arguments: (argument_list
-                (comment) @comment
-                (string
-                    (string_content) @injection.content
-                )
-        ))
-
-        ; string variables
-        ((comment) @comment
-            .
-            (expression_statement
-                (assignment
-                    right: (string
-                        (string_content) @injection.content
-                    )
-                )
-        ))
+      (attribute
+        attribute: (identifier))
+      (identifier)
     ]
-    (#match? @comment "^(#|#\\s+)(?i:sql)\\s*$")
-    (#set! injection.language "sql")
-)
+    arguments: (argument_list
+      (comment) @_comment
+      (string
+        (string_content) @injection.content)))
+  ; string variables
+  ((comment) @_comment
+    .
+    (expression_statement
+      (assignment
+        right: (string
+          (string_content) @injection.content))))
+]
+  (#match? @_comment "^(#|#\\s+)(?i:sql)\\s*$")
+  (#set! injection.language "sql"))

crates/languages/src/python/outline.scm 🔗

@@ -1,11 +1,10 @@
 (decorator) @annotation
 
 (class_definition
-    "class" @context
-    name: (identifier) @name
-    ) @item
+  "class" @context
+  name: (identifier) @name) @item
 
 (function_definition
-    "async"? @context
-    "def" @context
-    name: (_) @name) @item
+  "async"? @context
+  "def" @context
+  name: (_) @name) @item

crates/languages/src/python/runnables.scm 🔗

@@ -1,151 +1,108 @@
 ; subclasses of unittest.TestCase or TestCase
-(
-    (class_definition
-        name: (identifier) @run @_unittest_class_name
-        superclasses: (argument_list
-            [(identifier) @_superclass
-                (attribute (identifier) @_superclass)]
-            )
-        (#eq? @_superclass "TestCase")
-        ) @_python-unittest-class
-    (#set! tag python-unittest-class)
-    )
+((class_definition
+  name: (identifier) @run @_unittest_class_name
+  superclasses: (argument_list
+    [
+      (identifier) @_superclass
+      (attribute
+        (identifier) @_superclass)
+    ])
+  (#eq? @_superclass "TestCase")) @_python-unittest-class
+  (#set! tag python-unittest-class))
 
 ; test methods whose names start with `test` in a TestCase
-(
-    (class_definition
-        name: (identifier) @_unittest_class_name
-        superclasses: (argument_list
-            [(identifier) @_superclass
-                (attribute (identifier) @_superclass)]
-            )
-        (#eq? @_superclass "TestCase")
-        body: (block
-            (function_definition
-                name: (identifier) @run @_unittest_method_name
-                (#match? @_unittest_method_name "^test.*")
-                ) @_python-unittest-method
-            (#set! tag python-unittest-method)
-            )
-        )
-    )
+(class_definition
+  name: (identifier) @_unittest_class_name
+  superclasses: (argument_list
+    [
+      (identifier) @_superclass
+      (attribute
+        (identifier) @_superclass)
+    ])
+  (#eq? @_superclass "TestCase")
+  body: (block
+    (function_definition
+      name: (identifier) @run @_unittest_method_name
+      (#match? @_unittest_method_name "^test.*")) @_python-unittest-method
+    (#set! tag python-unittest-method)))
 
 ; pytest functions
-(
-    (module
-        (function_definition
-            name: (identifier) @run @_pytest_method_name
-            (#match? @_pytest_method_name "^test_")
-            ) @_python-pytest-method
-        )
-    (#set! tag python-pytest-method)
-    )
+((module
+  (function_definition
+    name: (identifier) @run @_pytest_method_name
+    (#match? @_pytest_method_name "^test_")) @_python-pytest-method)
+  (#set! tag python-pytest-method))
 
 ; decorated pytest functions
-(
-    (module
-        (decorated_definition
-            (decorator)+ @_decorator
-            definition: (function_definition
-                name: (identifier) @run @_pytest_method_name
-                (#match? @_pytest_method_name "^test_")
-                )
-            ) @_python-pytest-method
-        )
-    (#set! tag python-pytest-method)
-    )
-
+((module
+  (decorated_definition
+    (decorator)+ @_decorator
+    definition: (function_definition
+      name: (identifier) @run @_pytest_method_name
+      (#match? @_pytest_method_name "^test_"))) @_python-pytest-method)
+  (#set! tag python-pytest-method))
 
 ; pytest classes
-(
-    (module
-        (class_definition
-            name: (identifier) @run @_pytest_class_name
-            (#match? @_pytest_class_name "^Test")
-            )
-        (#set! tag python-pytest-class)
-        )
-    )
-
+(module
+  (class_definition
+    name: (identifier) @run @_pytest_class_name
+    (#match? @_pytest_class_name "^Test"))
+  (#set! tag python-pytest-class))
 
 ; decorated pytest classes
-(
-    (module
-        (decorated_definition
-            (decorator)+ @_decorator
-            definition: (class_definition
-                name: (identifier) @run @_pytest_class_name
-                (#match? @_pytest_class_name "^Test")
-                )
-            )
-        (#set! tag python-pytest-class)
-        )
-    )
-
+(module
+  (decorated_definition
+    (decorator)+ @_decorator
+    definition: (class_definition
+      name: (identifier) @run @_pytest_class_name
+      (#match? @_pytest_class_name "^Test")))
+  (#set! tag python-pytest-class))
 
 ; pytest class methods
-(
-    (module
-        (class_definition
-            name: (identifier) @_pytest_class_name
-            (#match? @_pytest_class_name "^Test")
-            body: (block
-                [(decorated_definition
-                    (decorator)+ @_decorator
-                    definition: (function_definition
-                        name: (identifier) @run @_pytest_method_name
-                        (#match? @_pytest_method_name "^test_")
-                        )
-                    )
-                (function_definition
-                    name: (identifier) @run @_pytest_method_name
-                    (#match? @_pytest_method_name "^test")
-                    )
-                ] @_python-pytest-method)
-            (#set! tag python-pytest-method)
-            )
-        )
-    )
+(module
+  (class_definition
+    name: (identifier) @_pytest_class_name
+    (#match? @_pytest_class_name "^Test")
+    body: (block
+      [
+        (decorated_definition
+          (decorator)+ @_decorator
+          definition: (function_definition
+            name: (identifier) @run @_pytest_method_name
+            (#match? @_pytest_method_name "^test_")))
+        (function_definition
+          name: (identifier) @run @_pytest_method_name
+          (#match? @_pytest_method_name "^test"))
+      ] @_python-pytest-method)
+    (#set! tag python-pytest-method)))
 
 ; decorated pytest class methods
-(
-    (module
-        (decorated_definition
+(module
+  (decorated_definition
+    (decorator)+ @_decorator
+    definition: (class_definition
+      name: (identifier) @_pytest_class_name
+      (#match? @_pytest_class_name "^Test")
+      body: (block
+        [
+          (decorated_definition
             (decorator)+ @_decorator
-            definition: (class_definition
-                name: (identifier) @_pytest_class_name
-                (#match? @_pytest_class_name "^Test")
-                body: (block
-                    [(decorated_definition
-                        (decorator)+ @_decorator
-                        definition: (function_definition
-                            name: (identifier) @run @_pytest_method_name
-                            (#match? @_pytest_method_name "^test_")
-                            )
-                        )
-                    (function_definition
-                        name: (identifier) @run @_pytest_method_name
-                        (#match? @_pytest_method_name "^test")
-                        )
-                    ] @_python-pytest-method)
-                (#set! tag python-pytest-method)
-                )
-            )
-        )
-    )
+            definition: (function_definition
+              name: (identifier) @run @_pytest_method_name
+              (#match? @_pytest_method_name "^test_")))
+          (function_definition
+            name: (identifier) @run @_pytest_method_name
+            (#match? @_pytest_method_name "^test"))
+        ] @_python-pytest-method)
+      (#set! tag python-pytest-method))))
 
 ; module main method
-(
-    (module
-        (if_statement
-            condition: (comparison_operator
-                (identifier) @run @_lhs
-                operators: "=="
-                (string) @_rhs
-                )
-            (#eq? @_lhs "__name__")
-            (#match? @_rhs "^[\"']__main__[\"']$")
-            (#set! tag python-module-main-method)
-            )
-        )
-    )
+(module
+  (if_statement
+    condition: (comparison_operator
+      (identifier) @run @_lhs
+      operators: "=="
+      (string) @_rhs)
+    (#eq? @_lhs "__name__")
+    (#match? @_rhs "^[\"']__main__[\"']$")
+    (#set! tag python-module-main-method)))

crates/languages/src/regex/brackets.scm 🔗

@@ -1,3 +1,8 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)

crates/languages/src/rust/brackets.scm 🔗

@@ -1,7 +1,23 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-("<" @open ">" @close)
-(closure_parameters "|" @open "|" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+("<" @open
+  ">" @close)
+
+(closure_parameters
+  "|" @open
+  "|" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))

crates/languages/src/rust/debugger.scm 🔗

@@ -1,50 +1,85 @@
 (metavariable) @debug-variable
 
-(parameter (identifier) @debug-variable)
+(parameter
+  (identifier) @debug-variable)
 
 (self) @debug-variable
 
-(static_item (identifier) @debug-variable)
-(const_item (identifier) @debug-variable)
+(static_item
+  (identifier) @debug-variable)
 
-(let_declaration pattern: (identifier) @debug-variable)
+(const_item
+  (identifier) @debug-variable)
 
-(let_condition (identifier) @debug-variable)
+(let_declaration
+  pattern: (identifier) @debug-variable)
 
-(match_arm (identifier) @debug-variable)
+(let_condition
+  (identifier) @debug-variable)
 
-(for_expression (identifier) @debug-variable)
+(match_arm
+  (identifier) @debug-variable)
 
-(closure_parameters (identifier) @debug-variable)
+(for_expression
+  (identifier) @debug-variable)
 
-(assignment_expression (identifier) @debug-variable)
+(closure_parameters
+  (identifier) @debug-variable)
 
-(field_expression (identifier) @debug-variable)
+(assignment_expression
+  (identifier) @debug-variable)
 
-(binary_expression (identifier) @debug-variable
+(field_expression
+  (identifier) @debug-variable)
+
+(binary_expression
+  (identifier) @debug-variable
   (#not-match? @debug-variable "^[A-Z]"))
 
-(reference_expression (identifier) @debug-variable
+(reference_expression
+  (identifier) @debug-variable
   (#not-match? @debug-variable "^[A-Z]"))
 
-(array_expression (identifier) @debug-variable)
-(tuple_expression (identifier) @debug-variable)
-(return_expression (identifier) @debug-variable)
-(await_expression (identifier) @debug-variable)
-(try_expression (identifier) @debug-variable)
-(index_expression (identifier) @debug-variable)
-(range_expression (identifier) @debug-variable)
-(unary_expression (identifier) @debug-variable)
+(array_expression
+  (identifier) @debug-variable)
+
+(tuple_expression
+  (identifier) @debug-variable)
+
+(return_expression
+  (identifier) @debug-variable)
+
+(await_expression
+  (identifier) @debug-variable)
+
+(try_expression
+  (identifier) @debug-variable)
+
+(index_expression
+  (identifier) @debug-variable)
+
+(range_expression
+  (identifier) @debug-variable)
+
+(unary_expression
+  (identifier) @debug-variable)
+
+(if_expression
+  (identifier) @debug-variable)
 
-(if_expression (identifier) @debug-variable)
-(while_expression (identifier) @debug-variable)
+(while_expression
+  (identifier) @debug-variable)
 
-(parenthesized_expression (identifier) @debug-variable)
+(parenthesized_expression
+  (identifier) @debug-variable)
 
-(arguments (identifier) @debug-variable
+(arguments
+  (identifier) @debug-variable
   (#not-match? @debug-variable "^[A-Z]"))
 
-(macro_invocation (token_tree (identifier) @debug-variable
-  (#not-match? @debug-variable "^[A-Z]")))
+(macro_invocation
+  (token_tree
+    (identifier) @debug-variable
+    (#not-match? @debug-variable "^[A-Z]")))
 
 (block) @debug-scope

crates/languages/src/rust/highlights.scm 🔗

@@ -1,17 +1,33 @@
 (identifier) @variable
+
 (metavariable) @variable
+
 (type_identifier) @type
+
 (fragment_specifier) @type
+
 (primitive_type) @type.builtin
+
 (self) @variable.special
+
 (field_identifier) @property
+
 (shorthand_field_identifier) @property
 
-(trait_item name: (type_identifier) @type.interface)
-(impl_item trait: (type_identifier) @type.interface)
-(abstract_type trait: (type_identifier) @type.interface)
-(dynamic_type trait: (type_identifier) @type.interface)
-(trait_bounds (type_identifier) @type.interface)
+(trait_item
+  name: (type_identifier) @type.interface)
+
+(impl_item
+  trait: (type_identifier) @type.interface)
+
+(abstract_type
+  trait: (type_identifier) @type.interface)
+
+(dynamic_type
+  trait: (type_identifier) @type.interface)
+
+(trait_bounds
+  (type_identifier) @type.interface)
 
 (call_expression
   function: [
@@ -31,8 +47,11 @@
       field: (field_identifier) @function.method)
   ])
 
-(function_item name: (identifier) @function.definition)
-(function_signature_item name: (identifier) @function.definition)
+(function_item
+  name: (identifier) @function.definition)
+
+(function_signature_item
+  name: (identifier) @function.definition)
 
 (macro_invocation
   macro: [
@@ -48,17 +67,17 @@
   name: (identifier) @function.special.definition)
 
 ; Identifier conventions
-
 ; Assume uppercase names are types/enum-constructors
 ((identifier) @type
- (#match? @type "^[A-Z]"))
+  (#match? @type "^[A-Z]"))
 
 ; Assume all-caps names are constants
 ((identifier) @constant
- (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
+  (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
 
 ; Ensure enum variants are highlighted correctly regardless of naming convention
-(enum_variant name: (identifier) @type)
+(enum_variant
+  name: (identifier) @type)
 
 [
   "("
@@ -81,9 +100,7 @@
   "::"
 ] @punctuation.delimiter
 
-[
-  "#"
-] @punctuation.special
+"#" @punctuation.special
 
 [
   "as"
@@ -131,7 +148,7 @@
 ] @keyword.control
 
 (for_expression
-  ("for" @keyword.control))
+  "for" @keyword.control)
 
 [
   (string_literal)
@@ -154,8 +171,10 @@
 ] @comment
 
 [
-  (line_comment (doc_comment))
-  (block_comment (doc_comment))
+  (line_comment
+    (doc_comment))
+  (block_comment
+    (doc_comment))
 ] @comment.doc
 
 [
@@ -198,25 +217,44 @@
 ] @operator
 
 ; Avoid highlighting these as operators when used in doc comments.
-(unary_expression "!" @operator)
+(unary_expression
+  "!" @operator)
+
 operator: "/" @operator
 
 (lifetime
   "'" @lifetime
   (identifier) @lifetime)
 
-(parameter (identifier) @variable.parameter)
-
-(attribute_item (attribute [
-  (identifier) @attribute
-  (scoped_identifier name: (identifier) @attribute)
-  (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$"))
-  (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$"))
-]))
-
-(inner_attribute_item (attribute [
-  (identifier) @attribute
-  (scoped_identifier name: (identifier) @attribute)
-  (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$"))
-  (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$"))
-]))
+(parameter
+  (identifier) @variable.parameter)
+
+(attribute_item
+  (attribute
+    [
+      (identifier) @attribute
+      (scoped_identifier
+        name: (identifier) @attribute)
+      (token_tree
+        (identifier) @attribute
+        (#match? @attribute "^[a-z\\d_]*$"))
+      (token_tree
+        (identifier) @none
+        "::"
+        (#match? @none "^[a-z\\d_]*$"))
+    ]))
+
+(inner_attribute_item
+  (attribute
+    [
+      (identifier) @attribute
+      (scoped_identifier
+        name: (identifier) @attribute)
+      (token_tree
+        (identifier) @attribute
+        (#match? @attribute "^[a-z\\d_]*$"))
+      (token_tree
+        (identifier) @none
+        "::"
+        (#match? @none "^[a-z\\d_]*$"))
+    ]))

crates/languages/src/rust/imports.scm 🔗

@@ -1,27 +1,29 @@
 (use_declaration) @import
 
 (scoped_use_list
-    path: (_) @namespace
-    list: (_) @list)
+  path: (_) @namespace
+  list: (_) @list)
 
 (scoped_identifier
-    path: (_) @namespace
-    name: (identifier) @name)
+  path: (_) @namespace
+  name: (identifier) @name)
 
-(use_list (identifier) @name)
+(use_list
+  (identifier) @name)
 
-(use_declaration (identifier) @name)
+(use_declaration
+  (identifier) @name)
 
 (use_as_clause
-    path: (scoped_identifier
-       path: (_) @namespace
-       name: (_) @name)
-    alias: (_) @alias)
+  path: (scoped_identifier
+    path: (_) @namespace
+    name: (_) @name)
+  alias: (_) @alias)
 
 (use_as_clause
-    path: (identifier) @name
-    alias: (_) @alias)
+  path: (identifier) @name
+  alias: (_) @alias)
 
 (use_wildcard
-    (_)? @namespace
-    "*" @wildcard)
+  (_)? @namespace
+  "*" @wildcard)

crates/languages/src/rust/indents.scm 🔗

@@ -1,14 +1,26 @@
 [
-    ((where_clause) _ @end)
-    (field_expression)
-    (call_expression)
-    (assignment_expression)
-    (let_declaration)
-    (let_chain)
-    (await_expression)
+  ((where_clause)
+    _ @end)
+  (field_expression)
+  (call_expression)
+  (assignment_expression)
+  (let_declaration)
+  (let_chain)
+  (await_expression)
 ] @indent
 
-(_ "[" "]" @end) @indent
-(_ "<" ">" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
+
+(_
+  "<"
+  ">" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent

crates/languages/src/rust/injections.scm 🔗

@@ -1,64 +1,67 @@
 ([
-    (line_comment)
-    (block_comment)
+  (line_comment)
+  (block_comment)
 ] @injection.content
-    (#set! injection.language "comment"))
+  (#set! injection.language "comment"))
 
 (macro_invocation
-    macro: [
-        ((identifier) @_macro_name)
-        (scoped_identifier (identifier) @_macro_name .)
-    ]
-    (#not-any-of? @_macro_name "view" "html")
-    (token_tree) @injection.content
-    (#set! injection.language "rust"))
+  macro: [
+    (identifier) @_macro_name
+    (scoped_identifier
+      (identifier) @_macro_name .)
+  ]
+  (#not-any-of? @_macro_name "view" "html")
+  (token_tree) @injection.content
+  (#set! injection.language "rust"))
 
 ; we need a better way for the leptos extension to declare that
 ; it wants to inject inside of rust, instead of modifying the rust
 ; injections to support leptos injections
 (macro_invocation
-    macro: [
-        ((identifier) @_macro_name)
-        (scoped_identifier (identifier) @_macro_name .)
-    ]
-    (#any-of? @_macro_name "view" "html")
-    (token_tree) @injection.content
-    (#set! injection.language "rstml")
-    )
+  macro: [
+    (identifier) @_macro_name
+    (scoped_identifier
+      (identifier) @_macro_name .)
+  ]
+  (#any-of? @_macro_name "view" "html")
+  (token_tree) @injection.content
+  (#set! injection.language "rstml"))
 
 (macro_invocation
-    macro: [
-        ((identifier) @_macro_name)
-        (scoped_identifier (identifier) @_macro_name .)
-    ]
-    (#any-of? @_macro_name "sql")
-    (_) @injection.content
-    (#set! injection.language "sql")
-    )
+  macro: [
+    (identifier) @_macro_name
+    (scoped_identifier
+      (identifier) @_macro_name .)
+  ]
+  (#any-of? @_macro_name "sql")
+  (_) @injection.content
+  (#set! injection.language "sql"))
 
 ; lazy_regex
 (macro_invocation
-    macro: [
-        ((identifier) @_macro_name)
-        (scoped_identifier (identifier) @_macro_name .)
-    ]
-    (token_tree [
-        (string_literal (string_content) @injection.content)
-        (raw_string_literal (string_content) @injection.content)
+  macro: [
+    (identifier) @_macro_name
+    (scoped_identifier
+      (identifier) @_macro_name .)
+  ]
+  (token_tree
+    [
+      (string_literal
+        (string_content) @injection.content)
+      (raw_string_literal
+        (string_content) @injection.content)
     ])
-    (#set! injection.language "regex")
-    (#any-of? @_macro_name "regex" "bytes_regex")
-)
+  (#set! injection.language "regex")
+  (#any-of? @_macro_name "regex" "bytes_regex"))
 
 (call_expression
-    function: (scoped_identifier) @_fn_path
-    arguments: (arguments
-        [
-            (string_literal (string_content) @injection.content)
-            (raw_string_literal (string_content) @injection.content)
-        ]
-    )
-
-    (#match? @_fn_path ".*Regex(Builder)?::new")
-    (#set! injection.language "regex")
-)
+  function: (scoped_identifier) @_fn_path
+  arguments: (arguments
+    [
+      (string_literal
+        (string_content) @injection.content)
+      (raw_string_literal
+        (string_content) @injection.content)
+    ])
+  (#match? @_fn_path ".*Regex(Builder)?::new")
+  (#set! injection.language "regex"))

crates/languages/src/rust/outline.scm 🔗

@@ -1,73 +1,81 @@
 (attribute_item) @annotation
+
 (line_comment) @annotation
 
 (struct_item
-    (visibility_modifier)? @context
-    "struct" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "struct" @context
+  name: (_) @name) @item
 
 (enum_item
-    (visibility_modifier)? @context
-    "enum" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "enum" @context
+  name: (_) @name) @item
 
 (enum_variant
-    (visibility_modifier)? @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  name: (_) @name) @item
 
 (impl_item
-    "impl" @context
-    trait: (_)? @name
-    "for"? @context
-    type: (_) @name
-    body: (_ . "{" @open "}" @close .)) @item
+  "impl" @context
+  trait: (_)? @name
+  "for"? @context
+  type: (_) @name
+  body: (_
+    .
+    "{" @open
+    "}" @close .)) @item
 
 (trait_item
-    (visibility_modifier)? @context
-    "trait" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "trait" @context
+  name: (_) @name) @item
 
 (function_item
-    (visibility_modifier)? @context
-    (function_modifiers)? @context
-    "fn" @context
-    name: (_) @name
-    body: (_ . "{" @open "}" @close .)) @item
+  (visibility_modifier)? @context
+  (function_modifiers)? @context
+  "fn" @context
+  name: (_) @name
+  body: (_
+    .
+    "{" @open
+    "}" @close .)) @item
 
 (function_signature_item
-    (visibility_modifier)? @context
-    (function_modifiers)? @context
-    "fn" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  (function_modifiers)? @context
+  "fn" @context
+  name: (_) @name) @item
 
 (macro_definition
-    . "macro_rules!" @context
-    name: (_) @name) @item
+  .
+  "macro_rules!" @context
+  name: (_) @name) @item
 
 (mod_item
-    (visibility_modifier)? @context
-    "mod" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "mod" @context
+  name: (_) @name) @item
 
 (type_item
-    (visibility_modifier)? @context
-    "type" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "type" @context
+  name: (_) @name) @item
 
 (associated_type
-    "type" @context
-    name: (_) @name) @item
+  "type" @context
+  name: (_) @name) @item
 
 (const_item
-    (visibility_modifier)? @context
-    "const" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "const" @context
+  name: (_) @name) @item
 
 (static_item
-    (visibility_modifier)? @context
-    "static" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "static" @context
+  name: (_) @name) @item
 
 (field_declaration
-    (visibility_modifier)? @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  name: (_) @name) @item

crates/languages/src/rust/runnables.scm 🔗

@@ -1,92 +1,75 @@
 ; Rust mod test
-(
-    (attribute_item (attribute
-        (
-          (identifier) @_attribute)
-          arguments: (
-              (token_tree (identifier) @_test)
-              (#eq? @_test "test")
-          )
-        )
-        (#eq? @_attribute "cfg")
-    )
-    .
-    (mod_item
-        name: (_) @run
-    )
-    (#set! tag rust-mod-test)
-)
+((attribute_item
+  (attribute
+    (identifier) @_attribute
+    arguments: ((token_tree
+      (identifier) @_test)
+      (#eq? @_test "test")))
+  (#eq? @_attribute "cfg"))
+  .
+  (mod_item
+    name: (_) @run)
+  (#set! tag rust-mod-test))
 
 ; Rust test
-(
-    (
-        (attribute_item (attribute
-            [((identifier) @_attribute)
-                (scoped_identifier (identifier) @_attribute)
-                ])
-            (#match? @_attribute "test")
-        ) @_start
-        .
-        (attribute_item) *
-        .
-        [(line_comment) (block_comment)] *
-        .
-        (function_item
-            name: (_) @run @_test_name
-            body: _
-        ) @_end
-    )
-    (#set! tag rust-test)
-)
+(((attribute_item
+  (attribute
+    [
+      (identifier) @_attribute
+      (scoped_identifier
+        (identifier) @_attribute)
+    ])
+  (#match? @_attribute "test")) @_start
+  .
+  (attribute_item)*
+  .
+  [
+    (line_comment)
+    (block_comment)
+  ]*
+  .
+  (function_item
+    name: (_) @run @_test_name
+    body: _) @_end)
+  (#set! tag rust-test))
 
 ; Rust doc test
-(
-    (
-        (line_comment) *
-        (line_comment
-            doc: (_) @_comment_content
-        ) @_start @run
-        (#match? @_comment_content "```")
-        .
-        (line_comment) *
-        .
-        (line_comment
-            doc: (_) @_end_comment_content
-        ) @_end_code_block
-        (#match? @_end_comment_content "```")
-        .
-        (line_comment) *
-        (attribute_item) *
-        .
-        [(function_item
-            name: (_)  @_doc_test_name
-            body: _
-        ) (function_signature_item
-            name: (_) @_doc_test_name
-        ) (struct_item
-            name: (_) @_doc_test_name
-        ) (enum_item
-            name: (_) @_doc_test_name
-            body: _
-        ) (
-            (attribute_item) ?
-            (macro_definition
-                name: (_) @_doc_test_name)
-        ) (mod_item
-            name: (_) @_doc_test_name
-        )] @_end
-    )
-    (#set! tag rust-doc-test)
-)
+(((line_comment)*
+  (line_comment
+    doc: (_) @_comment_content) @_start @run
+  (#match? @_comment_content "```")
+  .
+  (line_comment)*
+  .
+  (line_comment
+    doc: (_) @_end_comment_content) @_end_code_block
+  (#match? @_end_comment_content "```")
+  .
+  (line_comment)*
+  (attribute_item)*
+  .
+  [
+    (function_item
+      name: (_) @_doc_test_name
+      body: _)
+    (function_signature_item
+      name: (_) @_doc_test_name)
+    (struct_item
+      name: (_) @_doc_test_name)
+    (enum_item
+      name: (_) @_doc_test_name
+      body: _)
+    ((attribute_item)?
+      (macro_definition
+        name: (_) @_doc_test_name))
+    (mod_item
+      name: (_) @_doc_test_name)
+  ] @_end)
+  (#set! tag rust-doc-test))
 
 ; Rust main function
-(
-    (
-        (function_item
-            name: (_) @run
-            body: _
-        ) @_rust_main_function_end
-        (#eq? @run "main")
-    )
-    (#set! tag rust-main)
-)
+(((function_item
+  name: (_) @run
+  body: _) @_rust_main_function_end
+  (#eq? @run "main"))
+  (#set! tag rust-main))

crates/languages/src/rust/textobjects.scm 🔗

@@ -2,50 +2,73 @@
 (function_signature_item) @function.around
 
 (function_item
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}" )) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 ; classes
 (struct_item
-    body: (_
-        ["{" "("]?
-        [(_) ","?]* @class.inside
-        ["}" ")"]? )) @class.around
+  body: (_
+    [
+      "{"
+      "("
+    ]?
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    [
+      "}"
+      ")"
+    ]?)) @class.around
 
 (enum_item
-   body: (_
-       "{"
-       [(_) ","?]* @class.inside
-       "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (union_item
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (trait_item
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (impl_item
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (mod_item
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 ; comments
-
 (line_comment)+ @comment.around
 
 (block_comment) @comment.around

crates/languages/src/tailwind.rs 🔗

@@ -139,6 +139,7 @@ impl LspAdapter for TailwindLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         Ok(Some(json!({
             "provideFormatter": true,

crates/languages/src/tailwindcss.rs 🔗

@@ -135,6 +135,7 @@ impl LspAdapter for TailwindCssLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         Ok(Some(json!({
             "provideFormatter": true

crates/languages/src/tsx/brackets.scm 🔗

@@ -1,11 +1,35 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-("<" @open ">" @close)
-("<" @open "/>" @close)
-("</" @open ">" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
-(("`" @open "`" @close) (#set! rainbow.exclude))
-
-((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+("<" @open
+  ">" @close)
+
+("<" @open
+  "/>" @close)
+
+("</" @open
+  ">" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))
+
+(("`" @open
+  "`" @close)
+  (#set! rainbow.exclude))
+
+((jsx_element
+  (jsx_opening_element) @open
+  (jsx_closing_element) @close)
+  (#set! newline.only)
+  (#set! rainbow.exclude))

crates/languages/src/tsx/debugger.scm 🔗

@@ -1,25 +1,55 @@
-(lexical_declaration (variable_declarator name: (identifier) @debug-variable))
+(lexical_declaration
+  (variable_declarator
+    name: (identifier) @debug-variable))
 
-(for_in_statement left: (identifier) @debug-variable)
-(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable)))
+(for_in_statement
+  left: (identifier) @debug-variable)
 
-(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_statement
+  initializer: (lexical_declaration
+    (variable_declarator
+      name: (identifier) @debug-variable)))
 
-(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_expression
+  left: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_expression
+  right: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(unary_expression
+  argument: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(jsx_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(update_expression
+  argument: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(return_statement
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(parenthesized_expression
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(jsx_expression
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(array
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(pair
+  value: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(member_expression
+  object: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
 (statement_block) @debug-scope
+
 (program) @debug-scope

crates/languages/src/tsx/highlights.scm 🔗

@@ -1,56 +1,33 @@
 ; Variables
-
 (identifier) @variable
 
 (call_expression
   function: (member_expression
     object: (identifier) @type
-    (#any-of?
-      @type
-      "Promise"
-      "Array"
-      "Object"
-      "Map"
-      "Set"
-      "WeakMap"
-      "WeakSet"
-      "Date"
-      "Error"
-      "TypeError"
-      "RangeError"
-      "SyntaxError"
-      "ReferenceError"
-      "EvalError"
-      "URIError"
-      "RegExp"
-      "Function"
-      "Number"
-      "String"
-      "Boolean"
-      "Symbol"
-      "BigInt"
-      "Proxy"
-      "ArrayBuffer"
-      "DataView"
-    )
-  )
-)
+    (#any-of? @type
+      "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError"
+      "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function"
+      "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView")))
 
 ; Properties
-
 (property_identifier) @property
+
 (shorthand_property_identifier) @property
+
 (shorthand_property_identifier_pattern) @property
+
 (private_property_identifier) @property
 
 ; Function and method calls
-
 (call_expression
   function: (identifier) @function)
 
 (call_expression
   function: (member_expression
-    property: [(property_identifier) (private_property_identifier)] @function.method))
+    property: [
+      (property_identifier)
+      (private_property_identifier)
+    ] @function.method))
 
 (new_expression
   constructor: (identifier) @type)
@@ -59,36 +36,58 @@
   module: (identifier) @type)
 
 ; Function and method definitions
-
 (function_expression
   name: (identifier) @function)
+
 (function_declaration
   name: (identifier) @function)
+
 (method_definition
-  name: [(property_identifier) (private_property_identifier)] @function.method)
+  name: [
+    (property_identifier)
+    (private_property_identifier)
+  ] @function.method)
+
 (method_definition
-    name: (property_identifier) @constructor
-    (#eq? @constructor "constructor"))
+  name: (property_identifier) @constructor
+  (#eq? @constructor "constructor"))
 
 (pair
-  key: [(property_identifier) (private_property_identifier)] @function.method
-  value: [(function_expression) (arrow_function)])
+  key: [
+    (property_identifier)
+    (private_property_identifier)
+  ] @function.method
+  value: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (assignment_expression
   left: (member_expression
-    property: [(property_identifier) (private_property_identifier)] @function.method)
-  right: [(function_expression) (arrow_function)])
+    property: [
+      (property_identifier)
+      (private_property_identifier)
+    ] @function.method)
+  right: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (variable_declarator
   name: (identifier) @function
-  value: [(function_expression) (arrow_function)])
+  value: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (assignment_expression
   left: (identifier) @function
-  right: [(function_expression) (arrow_function)])
+  right: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 ; Parameters
-
 (required_parameter
   (identifier) @variable.parameter)
 
@@ -122,9 +121,10 @@
   name: (identifier) @variable.parameter)
 
 ; Special identifiers
-
 (type_annotation) @type
+
 (type_identifier) @type
+
 (predefined_type) @type.builtin
 
 (type_alias_declaration
@@ -153,12 +153,12 @@
   (identifier)
   (shorthand_property_identifier)
   (shorthand_property_identifier_pattern)
- ] @constant
- (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
+] @constant
+  (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
 
 ; Literals
-
 (this) @variable.special
+
 (super) @variable.special
 
 [
@@ -182,11 +182,12 @@
 (escape_sequence) @string.escape
 
 (regex) @string.regex
+
 (regex_flags) @keyword.operator.regex
+
 (number) @number
 
 ; Tokens
-
 [
   ";"
   "?."
@@ -244,7 +245,8 @@
   "..."
 ] @operator
 
-(regex "/" @string.regex)
+(regex
+  "/" @string.regex)
 
 [
   "("
@@ -253,14 +255,13 @@
   "]"
   "{"
   "}"
-]  @punctuation.bracket
+] @punctuation.bracket
 
 (ternary_expression
   [
     "?"
     ":"
-  ] @operator
-)
+  ] @operator)
 
 ; Keywords
 [
@@ -334,7 +335,8 @@
   "yield"
 ] @keyword.control
 
-(switch_default "default" @keyword.control)
+(switch_default
+  "default" @keyword.control)
 
 (template_substitution
   "${" @punctuation.special
@@ -352,31 +354,32 @@
   "<" @punctuation.bracket
   ">" @punctuation.bracket)
 
-(decorator "@" @punctuation.special)
+(decorator
+  "@" @punctuation.special)
 
 (union_type
-  ("|") @punctuation.special)
+  "|" @punctuation.special)
 
 (intersection_type
-  ("&") @punctuation.special)
+  "&" @punctuation.special)
 
 (type_annotation
-  (":") @punctuation.special)
+  ":" @punctuation.special)
 
 (index_signature
-  (":") @punctuation.special)
+  ":" @punctuation.special)
 
 (type_predicate_annotation
-  (":") @punctuation.special)
+  ":" @punctuation.special)
 
 (public_field_definition
-  ("?") @punctuation.special)
+  "?" @punctuation.special)
 
 (property_signature
-  ("?") @punctuation.special)
+  "?" @punctuation.special)
 
 (method_signature
-  ("?") @punctuation.special)
+  "?" @punctuation.special)
 
 (optional_parameter
   ([
@@ -384,44 +387,66 @@
     ":"
   ]) @punctuation.special)
 
-
-
 (jsx_opening_element
   [
     (identifier) @type
     (member_expression
       object: (identifier) @type
-      property: (property_identifier) @type
-    )
-  ]
-)
+      property: (property_identifier) @type)
+  ])
+
 (jsx_closing_element
   [
     (identifier) @type
     (member_expression
       object: (identifier) @type
-      property: (property_identifier) @type
-    )
-  ]
-)
+      property: (property_identifier) @type)
+  ])
+
 (jsx_self_closing_element
   [
     (identifier) @type
     (member_expression
       object: (identifier) @type
-      property: (property_identifier) @type
-    )
-  ]
-)
-
-(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
-(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
-(jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
-
-(jsx_attribute (property_identifier) @attribute.jsx)
-(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx)
-(jsx_closing_element (["</" ">"]) @punctuation.bracket.jsx)
-(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx)
-(jsx_attribute "=" @punctuation.delimiter.jsx)
+      property: (property_identifier) @type)
+  ])
+
+(jsx_opening_element
+  (identifier) @tag.jsx
+  (#match? @tag.jsx "^[a-z][^.]*$"))
+
+(jsx_closing_element
+  (identifier) @tag.jsx
+  (#match? @tag.jsx "^[a-z][^.]*$"))
+
+(jsx_self_closing_element
+  (identifier) @tag.jsx
+  (#match? @tag.jsx "^[a-z][^.]*$"))
+
+(jsx_attribute
+  (property_identifier) @attribute.jsx)
+
+(jsx_opening_element
+  ([
+    "<"
+    ">"
+  ]) @punctuation.bracket.jsx)
+
+(jsx_closing_element
+  ([
+    "</"
+    ">"
+  ]) @punctuation.bracket.jsx)
+
+(jsx_self_closing_element
+  ([
+    "<"
+    "/>"
+  ]) @punctuation.bracket.jsx)
+
+(jsx_attribute
+  "=" @punctuation.delimiter.jsx)
+
 (jsx_text) @text.jsx
+
 (html_character_reference) @string.special

crates/languages/src/tsx/imports.scm 🔗

@@ -1,14 +1,16 @@
 (import_statement
-    import_clause: (import_clause
-        [
-            (identifier) @name
-            (named_imports
-                (import_specifier
-                    name: (_) @name
-                    alias: (_)? @alias))
-        ])
-    source: (string (string_fragment) @source)) @import
+  import_clause: (import_clause
+    [
+      (identifier) @name
+      (named_imports
+        (import_specifier
+          name: (_) @name
+          alias: (_)? @alias))
+    ])
+  source: (string
+    (string_fragment) @source)) @import
 
 (import_statement
-    !import_clause
-    source: (string (string_fragment) @source @wildcard)) @import
+  !import_clause
+  source: (string
+    (string_fragment) @source @wildcard)) @import

crates/languages/src/tsx/indents.scm 🔗

@@ -1,20 +1,32 @@
 [
-    (call_expression)
-    (assignment_expression)
-    (member_expression)
-    (lexical_declaration)
-    (variable_declaration)
-    (assignment_expression)
-    (if_statement)
-    (for_statement)
+  (call_expression)
+  (assignment_expression)
+  (member_expression)
+  (lexical_declaration)
+  (variable_declaration)
+  (assignment_expression)
+  (if_statement)
+  (for_statement)
 ] @indent
 
-(_ "[" "]" @end) @indent
-(_ "<" ">" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
 
-(jsx_opening_element ">" @end) @indent
+(_
+  "<"
+  ">" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent
+
+(jsx_opening_element
+  ">" @end) @indent
 
 (jsx_element
   (jsx_opening_element) @start

crates/languages/src/tsx/injections.scm 🔗

@@ -1,6 +1,5 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 (((comment) @_jsdoc_comment
   (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content
@@ -10,119 +9,137 @@
   (#set! injection.language "regex"))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "css")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "css")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
   function: (member_expression
-    object: (identifier) @_obj (#eq? @_obj "styled")
+    object: (identifier) @_obj
+    (#eq? @_obj "styled")
     property: (property_identifier))
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
   function: (call_expression
-    function: (identifier) @_name (#eq? @_name "styled"))
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+    function: (identifier) @_name
+    (#eq? @_name "styled"))
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "html")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "html"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "html")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "html")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "js")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "javascript"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "js")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "javascript")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "json")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "json"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "json")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "json")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "sql")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "sql"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "sql")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "sql")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "ts")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "typescript"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "ts")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "typescript")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^ya?ml$")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "yaml"))
-)
+  function: (identifier) @_name
+  (#match? @_name "^ya?ml$")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "yaml")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^g(raph)?ql$")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "graphql"))
-)
+  function: (identifier) @_name
+  (#match? @_name "^g(raph)?ql$")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "graphql")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^g(raph)?ql$")
-  arguments: (arguments (template_string (string_fragment) @injection.content
-                              (#set! injection.language "graphql")))
-)
+  function: (identifier) @_name
+  (#match? @_name "^g(raph)?ql$")
+  arguments: (arguments
+    (template_string
+      (string_fragment) @injection.content
+      (#set! injection.language "graphql"))))
 
 (call_expression
-  function: (identifier) @_name(#match? @_name "^iso$")
-  arguments: (arguments (template_string (string_fragment) @injection.content
-                              (#set! injection.language "isograph")))
-)
+  function: (identifier) @_name
+  (#match? @_name "^iso$")
+  arguments: (arguments
+    (template_string
+      (string_fragment) @injection.content
+      (#set! injection.language "isograph"))))
 
 ; Parse the contents of strings and tagged template
 ; literals with leading ECMAScript comments:
 ; '/* html */' or '/*html*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/")
-  (#set! injection.language "html")
-)
+  (#set! injection.language "html"))
 
 ; '/* sql */' or '/*sql*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/")
-  (#set! injection.language "sql")
-)
+  (#set! injection.language "sql"))
 
 ; '/* gql */' or '/*gql*/'
 ; '/* graphql */' or '/*graphql*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/")
-  (#set! injection.language "graphql")
-)
+  (#set! injection.language "graphql"))
 
 ; '/* css */' or '/*css*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/")
-  (#set! injection.language "css")
-)
+  (#set! injection.language "css"))

crates/languages/src/tsx/outline.scm 🔗

@@ -1,230 +1,275 @@
 (internal_module
-    "namespace" @context
-    name: (_) @name) @item
+  "namespace" @context
+  name: (_) @name) @item
 
 (enum_declaration
-    "enum" @context
-    name: (_) @name) @item
+  "enum" @context
+  name: (_) @name) @item
 
 (type_alias_declaration
-    "type" @context
-    name: (_) @name) @item
+  "type" @context
+  name: (_) @name) @item
 
 (function_declaration
-    "async"? @context
-    "function" @context
-    name: (_) @name
-    parameters: (formal_parameters
-      "(" @context
-      ")" @context)) @item
+  "async"? @context
+  "function" @context
+  name: (_) @name
+  parameters: (formal_parameters
+    "(" @context
+    ")" @context)) @item
 
 (generator_function_declaration
-    "async"? @context
-    "function" @context
-    "*" @context
-    name: (_) @name
-    parameters: (formal_parameters
-      "(" @context
-      ")" @context)) @item
+  "async"? @context
+  "function" @context
+  "*" @context
+  name: (_) @name
+  parameters: (formal_parameters
+    "(" @context
+    ")" @context)) @item
 
 (interface_declaration
-    "interface" @context
-    name: (_) @name) @item
+  "interface" @context
+  name: (_) @name) @item
 
 (export_statement
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Exported array destructuring
 (export_statement
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Exported object destructuring
 (export_statement
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern
-                     value: (identifier) @name @item)
-                 (pair_pattern
-                     value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Top-level array destructuring
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Top-level object destructuring
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern
-                     value: (identifier) @name @item)
-                 (pair_pattern
-                     value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (class_declaration
-    "class" @context
-    name: (_) @name) @item
+  "class" @context
+  name: (_) @name) @item
 
 (abstract_class_declaration
-    "abstract" @context
-    "class" @context
-    name: (_) @name) @item
+  "abstract" @context
+  "class" @context
+  name: (_) @name) @item
 
 ; Method definitions in classes (not in object literals)
 (class_body
-    (method_definition
-        [
-            "get"
-            "set"
-            "async"
-            "*"
-            "readonly"
-            "static"
-            (override_modifier)
-            (accessibility_modifier)
-        ]* @context
-        name: (_) @name
-        parameters: (formal_parameters
-          "(" @context
-          ")" @context)) @item)
+  (method_definition
+    [
+      "get"
+      "set"
+      "async"
+      "*"
+      "readonly"
+      "static"
+      (override_modifier)
+      (accessibility_modifier)
+    ]* @context
+    name: (_) @name
+    parameters: (formal_parameters
+      "(" @context
+      ")" @context)) @item)
 
 ; Object literal methods
 (variable_declarator
-    value: (object
-        (method_definition
-            [
-                "get"
-                "set"
-                "async"
-                "*"
-            ]* @context
-            name: (_) @name
-            parameters: (formal_parameters
-              "(" @context
-              ")" @context)) @item))
+  value: (object
+    (method_definition
+      [
+        "get"
+        "set"
+        "async"
+        "*"
+      ]* @context
+      name: (_) @name
+      parameters: (formal_parameters
+        "(" @context
+        ")" @context)) @item))
 
 (public_field_definition
-    [
-        "declare"
-        "readonly"
-        "abstract"
-        "static"
-        (accessibility_modifier)
-    ]* @context
-    name: (_) @name) @item
+  [
+    "declare"
+    "readonly"
+    "abstract"
+    "static"
+    (accessibility_modifier)
+  ]* @context
+  name: (_) @name) @item
 
 ; Add support for (node:test, bun:test and Jest) runnable
-(
-    (call_expression
-        function: [
-            (identifier) @_name
-            (member_expression
-                object: [
-                    (identifier) @_name
-                    (member_expression object: (identifier) @_name)
-                ]
-            )
-        ] @context
-        (#any-of? @_name "it" "test" "describe" "context" "suite")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @name)
-                (identifier) @name
-            ]
-        )
-    )
-) @item
+((call_expression
+  function: [
+    (identifier) @_name
+    (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ])
+  ] @context
+  (#any-of? @_name "it" "test" "describe" "context" "suite")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @name)
+      (identifier) @name
+    ]))) @item
 
 ; Add support for parameterized tests
-(
-    (call_expression
-        function: (call_expression
-            function: (member_expression
-                object: [(identifier) @_name (member_expression object: (identifier) @_name)]
-                property: (property_identifier) @_property
-            )
-            (#any-of? @_name "it" "test" "describe" "context" "suite")
-            (#any-of? @_property "each")
-        )
-        arguments: (
-            arguments . [
-                (string (string_fragment) @name)
-                (identifier) @name
-            ]
-        )
-    )
-) @item
+((call_expression
+  function: (call_expression
+    function: (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ]
+      property: (property_identifier) @_property)
+    (#any-of? @_name "it" "test" "describe" "context" "suite")
+    (#any-of? @_property "each"))
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @name)
+      (identifier) @name
+    ]))) @item
 
 ; Object properties
 (pair
-    key: [
-        (property_identifier) @name
-        (string (string_fragment) @name)
-        (number) @name
-        (computed_property_name) @name
-    ]) @item
-
+  key: [
+    (property_identifier) @name
+    (string
+      (string_fragment) @name)
+    (number) @name
+    (computed_property_name) @name
+  ]) @item
 
 ; Nested variables in function bodies
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Nested array destructuring in functions
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Nested object destructuring in functions
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern value: (identifier) @name @item)
-                 (pair_pattern value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (comment) @annotation

crates/languages/src/tsx/overrides.scm 🔗

@@ -2,7 +2,8 @@
 
 (string) @string
 
-(template_string (string_fragment) @string)
+(template_string
+  (string_fragment) @string)
 
 (jsx_element) @element
 
@@ -13,6 +14,7 @@
   (jsx_expression)
 ] @default
 
-(_ value: (call_expression
-  function: (identifier) @function_name_before_type_arguments
-  type_arguments: (type_arguments)))
+(_
+  value: (call_expression
+    function: (identifier) @function_name_before_type_arguments
+    type_arguments: (type_arguments)))

crates/languages/src/tsx/runnables.scm 🔗

@@ -1,46 +1,42 @@
 ; Add support for (node:test, bun:test and Jest) runnable
 ; Function expression that has `it`, `test` or `describe` as the function name
-(
-    (call_expression
-        function: [
-            (identifier) @_name
-            (member_expression
-                object: [
-                    (identifier) @_name
-                    (member_expression object: (identifier) @_name)
-                ]
-            )
-        ]
-        (#any-of? @_name "it" "test" "describe" "context" "suite")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run)
-                (identifier) @run
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: [
+    (identifier) @_name
+    (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ])
+  ]
+  (#any-of? @_name "it" "test" "describe" "context" "suite")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run)
+      (identifier) @run
+    ])) @_js-test
+  (#set! tag js-test))
 
 ; Add support for parameterized tests
-(
-    (call_expression
-        function: (call_expression
-            function: (member_expression
-                object: [(identifier) @_name (member_expression object: (identifier) @_name)]
-                property: (property_identifier) @_property
-            )
-            (#any-of? @_name "it" "test" "describe" "context" "suite")
-            (#any-of? @_property "each")
-        )
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run)
-                (identifier) @run
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: (call_expression
+    function: (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ]
+      property: (property_identifier) @_property)
+    (#any-of? @_name "it" "test" "describe" "context" "suite")
+    (#any-of? @_property "each"))
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run)
+      (identifier) @run
+    ])) @_js-test
+  (#set! tag js-test))

crates/languages/src/tsx/textobjects.scm 🔗

@@ -1,113 +1,129 @@
 (comment)+ @comment.around
 
 (function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (method_definition
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (function_expression
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 ((arrow_function
-    body: (statement_block
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
- (#not-has-parent? @function.around variable_declarator))
+  body: (statement_block
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
+  (#not-has-parent? @function.around variable_declarator))
 
 ; Arrow function in variable declaration - capture the full declaration
 ([
-    (lexical_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (statement_block
-                    "{"
-                    (_)* @function.inside
-                    "}"))))
-    (variable_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (statement_block
-                    "{"
-                    (_)* @function.inside
-                    "}"))))
+  (lexical_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (statement_block
+          "{"
+          (_)* @function.inside
+          "}"))))
+  (variable_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (statement_block
+          "{"
+          (_)* @function.inside
+          "}"))))
 ]) @function.around
 
 ; Arrow function in variable declaration (expression body fallback)
 ([
-    (lexical_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (_) @function.inside)))
-    (variable_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (_) @function.inside)))
+  (lexical_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (_) @function.inside)))
+  (variable_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (_) @function.inside)))
 ]) @function.around
 
 ; Catch-all for arrow functions in other contexts (callbacks, etc.)
 ((arrow_function
-    body: (_) @function.inside) @function.around
- (#not-has-parent? @function.around variable_declarator))
+  body: (_) @function.inside) @function.around
+  (#not-has-parent? @function.around variable_declarator))
+
 (function_signature) @function.around
 
 (generator_function
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (generator_function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (class_declaration
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (class
-    body: (_
-        "{"
-        (_)* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    (_)* @class.inside
+    "}")) @class.around
 
 (interface_declaration
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (enum_declaration
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (ambient_declaration
-    (module
+  (module
     body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" ))) @class.around
+      "{"
+      [
+        (_)
+        ";"?
+      ]* @class.inside
+      "}"))) @class.around
 
 (internal_module
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (type_alias_declaration) @class.around

crates/languages/src/typescript.rs 🔗

@@ -804,6 +804,7 @@ impl LspAdapter for TypeScriptLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         adapter: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         let tsdk_path = self.tsdk_path(adapter).await;
         Ok(Some(json!({

crates/languages/src/typescript/brackets.scm 🔗

@@ -1,7 +1,23 @@
-("(" @open ")" @close)
-("[" @open "]" @close)
-("{" @open "}" @close)
-("<" @open ">" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
-(("`" @open "`" @close) (#set! rainbow.exclude))
+("(" @open
+  ")" @close)
+
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+("<" @open
+  ">" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))
+
+(("`" @open
+  "`" @close)
+  (#set! rainbow.exclude))

crates/languages/src/typescript/debugger.scm 🔗

@@ -1,23 +1,51 @@
-(lexical_declaration (variable_declarator name: (identifier) @debug-variable))
+(lexical_declaration
+  (variable_declarator
+    name: (identifier) @debug-variable))
 
-(for_in_statement left: (identifier) @debug-variable)
-(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable)))
+(for_in_statement
+  left: (identifier) @debug-variable)
 
-(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(for_statement
+  initializer: (lexical_declaration
+    (variable_declarator
+      name: (identifier) @debug-variable)))
 
-(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
-(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_expression
+  left: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(binary_expression
+  right: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(unary_expression
+  argument: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(update_expression
+  argument: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(return_statement
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
-(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
+(parenthesized_expression
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(array
+  (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(pair
+  value: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
+
+(member_expression
+  object: (identifier) @debug-variable
+  (#not-match? @debug-variable "^[A-Z]"))
 
 (statement_block) @debug-scope
+
 (program) @debug-scope

crates/languages/src/typescript/highlights.scm 🔗

@@ -1,46 +1,19 @@
 ; Variables
-
 (identifier) @variable
 
 (call_expression
   function: (member_expression
     object: (identifier) @type
-    (#any-of?
-      @type
-      "Promise"
-      "Array"
-      "Object"
-      "Map"
-      "Set"
-      "WeakMap"
-      "WeakSet"
-      "Date"
-      "Error"
-      "TypeError"
-      "RangeError"
-      "SyntaxError"
-      "ReferenceError"
-      "EvalError"
-      "URIError"
-      "RegExp"
-      "Function"
-      "Number"
-      "String"
-      "Boolean"
-      "Symbol"
-      "BigInt"
-      "Proxy"
-      "ArrayBuffer"
-      "DataView"
-    )
-  )
-)
+    (#any-of? @type
+      "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError"
+      "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function"
+      "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView")))
 
 ; Special identifiers
-
 (type_annotation) @type
 
 (type_identifier) @type
+
 (predefined_type) @type.builtin
 
 (type_alias_declaration
@@ -65,49 +38,47 @@
 (implements_clause
   (type_identifier) @type)
 
-;; Enables ts-pretty-errors
-;; The Lsp returns "snippets" of typescript, which are not valid typescript in totality,
-;; but should still be highlighted
-;; Highlights object literals by hijacking the statement_block pattern, but only if
-;; the statement block follows an object literal pattern
-((statement_block
-   (labeled_statement
-     ;; highlight the label like a property name
-     label: (statement_identifier) @property.name
-     body: [
-       ;; match a terminating expression statement
-       (expression_statement
-            ;; single identifier - treat as a type name
-           [(identifier) @type.name
-            ;; object - treat as a property - type pair
-            (object
-                (pair
-                    key: (_) @property.name
-                    value: (_) @type.name))
-            ;; subscript_expression - treat as an array declaration
-            (subscript_expression
-                object: (_) @type.name
-                index: (_)
-                )
-            ;; templated string - treat each identifier contained as a type name
-            (template_string
-                (template_substitution
-                    (identifier) @type.name))
-            ])
-       ;; match a nested statement block
-       (statement_block) @nested
-     ])))
+; Enables ts-pretty-errors
+; The Lsp returns "snippets" of typescript, which are not valid typescript in totality,
+; but should still be highlighted
+; Highlights object literals by hijacking the statement_block pattern, but only if
+; the statement block follows an object literal pattern
+(statement_block
+  (labeled_statement
+    ; highlight the label like a property name
+    label: (statement_identifier) @property.name
+    body: [
+      ; match a terminating expression statement
+      (expression_statement
+        ; single identifier - treat as a type name
+        [
+          (identifier) @type.name
+          ; object - treat as a property - type pair
+          (object
+            (pair
+              key: (_) @property.name
+              value: (_) @type.name))
+          ; subscript_expression - treat as an array declaration
+          (subscript_expression
+            object: (_) @type.name
+            index: (_))
+          ; templated string - treat each identifier contained as a type name
+          (template_string
+            (template_substitution
+              (identifier) @type.name))
+        ])
+      ; match a nested statement block
+      (statement_block) @nested
+    ]))
 
 ; Inline type imports: import { type Foo } or import { type Foo as Bar }
 (import_specifier
   "type"
-  name: (identifier) @type
-)
+  name: (identifier) @type)
 
 (import_specifier
   "type"
-  alias: (identifier) @type
-)
+  alias: (identifier) @type)
 
 ; Full type imports: import type { Foo } or import type { Foo as Bar }
 (import_statement
@@ -115,45 +86,41 @@
   (import_clause
     (named_imports
       (import_specifier
-        name: (identifier) @type
-      )
-    )
-  )
-)
+        name: (identifier) @type))))
 
 (import_statement
   "type"
   (import_clause
     (named_imports
       (import_specifier
-        alias: (identifier) @type
-      )
-    )
-  )
-)
+        alias: (identifier) @type))))
 
 ([
   (identifier)
   (shorthand_property_identifier)
   (shorthand_property_identifier_pattern)
- ] @constant
- (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
+] @constant
+  (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
 
 ; Properties
-
 (property_identifier) @property
+
 (shorthand_property_identifier) @property
+
 (shorthand_property_identifier_pattern) @property
+
 (private_property_identifier) @property
 
 ; Function and method calls
-
 (call_expression
   function: (identifier) @function)
 
 (call_expression
   function: (member_expression
-    property: [(property_identifier) (private_property_identifier)] @function.method))
+    property: [
+      (property_identifier)
+      (private_property_identifier)
+    ] @function.method))
 
 (new_expression
   constructor: (identifier) @type)
@@ -162,38 +129,60 @@
   module: (identifier) @type)
 
 ; Function and method definitions
-
 (function_expression
   name: (identifier) @function)
+
 (function_declaration
   name: (identifier) @function)
+
 (method_definition
-  name: [(property_identifier) (private_property_identifier)] @function.method)
+  name: [
+    (property_identifier)
+    (private_property_identifier)
+  ] @function.method)
+
 (method_definition
-    name: (property_identifier) @constructor
-    (#eq? @constructor "constructor"))
+  name: (property_identifier) @constructor
+  (#eq? @constructor "constructor"))
 
 (pair
-  key: [(property_identifier) (private_property_identifier)] @function.method
-  value: [(function_expression) (arrow_function)])
+  key: [
+    (property_identifier)
+    (private_property_identifier)
+  ] @function.method
+  value: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (assignment_expression
   left: (member_expression
-    property: [(property_identifier) (private_property_identifier)] @function.method)
-  right: [(function_expression) (arrow_function)])
+    property: [
+      (property_identifier)
+      (private_property_identifier)
+    ] @function.method)
+  right: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (variable_declarator
   name: (identifier) @function
-  value: [(function_expression) (arrow_function)])
+  value: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (assignment_expression
   left: (identifier) @function
-  right: [(function_expression) (arrow_function)])
+  right: [
+    (function_expression)
+    (arrow_function)
+  ])
 
 (arrow_function) @function
 
 ; Parameters
-
 (required_parameter
   (identifier) @variable.parameter)
 
@@ -227,8 +216,8 @@
   name: (identifier) @variable.parameter)
 
 ; Literals
-
 (this) @variable.special
+
 (super) @variable.special
 
 [
@@ -247,8 +236,7 @@
     (undefined)
     (true)
     (false)
-  ] @type.builtin
-)
+  ] @type.builtin)
 
 (comment) @comment
 
@@ -263,11 +251,12 @@
 (escape_sequence) @string.escape
 
 (regex) @string.regex
+
 (regex_flags) @keyword.operator.regex
+
 (number) @number
 
 ; Tokens
-
 [
   ";"
   "?."
@@ -326,14 +315,14 @@
   "..."
 ] @operator
 
-(regex "/" @string.regex)
+(regex
+  "/" @string.regex)
 
 (ternary_expression
   [
     "?"
     ":"
-  ] @operator
-)
+  ] @operator)
 
 [
   "("
@@ -342,7 +331,7 @@
   "]"
   "{"
   "}"
-]  @punctuation.bracket
+] @punctuation.bracket
 
 (template_substitution
   "${" @punctuation.special
@@ -360,31 +349,32 @@
   "<" @punctuation.bracket
   ">" @punctuation.bracket)
 
-(decorator "@" @punctuation.special)
+(decorator
+  "@" @punctuation.special)
 
 (union_type
-  ("|") @punctuation.special)
+  "|" @punctuation.special)
 
 (intersection_type
-  ("&") @punctuation.special)
+  "&" @punctuation.special)
 
 (type_annotation
-  (":") @punctuation.special)
+  ":" @punctuation.special)
 
 (index_signature
-  (":") @punctuation.special)
+  ":" @punctuation.special)
 
 (type_predicate_annotation
-  (":") @punctuation.special)
+  ":" @punctuation.special)
 
 (public_field_definition
-  ("?") @punctuation.special)
+  "?" @punctuation.special)
 
 (property_signature
-  ("?") @punctuation.special)
+  "?" @punctuation.special)
 
 (method_signature
-  ("?") @punctuation.special)
+  "?" @punctuation.special)
 
 (optional_parameter
   ([
@@ -393,7 +383,6 @@
   ]) @punctuation.special)
 
 ; Keywords
-
 [
   "abstract"
   "as"
@@ -465,4 +454,5 @@
   "yield"
 ] @keyword.control
 
-(switch_default "default" @keyword.control)
+(switch_default
+  "default" @keyword.control)

crates/languages/src/typescript/imports.scm 🔗

@@ -1,20 +1,23 @@
 (import_statement
-    import_clause: (import_clause
-        [
-            (identifier) @name
-            (named_imports
-                (import_specifier
-                    name: (_) @name
-                    alias: (_)? @alias))
-            (namespace_import) @wildcard
-        ])
-    source: (string (string_fragment) @source)) @import
+  import_clause: (import_clause
+    [
+      (identifier) @name
+      (named_imports
+        (import_specifier
+          name: (_) @name
+          alias: (_)? @alias))
+      (namespace_import) @wildcard
+    ])
+  source: (string
+    (string_fragment) @source)) @import
 
 (import_statement
-    !source
-    import_clause: (import_require_clause
-        source: (string (string_fragment) @source))) @wildcard @import
+  !source
+  import_clause: (import_require_clause
+    source: (string
+      (string_fragment) @source))) @wildcard @import
 
 (import_statement
-    !import_clause
-    source: (string (string_fragment) @source)) @wildcard @import
+  !import_clause
+  source: (string
+    (string_fragment) @source)) @wildcard @import

crates/languages/src/typescript/indents.scm 🔗

@@ -1,17 +1,28 @@
 [
-    (call_expression)
-    (assignment_expression)
-    (member_expression)
-    (lexical_declaration)
-    (variable_declaration)
-    (assignment_expression)
-    ; below handled by  `(_ "{" "}" @end) @indent`
-    ; (if_statement)
-    ; (for_statement)
-    ; (while_statement)
+  (call_expression)
+  (assignment_expression)
+  (member_expression)
+  (lexical_declaration)
+  (variable_declaration)
+  (assignment_expression)
+  ; below handled by  `(_ "{" "}" @end) @indent`
+  ; (if_statement)
+  ; (for_statement)
+  ; (while_statement)
 ] @indent
 
-(_ "[" "]" @end) @indent
-(_ "<" ">" @end) @indent
-(_ "{" "}" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "["
+  "]" @end) @indent
+
+(_
+  "<"
+  ">" @end) @indent
+
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent

crates/languages/src/typescript/injections.scm 🔗

@@ -1,6 +1,5 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 (((comment) @_jsdoc_comment
   (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content
@@ -14,156 +13,187 @@
   (#set! injection.language "regex"))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "css")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "css")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
   function: (member_expression
-    object: (identifier) @_obj (#eq? @_obj "styled")
+    object: (identifier) @_obj
+    (#eq? @_obj "styled")
     property: (property_identifier))
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
   function: (call_expression
-    function: (identifier) @_name (#eq? @_name "styled"))
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "css"))
-)
+    function: (identifier) @_name
+    (#eq? @_name "styled"))
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "css")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "html")
+  function: (identifier) @_name
+  (#eq? @_name "html")
   arguments: (template_string) @injection.content
-                              (#set! injection.language "html")
-)
+  (#set! injection.language "html"))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "js")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "javascript"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "js")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "javascript")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "json")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "json"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "json")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "json")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "sql")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "sql"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "sql")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "sql")))
 
 (call_expression
-  function: (identifier) @_name (#eq? @_name "ts")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "typescript"))
-)
+  function: (identifier) @_name
+  (#eq? @_name "ts")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "typescript")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^ya?ml$")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "yaml"))
-)
+  function: (identifier) @_name
+  (#match? @_name "^ya?ml$")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "yaml")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^g(raph)?ql$")
-  arguments: (template_string (string_fragment) @injection.content
-                              (#set! injection.language "graphql"))
-)
+  function: (identifier) @_name
+  (#match? @_name "^g(raph)?ql$")
+  arguments: (template_string
+    (string_fragment) @injection.content
+    (#set! injection.language "graphql")))
 
 (call_expression
-  function: (identifier) @_name (#match? @_name "^g(raph)?ql$")
-  arguments: (arguments (template_string (string_fragment) @injection.content
-                              (#set! injection.language "graphql")))
-)
+  function: (identifier) @_name
+  (#match? @_name "^g(raph)?ql$")
+  arguments: (arguments
+    (template_string
+      (string_fragment) @injection.content
+      (#set! injection.language "graphql"))))
 
 (call_expression
-  function: (identifier) @_name(#match? @_name "^iso$")
-  arguments: (arguments (template_string (string_fragment) @injection.content
-                              (#set! injection.language "isograph")))
-)
-
-;; Angular Component template injection
+  function: (identifier) @_name
+  (#match? @_name "^iso$")
+  arguments: (arguments
+    (template_string
+      (string_fragment) @injection.content
+      (#set! injection.language "isograph"))))
+
+; Angular Component template injection
 (call_expression
   function: [
-    (identifier) @_decorator (#eq? @_decorator "Component")
-    (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component"))
+    (identifier) @_decorator
+    (#eq? @_decorator "Component")
+    (member_expression
+      property: (property_identifier) @_decorator
+      (#eq? @_decorator "Component"))
   ]
-  arguments: (arguments (object
-    (pair
-      key: (property_identifier) @_prop (#eq? @_prop "template")
-      value: [
-        (string) @injection.content
-        (template_string) @injection.content
-        (template_string (string_fragment) @injection.content)
-      ]
-    )))
+  arguments: (arguments
+    (object
+      (pair
+        key: (property_identifier) @_prop
+        (#eq? @_prop "template")
+        value: [
+          (string) @injection.content
+          (template_string) @injection.content
+          (template_string
+            (string_fragment) @injection.content)
+        ])))
   (#set! injection.language "angular"))
 
-;; Angular Component styles injection
+; Angular Component styles injection
 (call_expression
   function: [
-    (identifier) @_decorator (#eq? @_decorator "Component")
-    (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component"))
+    (identifier) @_decorator
+    (#eq? @_decorator "Component")
+    (member_expression
+      property: (property_identifier) @_decorator
+      (#eq? @_decorator "Component"))
   ]
-  arguments: (arguments (object
-    (pair
-      key: (property_identifier) @_prop (#eq? @_prop "styles")
-      value: [
-        (string) @injection.content
-        (template_string) @injection.content
-        (template_string (string_fragment) @injection.content)
-        (array (string) @injection.content)
-        (array (template_string) @injection.content)
-        (array (template_string (string_fragment)) @injection.content)
-      ]
-    )))
+  arguments: (arguments
+    (object
+      (pair
+        key: (property_identifier) @_prop
+        (#eq? @_prop "styles")
+        value: [
+          (string) @injection.content
+          (template_string) @injection.content
+          (template_string
+            (string_fragment) @injection.content)
+          (array
+            (string) @injection.content)
+          (array
+            (template_string) @injection.content)
+          (array
+            (template_string
+              (string_fragment)) @injection.content)
+        ])))
   (#set! injection.language "css"))
 
 ; Parse the contents of strings and tagged template
 ; literals with leading ECMAScript comments:
 ; '/* html */' or '/*html*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/")
-  (#set! injection.language "html")
-)
+  (#set! injection.language "html"))
 
 ; '/* sql */' or '/*sql*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/")
-  (#set! injection.language "sql")
-)
+  (#set! injection.language "sql"))
 
 ; '/* gql */' or '/*gql*/'
 ; '/* graphql */' or '/*graphql*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/")
-  (#set! injection.language "graphql")
-)
+  (#set! injection.language "graphql"))
 
 ; '/* css */' or '/*css*/'
-(
-  ((comment) @_ecma_comment [
-    (string (string_fragment) @injection.content)
-    (template_string (string_fragment) @injection.content)
+(((comment) @_ecma_comment
+  [
+    (string
+      (string_fragment) @injection.content)
+    (template_string
+      (string_fragment) @injection.content)
   ])
   (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/")
-  (#set! injection.language "css")
-)
+  (#set! injection.language "css"))

crates/languages/src/typescript/outline.scm 🔗

@@ -1,230 +1,275 @@
 (internal_module
-    "namespace" @context
-    name: (_) @name) @item
+  "namespace" @context
+  name: (_) @name) @item
 
 (enum_declaration
-    "enum" @context
-    name: (_) @name) @item
+  "enum" @context
+  name: (_) @name) @item
 
 (type_alias_declaration
-    "type" @context
-    name: (_) @name) @item
+  "type" @context
+  name: (_) @name) @item
 
 (function_declaration
-    "async"? @context
-    "function" @context
-    name: (_) @name
-    parameters: (formal_parameters
-      "(" @context
-      ")" @context)) @item
+  "async"? @context
+  "function" @context
+  name: (_) @name
+  parameters: (formal_parameters
+    "(" @context
+    ")" @context)) @item
 
 (generator_function_declaration
-    "async"? @context
-    "function" @context
-    "*" @context
-    name: (_) @name
-    parameters: (formal_parameters
-      "(" @context
-      ")" @context)) @item
+  "async"? @context
+  "function" @context
+  "*" @context
+  name: (_) @name
+  parameters: (formal_parameters
+    "(" @context
+    ")" @context)) @item
 
 (interface_declaration
-    "interface" @context
-    name: (_) @name) @item
+  "interface" @context
+  name: (_) @name) @item
 
 (export_statement
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Exported array destructuring
 (export_statement
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Exported object destructuring
 (export_statement
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern
-                     value: (identifier) @name @item)
-                 (pair_pattern
-                     value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Top-level array destructuring
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Top-level object destructuring
 (program
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern
-                     value: (identifier) @name @item)
-                 (pair_pattern
-                     value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (class_declaration
-    "class" @context
-    name: (_) @name) @item
+  "class" @context
+  name: (_) @name) @item
 
 (abstract_class_declaration
-    "abstract" @context
-    "class" @context
-    name: (_) @name) @item
+  "abstract" @context
+  "class" @context
+  name: (_) @name) @item
 
 ; Method definitions in classes (not in object literals)
 (class_body
-    (method_definition
-        [
-            "get"
-            "set"
-            "async"
-            "*"
-            "readonly"
-            "static"
-            (override_modifier)
-            (accessibility_modifier)
-        ]* @context
-        name: (_) @name
-        parameters: (formal_parameters
-          "(" @context
-          ")" @context)) @item)
+  (method_definition
+    [
+      "get"
+      "set"
+      "async"
+      "*"
+      "readonly"
+      "static"
+      (override_modifier)
+      (accessibility_modifier)
+    ]* @context
+    name: (_) @name
+    parameters: (formal_parameters
+      "(" @context
+      ")" @context)) @item)
 
 ; Object literal methods
 (variable_declarator
-    value: (object
-        (method_definition
-            [
-                "get"
-                "set"
-                "async"
-                "*"
-            ]* @context
-            name: (_) @name
-            parameters: (formal_parameters
-              "(" @context
-              ")" @context)) @item))
+  value: (object
+    (method_definition
+      [
+        "get"
+        "set"
+        "async"
+        "*"
+      ]* @context
+      name: (_) @name
+      parameters: (formal_parameters
+        "(" @context
+        ")" @context)) @item))
 
 (public_field_definition
-    [
-        "declare"
-        "readonly"
-        "abstract"
-        "static"
-        (accessibility_modifier)
-    ]* @context
-    name: (_) @name) @item
+  [
+    "declare"
+    "readonly"
+    "abstract"
+    "static"
+    (accessibility_modifier)
+  ]* @context
+  name: (_) @name) @item
 
 ; Add support for (node:test, bun:test and Jest) runnable
-(
-    (call_expression
-        function: [
-            (identifier) @_name
-            (member_expression
-                object: [
-                    (identifier) @_name
-                    (member_expression object: (identifier) @_name)
-                ]
-            )
-        ] @context
-        (#any-of? @_name "it" "test" "describe" "context" "suite")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @name)
-                (identifier) @name
-            ]
-        )
-    )
-) @item
+((call_expression
+  function: [
+    (identifier) @_name
+    (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ])
+  ] @context
+  (#any-of? @_name "it" "test" "describe" "context" "suite")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @name)
+      (identifier) @name
+    ]))) @item
 
 ; Add support for parameterized tests
-(
-    (call_expression
-        function: (call_expression
-            function: (member_expression
-                object: [(identifier) @_name (member_expression object: (identifier) @_name)]
-                property: (property_identifier) @_property
-            )
-            (#any-of? @_name "it" "test" "describe" "context" "suite")
-            (#any-of? @_property "each")
-        )
-        arguments: (
-            arguments . [
-                (string (string_fragment) @name)
-                (identifier) @name
-            ]
-        )
-    )
-) @item
+((call_expression
+  function: (call_expression
+    function: (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ]
+      property: (property_identifier) @_property)
+    (#any-of? @_name "it" "test" "describe" "context" "suite")
+    (#any-of? @_property "each"))
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @name)
+      (identifier) @name
+    ]))) @item
 
 ; Object properties
 (pair
-    key: [
-        (property_identifier) @name
-        (string (string_fragment) @name)
-        (number) @name
-        (computed_property_name) @name
-    ]) @item
-
+  key: [
+    (property_identifier) @name
+    (string
+      (string_fragment) @name)
+    (number) @name
+    (computed_property_name) @name
+  ]) @item
 
 ; Nested variables in function bodies
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (identifier) @name) @item))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (identifier) @name) @item))
 
 ; Nested array destructuring in functions
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (array_pattern
-                [
-                    (identifier) @name @item
-                    (assignment_pattern left: (identifier) @name @item)
-                    (rest_pattern (identifier) @name @item)
-                ]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (array_pattern
+        [
+          (identifier) @name @item
+          (assignment_pattern
+            left: (identifier) @name @item)
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 ; Nested object destructuring in functions
 (statement_block
-    (lexical_declaration
-        ["let" "const"] @context
-        (variable_declarator
-            name: (object_pattern
-                [(shorthand_property_identifier_pattern) @name @item
-                 (pair_pattern value: (identifier) @name @item)
-                 (pair_pattern value: (assignment_pattern left: (identifier) @name @item))
-                 (rest_pattern (identifier) @name @item)]))))
+  (lexical_declaration
+    [
+      "let"
+      "const"
+    ] @context
+    (variable_declarator
+      name: (object_pattern
+        [
+          (shorthand_property_identifier_pattern) @name @item
+          (pair_pattern
+            value: (identifier) @name @item)
+          (pair_pattern
+            value: (assignment_pattern
+              left: (identifier) @name @item))
+          (rest_pattern
+            (identifier) @name @item)
+        ]))))
 
 (comment) @annotation

crates/languages/src/typescript/overrides.scm 🔗

@@ -2,8 +2,10 @@
 
 (string) @string
 
-(template_string (string_fragment) @string)
+(template_string
+  (string_fragment) @string)
 
-(_ value: (call_expression
-  function: (identifier) @function_name_before_type_arguments
-  type_arguments: (type_arguments)))
+(_
+  value: (call_expression
+    function: (identifier) @function_name_before_type_arguments
+    type_arguments: (type_arguments)))

crates/languages/src/typescript/runnables.scm 🔗

@@ -1,85 +1,71 @@
 ; Add support for (node:test, bun:test, Jest and Deno.test) runnable
 ; Function expression that has `it`, `test` or `describe` as the function name
-(
-    (call_expression
-        function: [
-            (identifier) @_name
-            (member_expression
-                object: [
-                    (identifier) @_name
-                    (member_expression object: (identifier) @_name)
-                ]
-            )
-        ]
-        (#any-of? @_name "it" "test" "describe" "context" "suite")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run)
-                (identifier) @run
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: [
+    (identifier) @_name
+    (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ])
+  ]
+  (#any-of? @_name "it" "test" "describe" "context" "suite")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run)
+      (identifier) @run
+    ])) @_js-test
+  (#set! tag js-test))
 
 ; Add support for parameterized tests
-(
-    (call_expression
-        function: (call_expression
-            function: (member_expression
-                object: [(identifier) @_name (member_expression object: (identifier) @_name)]
-                property: (property_identifier) @_property
-            )
-            (#any-of? @_name "it" "test" "describe" "context" "suite")
-            (#any-of? @_property "each")
-        )
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run)
-                (identifier) @run
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: (call_expression
+    function: (member_expression
+      object: [
+        (identifier) @_name
+        (member_expression
+          object: (identifier) @_name)
+      ]
+      property: (property_identifier) @_property)
+    (#any-of? @_name "it" "test" "describe" "context" "suite")
+    (#any-of? @_property "each"))
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run)
+      (identifier) @run
+    ])) @_js-test
+  (#set! tag js-test))
 
 ; Add support for Deno.test with string names
-(
-    (call_expression
-        function: (member_expression
-            object: (identifier) @_namespace
-            property: (property_identifier) @_method
-        )
-        (#eq? @_namespace "Deno")
-        (#eq? @_method "test")
-        arguments: (
-            arguments . [
-                (string (string_fragment) @run @DENO_TEST_NAME)
-                (identifier) @run @DENO_TEST_NAME
-            ]
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: (member_expression
+    object: (identifier) @_namespace
+    property: (property_identifier) @_method)
+  (#eq? @_namespace "Deno")
+  (#eq? @_method "test")
+  arguments: (arguments
+    .
+    [
+      (string
+        (string_fragment) @run @DENO_TEST_NAME)
+      (identifier) @run @DENO_TEST_NAME
+    ])) @_js-test
+  (#set! tag js-test))
 
 ; Add support for Deno.test with named function expressions
-(
-    (call_expression
-        function: (member_expression
-            object: (identifier) @_namespace
-            property: (property_identifier) @_method
-        )
-        (#eq? @_namespace "Deno")
-        (#eq? @_method "test")
-        arguments: (
-            arguments . (function_expression
-                name: (identifier) @run @DENO_TEST_NAME
-            )
-        )
-    ) @_js-test
-
-    (#set! tag js-test)
-)
+((call_expression
+  function: (member_expression
+    object: (identifier) @_namespace
+    property: (property_identifier) @_method)
+  (#eq? @_namespace "Deno")
+  (#eq? @_method "test")
+  arguments: (arguments
+    .
+    (function_expression
+      name: (identifier) @run @DENO_TEST_NAME))) @_js-test
+  (#set! tag js-test))

crates/languages/src/typescript/textobjects.scm 🔗

@@ -1,114 +1,130 @@
 (comment)+ @comment.around
 
 (function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (method_definition
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (function_expression
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 ((arrow_function
-    body: (statement_block
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
- (#not-has-parent? @function.around variable_declarator))
+  body: (statement_block
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
+  (#not-has-parent? @function.around variable_declarator))
 
 ; Arrow function in variable declaration - capture the full declaration
 ([
-    (lexical_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (statement_block
-                    "{"
-                    (_)* @function.inside
-                    "}"))))
-    (variable_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (statement_block
-                    "{"
-                    (_)* @function.inside
-                    "}"))))
+  (lexical_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (statement_block
+          "{"
+          (_)* @function.inside
+          "}"))))
+  (variable_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (statement_block
+          "{"
+          (_)* @function.inside
+          "}"))))
 ]) @function.around
 
 ; Arrow function in variable declaration - capture body as @function.inside
 ; (for statement blocks, the more specific pattern above captures just the contents)
 ([
-    (lexical_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (_) @function.inside)))
-    (variable_declaration
-        (variable_declarator
-            value: (arrow_function
-                body: (_) @function.inside)))
+  (lexical_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (_) @function.inside)))
+  (variable_declaration
+    (variable_declarator
+      value: (arrow_function
+        body: (_) @function.inside)))
 ]) @function.around
 
 ; Catch-all for arrow functions in other contexts (callbacks, etc.)
 ((arrow_function
-    body: (_) @function.inside) @function.around
- (#not-has-parent? @function.around variable_declarator))
+  body: (_) @function.inside) @function.around
+  (#not-has-parent? @function.around variable_declarator))
+
 (function_signature) @function.around
 
 (generator_function
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (generator_function_declaration
-    body: (_
-        "{"
-        (_)* @function.inside
-        "}")) @function.around
+  body: (_
+    "{"
+    (_)* @function.inside
+    "}")) @function.around
 
 (class_declaration
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (class
-    body: (_
-        "{"
-        (_)* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    (_)* @class.inside
+    "}")) @class.around
 
 (interface_declaration
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (enum_declaration
-    body: (_
-        "{"
-        [(_) ","?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ","?
+    ]* @class.inside
+    "}")) @class.around
 
 (ambient_declaration
-    (module
+  (module
     body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" ))) @class.around
+      "{"
+      [
+        (_)
+        ";"?
+      ]* @class.inside
+      "}"))) @class.around
 
 (internal_module
-    body: (_
-        "{"
-        [(_) ";"?]* @class.inside
-        "}" )) @class.around
+  body: (_
+    "{"
+    [
+      (_)
+      ";"?
+    ]* @class.inside
+    "}")) @class.around
 
 (type_alias_declaration) @class.around

crates/languages/src/yaml/brackets.scm 🔗

@@ -1,4 +1,13 @@
-("[" @open "]" @close)
-("{" @open "}" @close)
-(("\"" @open "\"" @close) (#set! rainbow.exclude))
-(("'" @open "'" @close) (#set! rainbow.exclude))
+("[" @open
+  "]" @close)
+
+("{" @open
+  "}" @close)
+
+(("\"" @open
+  "\"" @close)
+  (#set! rainbow.exclude))
+
+(("'" @open
+  "'" @close)
+  (#set! rainbow.exclude))

crates/languages/src/yaml/highlights.scm 🔗

@@ -1,4 +1,5 @@
 (boolean_scalar) @boolean
+
 (null_scalar) @constant.builtin
 
 [
@@ -25,30 +26,31 @@
 
 key: (flow_node
   [
-    (plain_scalar (string_scalar))
+    (plain_scalar
+      (string_scalar))
     (double_quote_scalar)
     (single_quote_scalar)
   ] @property)
 
 [
- ","
- "-"
- ":"
- ">"
- "?"
- "|"
+  ","
+  "-"
+  ":"
+  ">"
+  "?"
+  "|"
 ] @punctuation.delimiter
 
 [
- "["
- "]"
- "{"
- "}"
+  "["
+  "]"
+  "{"
+  "}"
 ] @punctuation.bracket
 
 [
- "*"
- "&"
- "---"
- "..."
+  "*"
+  "&"
+  "---"
+  "..."
 ] @punctuation.special

crates/languages/src/yaml/injections.scm 🔗

@@ -1,25 +1,26 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 ; GitHub actions: JavaScript for workflow scripting (inline and block)
 (block_mapping
   (block_mapping_pair
-    key: (flow_node) @_uses (#eq? @_uses "uses")
-    value: (flow_node) @_actions_ghs (#match? @_actions_ghs "^actions/github-script"))
+    key: (flow_node) @_uses
+    (#eq? @_uses "uses")
+    value: (flow_node) @_actions_ghs
+    (#match? @_actions_ghs "^actions/github-script"))
   (block_mapping_pair
-    key: (flow_node) @_with (#eq? @_with "with")
+    key: (flow_node) @_with
+    (#eq? @_with "with")
     value: (block_node
       (block_mapping
         (block_mapping_pair
-          key: (flow_node) @_run (#eq? @_run "script")
+          key: (flow_node) @_run
+          (#eq? @_run "script")
           value: [
-            (flow_node (plain_scalar (string_scalar) @injection.content))
-            (block_node (block_scalar) @injection.content)
+            (flow_node
+              (plain_scalar
+                (string_scalar) @injection.content))
+            (block_node
+              (block_scalar) @injection.content)
           ]
-          (#set! injection.language "javascript")
-        )
-      )
-    )
-  )
-)
+          (#set! injection.language "javascript"))))))

crates/languages/src/yaml/outline.scm 🔗

@@ -1,9 +1,7 @@
 (block_mapping_pair
-    key:
-        (flow_node
-            (plain_scalar
-                (string_scalar) @name))
-    value:
-        (flow_node
-            (plain_scalar
-                (string_scalar) @context))?) @item
+  key: (flow_node
+    (plain_scalar
+      (string_scalar) @name))
+  value: (flow_node
+    (plain_scalar
+      (string_scalar) @context))?) @item

crates/livekit_client/Cargo.toml 🔗

@@ -40,15 +40,12 @@ serde.workspace = true
 serde_urlencoded.workspace = true
 settings.workspace = true
 smallvec.workspace = true
-tokio-tungstenite.workspace = true
 ui.workspace = true
 util.workspace = true
 
 [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies]
-libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" }
-livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [
-    "__rustls-tls"
-] }
+libwebrtc.workspace = true
+livekit.workspace = true
 
 [target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies]
 scap.workspace = true

crates/livekit_client/src/livekit_client.rs 🔗

@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
 use anyhow::{Context as _, Result, anyhow};
 use audio::AudioSettings;
 use collections::HashMap;
@@ -54,10 +52,8 @@ impl Room {
         token: String,
         cx: &mut AsyncApp,
     ) -> Result<(Self, mpsc::UnboundedReceiver<RoomEvent>)> {
-        let connector =
-            tokio_tungstenite::Connector::Rustls(Arc::new(http_client_tls::tls_config()));
         let mut config = livekit::RoomOptions::default();
-        config.connector = Some(connector);
+        config.tls_config = livekit::TlsConfig(Some(http_client_tls::tls_config()));
         let (room, mut events) = Tokio::spawn(cx, async move {
             livekit::Room::connect(&url, &token, config).await
         })

crates/livekit_client/src/livekit_client/playback.rs 🔗

@@ -466,10 +466,13 @@ pub(crate) async fn capture_local_video_track(
 ) -> Result<(crate::LocalVideoTrack, Box<dyn ScreenCaptureStream>)> {
     let metadata = capture_source.metadata()?;
     let track_source = gpui_tokio::Tokio::spawn(cx, async move {
-        NativeVideoSource::new(VideoResolution {
-            width: metadata.resolution.width.0 as u32,
-            height: metadata.resolution.height.0 as u32,
-        })
+        NativeVideoSource::new(
+            VideoResolution {
+                width: metadata.resolution.width.0 as u32,
+                height: metadata.resolution.height.0 as u32,
+            },
+            true,
+        )
     })
     .await?;
 

crates/lsp/Cargo.toml 🔗

@@ -13,12 +13,13 @@ path = "src/lsp.rs"
 doctest = false
 
 [features]
-test-support = ["async-pipe"]
+test-support = ["async-pipe", "gpui_util"]
 
 [dependencies]
 anyhow.workspace = true
 async-pipe = { workspace = true, optional = true }
 collections.workspace = true
+gpui_util = { workspace = true, optional = true }
 futures.workspace = true
 gpui.workspace = true
 log.workspace = true
@@ -34,6 +35,7 @@ release_channel.workspace = true
 
 [dev-dependencies]
 async-pipe.workspace = true
+gpui_util.workspace = true
 ctor.workspace = true
 gpui = { workspace = true, features = ["test-support"] }
 semver.workspace = true

crates/lsp/src/lsp.rs 🔗

@@ -1970,10 +1970,14 @@ impl FakeLanguageServer {
                 let responded_tx = responded_tx.clone();
                 let executor = cx.background_executor().clone();
                 async move {
+                    let _guard = gpui_util::defer({
+                        let responded_tx = responded_tx.clone();
+                        move || {
+                            responded_tx.unbounded_send(()).ok();
+                        }
+                    });
                     executor.simulate_random_delay().await;
-                    let result = result.await;
-                    responded_tx.unbounded_send(()).ok();
-                    result
+                    result.await
                 }
             })
             .detach();

crates/markdown/src/markdown.rs 🔗

@@ -15,6 +15,7 @@ use ui::Checkbox;
 use ui::CopyButton;
 
 use std::borrow::Cow;
+use std::collections::BTreeMap;
 use std::iter;
 use std::mem;
 use std::ops::Range;
@@ -246,7 +247,7 @@ pub struct Markdown {
     fallback_code_block_language: Option<LanguageName>,
     options: Options,
     copied_code_blocks: HashSet<ElementId>,
-    code_block_scroll_handles: HashMap<usize, ScrollHandle>,
+    code_block_scroll_handles: BTreeMap<usize, ScrollHandle>,
     context_menu_selected_text: Option<String>,
 }
 
@@ -316,7 +317,7 @@ impl Markdown {
                 parse_links_only: false,
             },
             copied_code_blocks: HashSet::default(),
-            code_block_scroll_handles: HashMap::default(),
+            code_block_scroll_handles: BTreeMap::default(),
             context_menu_selected_text: None,
         };
         this.parse(cx);
@@ -341,7 +342,7 @@ impl Markdown {
                 parse_links_only: true,
             },
             copied_code_blocks: HashSet::default(),
-            code_block_scroll_handles: HashMap::default(),
+            code_block_scroll_handles: BTreeMap::default(),
             context_menu_selected_text: None,
         };
         this.parse(cx);
@@ -364,6 +365,32 @@ impl Markdown {
         self.code_block_scroll_handles.clear();
     }
 
+    fn autoscroll_code_block(&self, source_index: usize, cursor_position: Point<Pixels>) {
+        let Some((_, scroll_handle)) = self
+            .code_block_scroll_handles
+            .range(..=source_index)
+            .next_back()
+        else {
+            return;
+        };
+
+        let bounds = scroll_handle.bounds();
+        if cursor_position.y < bounds.top() || cursor_position.y > bounds.bottom() {
+            return;
+        }
+
+        let horizontal_delta = if cursor_position.x < bounds.left() {
+            bounds.left() - cursor_position.x
+        } else if cursor_position.x > bounds.right() {
+            bounds.right() - cursor_position.x
+        } else {
+            return;
+        };
+
+        let offset = scroll_handle.offset();
+        scroll_handle.set_offset(point(offset.x + horizontal_delta, offset.y));
+    }
+
     pub fn is_parsing(&self) -> bool {
         self.pending_parse.is_some()
     }
@@ -902,6 +929,7 @@ impl MarkdownElement {
                         Ok(ix) | Err(ix) => ix,
                     };
                     markdown.selection.set_head(source_index, &rendered_text);
+                    markdown.autoscroll_code_block(source_index, event.position);
                     markdown.autoscroll_request = Some(source_index);
                     cx.notify();
                 } else {

crates/markdown/src/parser.rs 🔗

@@ -10,7 +10,7 @@ use collections::HashSet;
 
 use crate::path_range::PathWithRange;
 
-const PARSE_OPTIONS: Options = Options::ENABLE_TABLES
+pub const PARSE_OPTIONS: Options = Options::ENABLE_TABLES
     .union(Options::ENABLE_FOOTNOTES)
     .union(Options::ENABLE_STRIKETHROUGH)
     .union(Options::ENABLE_TASKLISTS)

crates/markdown_preview/Cargo.toml 🔗

@@ -25,6 +25,7 @@ html5ever.workspace = true
 language.workspace = true
 linkify.workspace = true
 log.workspace = true
+markdown.workspace = true
 markup5ever_rcdom.workspace = true
 pretty_assertions.workspace = true
 pulldown-cmark.workspace = true

crates/markdown_preview/src/markdown_parser.rs 🔗

@@ -7,8 +7,9 @@ use collections::FxHashMap;
 use gpui::{DefiniteLength, FontWeight, px, relative};
 use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink};
 use language::LanguageRegistry;
+use markdown::parser::PARSE_OPTIONS;
 use markup5ever_rcdom::RcDom;
-use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd};
+use pulldown_cmark::{Alignment, Event, Parser, Tag, TagEnd};
 use std::{
     cell::RefCell, collections::HashMap, mem, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec,
 };
@@ -19,10 +20,7 @@ pub async fn parse_markdown(
     file_location_directory: Option<PathBuf>,
     language_registry: Option<Arc<LanguageRegistry>>,
 ) -> ParsedMarkdown {
-    let mut options = Options::all();
-    options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST);
-
-    let parser = Parser::new_ext(markdown_input, options);
+    let parser = Parser::new_ext(markdown_input, PARSE_OPTIONS);
     let parser = MarkdownParser::new(
         parser.into_offset_iter().collect(),
         file_location_directory,
@@ -3076,6 +3074,26 @@ More text
         );
     }
 
+    #[gpui::test]
+    async fn test_dollar_signs_are_plain_text() {
+        // Dollar signs should be preserved as plain text, not treated as math delimiters.
+        // Regression test for https://github.com/zed-industries/zed/issues/50170
+        let parsed = parse("$100$ per unit").await;
+        assert_eq!(parsed.children, vec![p("$100$ per unit", 0..14)]);
+    }
+
+    #[gpui::test]
+    async fn test_dollar_signs_in_list_items() {
+        let parsed = parse("- $18,000 budget\n- $20,000 budget\n").await;
+        assert_eq!(
+            parsed.children,
+            vec![
+                list_item(0..16, 1, Unordered, vec![p("$18,000 budget", 2..16)]),
+                list_item(17..33, 1, Unordered, vec![p("$20,000 budget", 19..33)]),
+            ]
+        );
+    }
+
     #[gpui::test]
     async fn test_code_block() {
         let parsed = parse(

crates/migrator/src/migrations.rs 🔗

@@ -275,6 +275,12 @@ pub(crate) mod m_2025_12_15 {
     pub(crate) use settings::SETTINGS_PATTERNS;
 }
 
+pub(crate) mod m_2025_01_27 {
+    mod settings;
+
+    pub(crate) use settings::make_auto_indent_an_enum;
+}
+
 pub(crate) mod m_2026_02_02 {
     mod settings;
 
@@ -292,3 +298,9 @@ pub(crate) mod m_2026_02_04 {
 
     pub(crate) use settings::migrate_tool_permission_defaults;
 }
+
+pub(crate) mod m_2026_02_25 {
+    mod settings;
+
+    pub(crate) use settings::migrate_builtin_agent_servers_to_registry;
+}

crates/migrator/src/migrations/m_2025_01_27/settings.rs 🔗

@@ -0,0 +1,27 @@
+use anyhow::Result;
+use serde_json::Value;
+
+use crate::migrations::migrate_language_setting;
+
+pub fn make_auto_indent_an_enum(value: &mut Value) -> Result<()> {
+    migrate_language_setting(value, migrate_auto_indent)
+}
+
+fn migrate_auto_indent(value: &mut Value, _path: &[&str]) -> Result<()> {
+    let Some(auto_indent) = value
+        .as_object_mut()
+        .and_then(|obj| obj.get_mut("auto_indent"))
+    else {
+        return Ok(());
+    };
+
+    *auto_indent = match auto_indent {
+        Value::Bool(true) => Value::String("syntax_aware".to_string()),
+        Value::Bool(false) => Value::String("none".to_string()),
+        Value::String(s) if s == "syntax_aware" || s == "preserve_indent" || s == "none" => {
+            return Ok(());
+        }
+        _ => anyhow::bail!("Expected auto_indent to be a boolean or valid enum value"),
+    };
+    Ok(())
+}

crates/migrator/src/migrations/m_2026_02_25/settings.rs 🔗

@@ -0,0 +1,161 @@
+use anyhow::Result;
+use serde_json::Value;
+
+use crate::migrations::migrate_settings;
+
+const AGENT_SERVERS_KEY: &str = "agent_servers";
+
+struct BuiltinMapping {
+    old_key: &'static str,
+    registry_key: &'static str,
+}
+
+const BUILTIN_MAPPINGS: &[BuiltinMapping] = &[
+    BuiltinMapping {
+        old_key: "gemini",
+        registry_key: "gemini",
+    },
+    BuiltinMapping {
+        old_key: "claude",
+        registry_key: "claude-acp",
+    },
+    BuiltinMapping {
+        old_key: "codex",
+        registry_key: "codex-acp",
+    },
+];
+
+const REGISTRY_COMPATIBLE_FIELDS: &[&str] = &[
+    "env",
+    "default_mode",
+    "default_model",
+    "favorite_models",
+    "default_config_options",
+    "favorite_config_option_values",
+];
+
+pub fn migrate_builtin_agent_servers_to_registry(value: &mut Value) -> Result<()> {
+    migrate_settings(value, &mut migrate_one)
+}
+
+fn migrate_one(obj: &mut serde_json::Map<String, Value>) -> Result<()> {
+    let Some(agent_servers) = obj.get_mut(AGENT_SERVERS_KEY) else {
+        return Ok(());
+    };
+    let Some(servers_map) = agent_servers.as_object_mut() else {
+        return Ok(());
+    };
+
+    for mapping in BUILTIN_MAPPINGS {
+        migrate_builtin_entry(servers_map, mapping);
+    }
+
+    Ok(())
+}
+
+fn migrate_builtin_entry(
+    servers_map: &mut serde_json::Map<String, Value>,
+    mapping: &BuiltinMapping,
+) {
+    // Check if the old key exists and needs migration before taking ownership.
+    let needs_migration = servers_map
+        .get(mapping.old_key)
+        .and_then(|v| v.as_object())
+        .is_some_and(|obj| !obj.contains_key("type"));
+
+    if !needs_migration {
+        return;
+    }
+
+    // When the registry key differs from the old key and the target already
+    // exists, just remove the stale old entry to avoid overwriting user data.
+    if mapping.old_key != mapping.registry_key && servers_map.contains_key(mapping.registry_key) {
+        servers_map.remove(mapping.old_key);
+        return;
+    }
+
+    let Some(old_entry) = servers_map.remove(mapping.old_key) else {
+        return;
+    };
+    let Some(old_obj) = old_entry.as_object() else {
+        return;
+    };
+
+    let has_command = old_obj.contains_key("command");
+    let ignore_system_version = old_obj
+        .get("ignore_system_version")
+        .and_then(|v| v.as_bool());
+
+    // A custom entry is needed when the user configured a custom binary
+    // or explicitly opted into using the system version via
+    // `ignore_system_version: false` (only meaningful for gemini).
+    let needs_custom = has_command
+        || (mapping.old_key == "gemini" && matches!(ignore_system_version, Some(false)));
+
+    if needs_custom {
+        let local_key = format!("{}-custom", mapping.registry_key);
+
+        // Don't overwrite an existing `-custom` entry.
+        if servers_map.contains_key(&local_key) {
+            return;
+        }
+
+        let mut custom_obj = serde_json::Map::new();
+        custom_obj.insert("type".to_string(), Value::String("custom".to_string()));
+
+        if has_command {
+            if let Some(command) = old_obj.get("command") {
+                custom_obj.insert("command".to_string(), command.clone());
+            }
+            if let Some(args) = old_obj.get("args") {
+                if !args.as_array().is_some_and(|a| a.is_empty()) {
+                    custom_obj.insert("args".to_string(), args.clone());
+                }
+            }
+        } else {
+            // ignore_system_version: false — the user wants the binary from $PATH
+            custom_obj.insert(
+                "command".to_string(),
+                Value::String(mapping.old_key.to_string()),
+            );
+        }
+
+        // Carry over all compatible fields to the custom entry.
+        for &field in REGISTRY_COMPATIBLE_FIELDS {
+            if let Some(value) = old_obj.get(field) {
+                match value {
+                    Value::Array(arr) if arr.is_empty() => continue,
+                    Value::Object(map) if map.is_empty() => continue,
+                    Value::Null => continue,
+                    _ => {
+                        custom_obj.insert(field.to_string(), value.clone());
+                    }
+                }
+            }
+        }
+
+        servers_map.insert(local_key, Value::Object(custom_obj));
+    } else {
+        // Build a registry entry with compatible fields only.
+        let mut registry_obj = serde_json::Map::new();
+        registry_obj.insert("type".to_string(), Value::String("registry".to_string()));
+
+        for &field in REGISTRY_COMPATIBLE_FIELDS {
+            if let Some(value) = old_obj.get(field) {
+                match value {
+                    Value::Array(arr) if arr.is_empty() => continue,
+                    Value::Object(map) if map.is_empty() => continue,
+                    Value::Null => continue,
+                    _ => {
+                        registry_obj.insert(field.to_string(), value.clone());
+                    }
+                }
+            }
+        }
+
+        servers_map.insert(
+            mapping.registry_key.to_string(),
+            Value::Object(registry_obj),
+        );
+    }
+}

crates/migrator/src/migrator.rs 🔗

@@ -232,11 +232,13 @@ pub fn migrate_settings(text: &str) -> Result<Option<String>> {
             migrations::m_2025_12_15::SETTINGS_PATTERNS,
             &SETTINGS_QUERY_2025_12_15,
         ),
+        MigrationType::Json(migrations::m_2025_01_27::make_auto_indent_an_enum),
         MigrationType::Json(
             migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions,
         ),
         MigrationType::Json(migrations::m_2026_02_03::migrate_experimental_sweep_mercury),
         MigrationType::Json(migrations::m_2026_02_04::migrate_tool_permission_defaults),
+        MigrationType::Json(migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry),
     ];
     run_migrations(text, migrations)
 }
@@ -2605,6 +2607,91 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_make_auto_indent_an_enum() {
+        // Empty settings should not change
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2025_01_27::make_auto_indent_an_enum,
+            )],
+            &r#"{ }"#.unindent(),
+            None,
+        );
+
+        // true should become "syntax_aware"
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2025_01_27::make_auto_indent_an_enum,
+            )],
+            &r#"{
+                "auto_indent": true
+            }"#
+            .unindent(),
+            Some(
+                &r#"{
+                "auto_indent": "syntax_aware"
+            }"#
+                .unindent(),
+            ),
+        );
+
+        // false should become "none"
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2025_01_27::make_auto_indent_an_enum,
+            )],
+            &r#"{
+                "auto_indent": false
+            }"#
+            .unindent(),
+            Some(
+                &r#"{
+                "auto_indent": "none"
+            }"#
+                .unindent(),
+            ),
+        );
+
+        // Already valid enum values should not change
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2025_01_27::make_auto_indent_an_enum,
+            )],
+            &r#"{
+                "auto_indent": "preserve_indent"
+            }"#
+            .unindent(),
+            None,
+        );
+
+        // Should also work inside languages
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2025_01_27::make_auto_indent_an_enum,
+            )],
+            &r#"{
+                "auto_indent": true,
+                "languages": {
+                    "Python": {
+                        "auto_indent": false
+                    }
+                }
+            }"#
+            .unindent(),
+            Some(
+                &r#"{
+                    "auto_indent": "syntax_aware",
+                    "languages": {
+                        "Python": {
+                            "auto_indent": "none"
+                        }
+                    }
+                }"#
+                .unindent(),
+            ),
+        );
+    }
+
     #[test]
     fn test_move_edit_prediction_provider_to_edit_predictions() {
         assert_migrate_settings_with_migrations(
@@ -3820,4 +3907,415 @@ mod tests {
             ),
         );
     }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_to_registry_simple() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {
+            "default_model": "gemini-2.0-flash"
+        },
+        "claude": {
+            "default_mode": "plan"
+        },
+        "codex": {
+            "default_model": "o4-mini"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "codex-acp": {
+            "type": "registry",
+            "default_model": "o4-mini"
+        },
+        "claude-acp": {
+            "type": "registry",
+            "default_mode": "plan"
+        },
+        "gemini": {
+            "type": "registry",
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_empty_entries() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {},
+        "claude": {},
+        "codex": {}
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "codex-acp": {
+            "type": "registry"
+        },
+        "claude-acp": {
+            "type": "registry"
+        },
+        "gemini": {
+            "type": "registry"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_with_command() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "claude": {
+            "command": "/usr/local/bin/claude",
+            "args": ["--verbose"],
+            "env": {"CLAUDE_KEY": "abc123"},
+            "default_mode": "plan",
+            "default_model": "claude-sonnet-4"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "claude-acp-custom": {
+            "type": "custom",
+            "command": "/usr/local/bin/claude",
+            "args": [
+                "--verbose"
+            ],
+            "env": {
+                "CLAUDE_KEY": "abc123"
+            },
+            "default_mode": "plan",
+            "default_model": "claude-sonnet-4"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_gemini_with_command() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {
+            "command": "/opt/gemini/bin/gemini",
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "gemini-custom": {
+            "type": "custom",
+            "command": "/opt/gemini/bin/gemini",
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_gemini_ignore_system_version_false() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {
+            "ignore_system_version": false,
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "gemini-custom": {
+            "type": "custom",
+            "command": "gemini",
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_gemini_ignore_system_version_true() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {
+            "ignore_system_version": true,
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "gemini": {
+            "type": "registry",
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_already_typed_unchanged() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {
+            "type": "registry",
+            "default_model": "gemini-2.0-flash"
+        },
+        "claude-acp": {
+            "type": "registry",
+            "default_mode": "plan"
+        }
+    }
+}"#,
+            None,
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_preserves_custom_entries() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "claude": {
+            "default_mode": "plan"
+        },
+        "my-custom-agent": {
+            "type": "custom",
+            "command": "/path/to/agent"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "claude-acp": {
+            "type": "registry",
+            "default_mode": "plan"
+        },
+        "my-custom-agent": {
+            "type": "custom",
+            "command": "/path/to/agent"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_target_already_exists() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "claude": {
+            "default_mode": "plan"
+        },
+        "claude-acp": {
+            "type": "registry",
+            "default_model": "claude-sonnet-4"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "claude-acp": {
+            "type": "registry",
+            "default_model": "claude-sonnet-4"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_no_agent_servers_key() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent": {
+        "enabled": true
+    }
+}"#,
+            None,
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_all_fields() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "codex": {
+            "env": {"OPENAI_API_KEY": "sk-123"},
+            "default_mode": "read-only",
+            "default_model": "o4-mini",
+            "favorite_models": ["o4-mini", "codex-mini-latest"],
+            "default_config_options": {"approval_mode": "auto-edit"},
+            "favorite_config_option_values": {"approval_mode": ["auto-edit", "suggest"]}
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "codex-acp": {
+            "type": "registry",
+            "env": {
+                "OPENAI_API_KEY": "sk-123"
+            },
+            "default_mode": "read-only",
+            "default_model": "o4-mini",
+            "favorite_models": [
+                "o4-mini",
+                "codex-mini-latest"
+            ],
+            "default_config_options": {
+                "approval_mode": "auto-edit"
+            },
+            "favorite_config_option_values": {
+                "approval_mode": [
+                    "auto-edit",
+                    "suggest"
+                ]
+            }
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_codex_with_command() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "codex": {
+            "command": "/usr/local/bin/codex",
+            "args": ["--full-auto"],
+            "default_model": "o4-mini"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "codex-acp-custom": {
+            "type": "custom",
+            "command": "/usr/local/bin/codex",
+            "args": [
+                "--full-auto"
+            ],
+            "default_model": "o4-mini"
+        }
+    }
+}"#,
+            ),
+        );
+    }
+
+    #[test]
+    fn test_migrate_builtin_agent_servers_mixed_migrated_and_not() {
+        assert_migrate_settings_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry,
+            )],
+            r#"{
+    "agent_servers": {
+        "gemini": {
+            "type": "registry",
+            "default_model": "gemini-2.0-flash"
+        },
+        "claude": {
+            "default_mode": "plan"
+        },
+        "codex": {}
+    }
+}"#,
+            Some(
+                r#"{
+    "agent_servers": {
+        "codex-acp": {
+            "type": "registry"
+        },
+        "claude-acp": {
+            "type": "registry",
+            "default_mode": "plan"
+        },
+        "gemini": {
+            "type": "registry",
+            "default_model": "gemini-2.0-flash"
+        }
+    }
+}"#,
+            ),
+        );
+    }
 }

crates/miniprofiler_ui/src/miniprofiler_ui.rs 🔗

@@ -464,7 +464,7 @@ impl Render for ProfilerWindow {
 
         let scroll_offset = self.scroll_handle.offset();
         let max_offset = self.scroll_handle.max_offset();
-        self.autoscroll = -scroll_offset.y >= (max_offset.height - px(24.));
+        self.autoscroll = -scroll_offset.y >= (max_offset.y - px(24.));
         if self.autoscroll {
             self.scroll_handle.scroll_to_bottom();
         }
@@ -544,7 +544,7 @@ impl Render for ProfilerWindow {
 
                                         let path = cx.prompt_for_new_path(
                                             &active_path,
-                                            Some("performance_profile.miniprof"),
+                                            Some("performance_profile.miniprof.json"),
                                         );
 
                                         cx.background_spawn(async move {

crates/mistral/src/mistral.rs 🔗

@@ -233,6 +233,8 @@ pub struct Request {
     pub messages: Vec<RequestMessage>,
     pub stream: bool,
     #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub stream_options: Option<StreamOptions>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
     pub max_tokens: Option<u64>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub temperature: Option<f32>,
@@ -246,6 +248,12 @@ pub struct Request {
     pub tools: Vec<ToolDefinition>,
 }
 
+#[derive(Debug, Serialize, Deserialize)]
+pub struct StreamOptions {
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub stream_tool_calls: Option<bool>,
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum ResponseFormat {

crates/multi_buffer/src/multi_buffer.rs 🔗

@@ -105,6 +105,8 @@ pub enum Event {
     },
     ExcerptsRemoved {
         ids: Vec<ExcerptId>,
+        /// Contains only buffer IDs for which all excerpts have been removed.
+        /// Buffers that still have remaining excerpts are never included.
         removed_buffer_ids: Vec<BufferId>,
     },
     ExcerptsExpanded {
@@ -624,7 +626,7 @@ pub struct MultiBufferSnapshot {
     diffs: TreeMap<BufferId, DiffStateSnapshot>,
     diff_transforms: SumTree<DiffTransform>,
     excerpt_ids: SumTree<ExcerptIdMapping>,
-    replaced_excerpts: TreeMap<ExcerptId, ExcerptId>,
+    replaced_excerpts: Arc<HashMap<ExcerptId, ExcerptId>>,
     non_text_state_update_count: usize,
     edit_count: usize,
     is_dirty: bool,
@@ -1160,12 +1162,11 @@ impl MultiBuffer {
             },
         );
         this.singleton = true;
-        let buffer_id = buffer.read(cx).remote_id();
-        this.push_excerpts(
-            buffer,
-            [ExcerptRange::new(text::Anchor::min_max_range_for_buffer(
-                buffer_id,
-            ))],
+        this.set_excerpts_for_path(
+            PathKey::sorted(0),
+            buffer.clone(),
+            [Point::zero()..buffer.read(cx).max_point()],
+            0,
             cx,
         );
         this
@@ -1734,18 +1735,6 @@ impl MultiBuffer {
         }
     }
 
-    pub fn push_excerpts<O>(
-        &mut self,
-        buffer: Entity<Buffer>,
-        ranges: impl IntoIterator<Item = ExcerptRange<O>>,
-        cx: &mut Context<Self>,
-    ) -> Vec<ExcerptId>
-    where
-        O: text::ToOffset,
-    {
-        self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx)
-    }
-
     #[instrument(skip_all)]
     fn merge_excerpt_ranges<'a>(
         expanded_ranges: impl IntoIterator<Item = &'a ExcerptRange<Point>> + 'a,
@@ -1967,7 +1956,10 @@ impl MultiBuffer {
         *has_deleted_file = false;
         *has_conflict = false;
         *has_inverted_diff = false;
-        replaced_excerpts.clear();
+        match Arc::get_mut(replaced_excerpts) {
+            Some(replaced_excerpts) => replaced_excerpts.clear(),
+            None => *replaced_excerpts = Default::default(),
+        }
 
         let edits = Self::sync_diff_transforms(
             self.snapshot.get_mut(),
@@ -3746,11 +3738,21 @@ impl MultiBuffer {
         cx: &mut gpui::App,
     ) -> Entity<Self> {
         let multi = cx.new(|_| Self::new(Capability::ReadWrite));
-        for (text, ranges) in excerpts {
+        for (ix, (text, ranges)) in excerpts.into_iter().enumerate() {
             let buffer = cx.new(|cx| Buffer::local(text, cx));
-            let excerpt_ranges = ranges.into_iter().map(ExcerptRange::new);
+            let snapshot = buffer.read(cx).snapshot();
+            let excerpt_ranges = ranges
+                .into_iter()
+                .map(ExcerptRange::new)
+                .collect::<Vec<_>>();
             multi.update(cx, |multi, cx| {
-                multi.push_excerpts(buffer, excerpt_ranges, cx)
+                multi.set_excerpt_ranges_for_path(
+                    PathKey::sorted(ix as u64),
+                    buffer,
+                    &snapshot,
+                    excerpt_ranges,
+                    cx,
+                )
             });
         }
 
@@ -3884,7 +3886,8 @@ impl MultiBuffer {
                         .collect::<Vec<_>>()
                 );
 
-                let excerpt_id = self.push_excerpts(buffer_handle.clone(), ranges, cx);
+                let excerpt_id =
+                    self.insert_excerpts_after(ExcerptId::max(), buffer_handle, ranges, cx);
                 log::info!("Inserted with ids: {:?}", excerpt_id);
             } else {
                 let remove_count = rng.random_range(1..=excerpt_ids.len());
@@ -6938,18 +6941,23 @@ impl MultiBufferSnapshot {
     }
 
     fn excerpt_locator_for_id(&self, id: ExcerptId) -> &Locator {
+        self.try_excerpt_locator_for_id(id)
+            .unwrap_or_else(|| panic!("invalid excerpt id {id:?}"))
+    }
+
+    fn try_excerpt_locator_for_id(&self, id: ExcerptId) -> Option<&Locator> {
         if id == ExcerptId::min() {
-            Locator::min_ref()
+            Some(Locator::min_ref())
         } else if id == ExcerptId::max() {
-            Locator::max_ref()
+            Some(Locator::max_ref())
         } else {
             let (_, _, item) = self.excerpt_ids.find::<ExcerptId, _>((), &id, Bias::Left);
             if let Some(entry) = item
                 && entry.id == id
             {
-                return &entry.locator;
+                return Some(&entry.locator);
             }
-            panic!("invalid excerpt id {id:?}")
+            None
         }
     }
 
@@ -7034,7 +7042,7 @@ impl MultiBufferSnapshot {
     /// afterwards.
     fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> {
         let excerpt_id = self.latest_excerpt_id(excerpt_id);
-        let locator = self.excerpt_locator_for_id(excerpt_id);
+        let locator = self.try_excerpt_locator_for_id(excerpt_id)?;
         let (_, _, item) =
             self.excerpts
                 .find::<Option<&Locator>, _>((), &Some(locator), Bias::Left);

crates/multi_buffer/src/multi_buffer_tests.rs 🔗

@@ -105,8 +105,8 @@ fn test_remote(cx: &mut App) {
 
 #[gpui::test]
 fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
-    let buffer_1 = cx.new(|cx| Buffer::local(sample_text(6, 6, 'a'), cx));
-    let buffer_2 = cx.new(|cx| Buffer::local(sample_text(6, 6, 'g'), cx));
+    let buffer_1 = cx.new(|cx| Buffer::local(sample_text(7, 6, 'a'), cx));
+    let buffer_2 = cx.new(|cx| Buffer::local(sample_text(7, 6, 'g'), cx));
     let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
 
     let events = Arc::new(RwLock::new(Vec::<Event>::new()));
@@ -122,9 +122,11 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
 
     let subscription = multibuffer.update(cx, |multibuffer, cx| {
         let subscription = multibuffer.subscribe();
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(1, 2)..Point::new(2, 5))],
+            &buffer_1.read(cx).snapshot(),
+            vec![ExcerptRange::new(Point::new(1, 2)..Point::new(2, 5))],
             cx,
         );
         assert_eq!(
@@ -135,14 +137,21 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
             }]
         );
 
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(3, 3)..Point::new(4, 4))],
+            &buffer_1.read(cx).snapshot(),
+            vec![
+                ExcerptRange::new(Point::new(1, 2)..Point::new(2, 5)),
+                ExcerptRange::new(Point::new(5, 3)..Point::new(6, 4)),
+            ],
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(3, 1)..Point::new(3, 3))],
+            &buffer_2.read(cx).snapshot(),
+            vec![ExcerptRange::new(Point::new(3, 1)..Point::new(3, 3))],
             cx,
         );
         assert_eq!(
@@ -179,8 +188,8 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
             "
             bbbb
             ccccc
-            ddd
-            eeee
+            fff
+            gggg
             jj"
         ),
     );
@@ -189,14 +198,14 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
             .row_infos(MultiBufferRow(0))
             .map(|info| info.buffer_row)
             .collect::<Vec<_>>(),
-        [Some(1), Some(2), Some(3), Some(4), Some(3)]
+        [Some(1), Some(2), Some(5), Some(6), Some(3)]
     );
     assert_eq!(
         snapshot
             .row_infos(MultiBufferRow(2))
             .map(|info| info.buffer_row)
             .collect::<Vec<_>>(),
-        [Some(3), Some(4), Some(3)]
+        [Some(5), Some(6), Some(3)]
     );
     assert_eq!(
         snapshot
@@ -217,7 +226,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
         boundaries_in_range(Point::new(0, 0)..Point::new(4, 2), &snapshot),
         &[
             (MultiBufferRow(0), "bbbb\nccccc".to_string(), true),
-            (MultiBufferRow(2), "ddd\neeee".to_string(), false),
+            (MultiBufferRow(2), "fff\ngggg".to_string(), false),
             (MultiBufferRow(4), "jj".to_string(), true),
         ]
     );
@@ -235,15 +244,15 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
     );
     assert_eq!(
         boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot),
-        &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)]
+        &[(MultiBufferRow(2), "fff\ngggg".to_string(), false)]
     );
     assert_eq!(
         boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot),
-        &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)]
+        &[(MultiBufferRow(2), "fff\ngggg".to_string(), false)]
     );
     assert_eq!(
         boundaries_in_range(Point::new(2, 0)..Point::new(3, 0), &snapshot),
-        &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)]
+        &[(MultiBufferRow(2), "fff\ngggg".to_string(), false)]
     );
     assert_eq!(
         boundaries_in_range(Point::new(4, 0)..Point::new(4, 2), &snapshot),
@@ -273,8 +282,8 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
             "bbbb\n", // Preserve newlines
             "c\n",    //
             "cc\n",   //
-            "ddd\n",  //
-            "eeee\n", //
+            "fff\n",  //
+            "gggg\n", //
             "jj"      //
         )
     );
@@ -310,9 +319,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
     );
 
     let snapshot = multibuffer.update(cx, |multibuffer, cx| {
-        let (buffer_2_excerpt_id, _) =
-            multibuffer.excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx)[0].clone();
-        multibuffer.remove_excerpts([buffer_2_excerpt_id], cx);
+        multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx);
         multibuffer.snapshot(cx)
     });
 
@@ -322,8 +329,8 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
             "bbbb\n", // Preserve newlines
             "c\n",    //
             "cc\n",   //
-            "ddd\n",  //
-            "eeee",   //
+            "fff\n",  //
+            "gggg",   //
         )
     );
 
@@ -747,18 +754,29 @@ fn test_excerpt_events(cx: &mut App) {
         .detach();
     });
 
+    let buffer_1_snapshot = buffer_1.read(cx).snapshot();
+    let buffer_2_snapshot = buffer_2.read(cx).snapshot();
     leader_multibuffer.update(cx, |leader, cx| {
-        leader.push_excerpts(
+        leader.set_excerpt_ranges_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(0..8), ExcerptRange::new(12..16)],
+            &buffer_1_snapshot,
+            vec![
+                ExcerptRange::new((0..8).to_point(&buffer_1_snapshot)),
+                ExcerptRange::new((22..26).to_point(&buffer_1_snapshot)),
+            ],
             cx,
         );
-        leader.insert_excerpts_after(
-            leader.excerpt_ids()[0],
+        leader.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(0..5), ExcerptRange::new(10..15)],
+            &buffer_2_snapshot,
+            vec![
+                ExcerptRange::new((0..5).to_point(&buffer_2_snapshot)),
+                ExcerptRange::new((20..25).to_point(&buffer_2_snapshot)),
+            ],
             cx,
-        )
+        );
     });
     assert_eq!(
         leader_multibuffer.read(cx).snapshot(cx).text(),
@@ -767,34 +785,26 @@ fn test_excerpt_events(cx: &mut App) {
     assert_eq!(*follower_edit_event_count.read(), 2);
 
     leader_multibuffer.update(cx, |leader, cx| {
-        let excerpt_ids = leader.excerpt_ids();
-        leader.remove_excerpts([excerpt_ids[1], excerpt_ids[3]], cx);
-    });
-    assert_eq!(
-        leader_multibuffer.read(cx).snapshot(cx).text(),
-        follower_multibuffer.read(cx).snapshot(cx).text(),
-    );
-    assert_eq!(*follower_edit_event_count.read(), 3);
-
-    // Removing an empty set of excerpts is a noop.
-    leader_multibuffer.update(cx, |leader, cx| {
-        leader.remove_excerpts([], cx);
-    });
-    assert_eq!(
-        leader_multibuffer.read(cx).snapshot(cx).text(),
-        follower_multibuffer.read(cx).snapshot(cx).text(),
-    );
-    assert_eq!(*follower_edit_event_count.read(), 3);
-
-    // Adding an empty set of excerpts is a noop.
-    leader_multibuffer.update(cx, |leader, cx| {
-        leader.push_excerpts::<usize>(buffer_2.clone(), [], cx);
+        leader.set_excerpt_ranges_for_path(
+            PathKey::sorted(0),
+            buffer_1.clone(),
+            &buffer_1_snapshot,
+            vec![ExcerptRange::new((0..8).to_point(&buffer_1_snapshot))],
+            cx,
+        );
+        leader.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
+            buffer_2,
+            &buffer_2_snapshot,
+            vec![ExcerptRange::new((0..5).to_point(&buffer_2_snapshot))],
+            cx,
+        );
     });
     assert_eq!(
         leader_multibuffer.read(cx).snapshot(cx).text(),
         follower_multibuffer.read(cx).snapshot(cx).text(),
     );
-    assert_eq!(*follower_edit_event_count.read(), 3);
+    assert_eq!(*follower_edit_event_count.read(), 4);
 
     leader_multibuffer.update(cx, |leader, cx| {
         leader.clear(cx);
@@ -803,7 +813,7 @@ fn test_excerpt_events(cx: &mut App) {
         leader_multibuffer.read(cx).snapshot(cx).text(),
         follower_multibuffer.read(cx).snapshot(cx).text(),
     );
-    assert_eq!(*follower_edit_event_count.read(), 4);
+    assert_eq!(*follower_edit_event_count.read(), 5);
 }
 
 #[gpui::test]
@@ -1013,7 +1023,13 @@ async fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
     let diff = cx
         .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx));
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.push_excerpts(buffer.clone(), [ExcerptRange::new(0..0)], cx);
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(0),
+            buffer.clone(),
+            &buffer.read(cx).snapshot(),
+            vec![ExcerptRange::new(Point::zero()..Point::zero())],
+            cx,
+        );
         multibuffer.set_all_diff_hunks_expanded(cx);
         multibuffer.add_diff(diff.clone(), cx);
     });
@@ -1031,7 +1047,13 @@ async fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
 
     let buf2 = cx.new(|cx| Buffer::local("X", cx));
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.push_excerpts(buf2, [ExcerptRange::new(0..1)], cx);
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
+            buf2,
+            [Point::new(0, 0)..Point::new(0, 1)],
+            0,
+            cx,
+        );
     });
 
     buffer.update(cx, |buffer, cx| {
@@ -1105,8 +1127,20 @@ fn test_multibuffer_anchors(cx: &mut App) {
     let buffer_2 = cx.new(|cx| Buffer::local("efghi", cx));
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-        multibuffer.push_excerpts(buffer_1.clone(), [ExcerptRange::new(0..4)], cx);
-        multibuffer.push_excerpts(buffer_2.clone(), [ExcerptRange::new(0..5)], cx);
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
+            buffer_1.clone(),
+            [Point::new(0, 0)..Point::new(0, 4)],
+            0,
+            cx,
+        );
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
+            buffer_2.clone(),
+            [Point::new(0, 0)..Point::new(0, 5)],
+            0,
+            cx,
+        );
         multibuffer
     });
     let old_snapshot = multibuffer.read(cx).snapshot(cx);
@@ -1219,29 +1253,39 @@ fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) {
     // Add an excerpt from buffer 1 that spans this new insertion.
     buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx));
     let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer
-            .push_excerpts(buffer_1.clone(), [ExcerptRange::new(0..7)], cx)
-            .pop()
-            .unwrap()
+        let buffer_1_snapshot = buffer_1.read(cx).snapshot();
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(0),
+            buffer_1,
+            &buffer_1_snapshot,
+            vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))],
+            cx,
+        );
+        multibuffer.excerpt_ids().into_iter().next().unwrap()
     });
 
     let snapshot_1 = multibuffer.read(cx).snapshot(cx);
     assert_eq!(snapshot_1.text(), "abcd123");
 
     // Replace the buffer 1 excerpt with new excerpts from buffer 2.
-    let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.remove_excerpts([excerpt_id_1], cx);
+    let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx);
+        let snapshot_2 = buffer_2.read(cx).snapshot();
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
+            buffer_2.clone(),
+            &buffer_2.read(cx).snapshot(),
+            vec![
+                ExcerptRange::new((0..4).to_point(&snapshot_2)),
+                ExcerptRange::new((6..10).to_point(&snapshot_2)),
+                ExcerptRange::new((12..16).to_point(&snapshot_2)),
+            ],
+            cx,
+        );
         let mut ids = multibuffer
-            .push_excerpts(
-                buffer_2.clone(),
-                [
-                    ExcerptRange::new(0..4),
-                    ExcerptRange::new(6..10),
-                    ExcerptRange::new(12..16),
-                ],
-                cx,
-            )
-            .into_iter();
+            .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx)
+            .into_iter()
+            .map(|(id, _)| id);
         (ids.next().unwrap(), ids.next().unwrap())
     });
     let snapshot_2 = multibuffer.read(cx).snapshot(cx);
@@ -1283,22 +1327,33 @@ fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) {
 
     // Replace the middle excerpt with a smaller excerpt in buffer 2,
     // that intersects the old excerpt.
-    let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.remove_excerpts([excerpt_id_3], cx);
-        multibuffer
-            .insert_excerpts_after(
-                excerpt_id_2,
-                buffer_2.clone(),
-                [ExcerptRange::new(5..8)],
-                cx,
-            )
-            .pop()
-            .unwrap()
+    multibuffer.update(cx, |multibuffer, cx| {
+        let snapshot_2 = buffer_2.read(cx).snapshot();
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
+            buffer_2.clone(),
+            &buffer_2.read(cx).snapshot(),
+            vec![
+                ExcerptRange::new((0..4).to_point(&snapshot_2)),
+                ExcerptRange::new((12..16).to_point(&snapshot_2)),
+            ],
+            cx,
+        );
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
+            buffer_2.clone(),
+            &buffer_2.read(cx).snapshot(),
+            vec![
+                ExcerptRange::new((0..4).to_point(&snapshot_2)),
+                ExcerptRange::new((5..8).to_point(&snapshot_2)),
+                ExcerptRange::new((12..16).to_point(&snapshot_2)),
+            ],
+            cx,
+        );
     });
 
     let snapshot_3 = multibuffer.read(cx).snapshot(cx);
     assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP");
-    assert_ne!(excerpt_id_5, excerpt_id_3);
 
     // Resolve some anchors from the previous snapshot in the new snapshot.
     // The third anchor can't be resolved, since its excerpt has been removed,
@@ -2149,14 +2204,18 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
 
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
+            [Point::zero()..buffer_1.read(cx).max_point()],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
+            [Point::zero()..buffer_2.read(cx).max_point()],
+            0,
             cx,
         );
         multibuffer.add_diff(diff_1.clone(), cx);
@@ -3431,14 +3490,18 @@ fn test_history(cx: &mut App) {
         this.set_group_interval(group_interval);
     });
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(0..buffer_1.read(cx).len())],
+            [Point::zero()..buffer_1.read(cx).max_point()],
+            0,
             cx,
         );
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(0..buffer_2.read(cx).len())],
+            [Point::zero()..buffer_2.read(cx).max_point()],
+            0,
             cx,
         );
     });
@@ -3691,18 +3754,23 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) {
     let multibuffer = cx.new(|cx| {
         let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
         multibuffer.set_all_diff_hunks_expanded(cx);
-        ids.extend(multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
+            [Point::zero()..buffer_1.read(cx).max_point()],
+            0,
             cx,
-        ));
-        ids.extend(multibuffer.push_excerpts(
+        );
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
+            [Point::zero()..buffer_2.read(cx).max_point()],
+            0,
             cx,
-        ));
+        );
         multibuffer.add_diff(diff_1.clone(), cx);
         multibuffer.add_diff(diff_2.clone(), cx);
+        ids = multibuffer.excerpt_ids();
         multibuffer
     });
 
@@ -3747,7 +3815,14 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) {
     cx.run_until_parked();
 
     let multibuffer = cx.new(|cx| {
-        let mut multibuffer = MultiBuffer::singleton(buffer_1.clone(), cx);
+        let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
+            buffer_1.clone(),
+            [Point::zero()..buffer_1.read(cx).max_point()],
+            0,
+            cx,
+        );
         multibuffer.add_diff(diff_1.clone(), cx);
         multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
         multibuffer
@@ -3790,9 +3865,11 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) {
     let text_2 = "foo\n".to_owned();
     let buffer_2 = cx.new(|cx| Buffer::local(&text_2, cx));
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.push_excerpts(
+        multibuffer.set_excerpt_ranges_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+            &buffer_2.read(cx).snapshot(),
+            vec![ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
             cx,
         );
     });
@@ -4893,25 +4970,36 @@ fn test_excerpts_containment_functions(cx: &mut App) {
     let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
 
     let (excerpt_1_id, excerpt_2_id, excerpt_3_id) = multibuffer.update(cx, |multibuffer, cx| {
-        let excerpt_1_id = multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))],
+            [Point::new(0, 0)..Point::new(1, 3)],
+            0,
             cx,
-        )[0];
+        );
 
-        let excerpt_2_id = multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))],
+            [Point::new(0, 0)..Point::new(1, 3)],
+            0,
             cx,
-        )[0];
+        );
 
-        let excerpt_3_id = multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(2),
             buffer_3.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 3))],
+            [Point::new(0, 0)..Point::new(0, 3)],
+            0,
             cx,
-        )[0];
+        );
 
-        (excerpt_1_id, excerpt_2_id, excerpt_3_id)
+        let mut ids = multibuffer.excerpt_ids().into_iter();
+        (
+            ids.next().unwrap(),
+            ids.next().unwrap(),
+            ids.next().unwrap(),
+        )
     });
 
     let snapshot = multibuffer.read(cx).snapshot(cx);
@@ -4996,19 +5084,25 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) {
 
     let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
     let (excerpt_1_id, excerpt_2_id) = multibuffer.update(cx, |multibuffer, cx| {
-        let excerpt_1_id = multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
             buffer_1.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))],
+            [Point::new(0, 0)..Point::new(1, 3)],
+            0,
             cx,
-        )[0];
+        );
 
-        let excerpt_2_id = multibuffer.push_excerpts(
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(1),
             buffer_2.clone(),
-            [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 3))],
+            [Point::new(0, 0)..Point::new(0, 3)],
+            0,
             cx,
-        )[0];
+        );
 
-        (excerpt_1_id, excerpt_2_id)
+        let excerpt_ids = multibuffer.excerpt_ids();
+
+        (excerpt_ids[0], excerpt_ids[1])
     });
 
     let snapshot = multibuffer.read(cx).snapshot(cx);
@@ -5058,19 +5152,24 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) {
     let multibuffer_trailing_empty = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
     let (te_excerpt_1_id, te_excerpt_2_id) =
         multibuffer_trailing_empty.update(cx, |multibuffer, cx| {
-            let excerpt_1_id = multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer_1.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))],
+                [Point::new(0, 0)..Point::new(1, 3)],
+                0,
                 cx,
-            )[0];
+            );
 
-            let excerpt_2_id = multibuffer.push_excerpts(
+            multibuffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer_empty.clone(),
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 0))],
+                [Point::new(0, 0)..Point::new(0, 0)],
+                0,
                 cx,
-            )[0];
+            );
 
-            (excerpt_1_id, excerpt_2_id)
+            let excerpt_ids = multibuffer.excerpt_ids();
+            (excerpt_ids[0], excerpt_ids[1])
         });
 
     let snapshot_trailing = multibuffer_trailing_empty.read(cx).snapshot(cx);

crates/multi_buffer/src/path_key.rs 🔗

@@ -5,7 +5,7 @@ use gpui::{App, AppContext, Context, Entity};
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot};
 use rope::Point;
-use text::{Bias, BufferId, OffsetRangeExt, locator::Locator};
+use text::{Bias, OffsetRangeExt, locator::Locator};
 use util::{post_inc, rel_path::RelPath};
 use ztracing::instrument;
 
@@ -27,6 +27,12 @@ pub struct PathKey {
 }
 
 impl PathKey {
+    pub fn sorted(sort_prefix: u64) -> Self {
+        Self {
+            sort_prefix: Some(sort_prefix),
+            path: RelPath::empty().into_arc(),
+        }
+    }
     pub fn with_sort_prefix(sort_prefix: u64, path: Arc<RelPath>) -> Self {
         Self {
             sort_prefix: Some(sort_prefix),
@@ -86,6 +92,17 @@ impl MultiBuffer {
         Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start))
     }
 
+    pub fn set_excerpts_for_buffer(
+        &mut self,
+        buffer: Entity<Buffer>,
+        ranges: impl IntoIterator<Item = Range<Point>>,
+        context_line_count: u32,
+        cx: &mut Context<Self>,
+    ) -> (Vec<Range<Anchor>>, bool) {
+        let path = PathKey::for_buffer(&buffer, cx);
+        self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx)
+    }
+
     /// Sets excerpts, returns `true` if at least one new excerpt was added.
     #[instrument(skip_all)]
     pub fn set_excerpts_for_path(
@@ -172,15 +189,6 @@ impl MultiBuffer {
         }
     }
 
-    pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context<Self>) {
-        self.remove_excerpts(
-            self.excerpts_for_buffer(buffer, cx)
-                .into_iter()
-                .map(|(excerpt, _)| excerpt),
-            cx,
-        );
-    }
-
     pub(super) fn expand_excerpts_with_paths(
         &mut self,
         ids: impl IntoIterator<Item = ExcerptId>,
@@ -382,9 +390,7 @@ impl MultiBuffer {
                 {
                     last.context.end = last.context.end.max(existing_range.end);
                     to_remove.push(*existing_id);
-                    self.snapshot
-                        .get_mut()
-                        .replaced_excerpts
+                    Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts)
                         .insert(*existing_id, *last_id);
                     existing_iter.next();
                     continue;
@@ -462,9 +468,7 @@ impl MultiBuffer {
                 (Some(_), Some((_, existing_range))) => {
                     let existing_id = existing_iter.next().unwrap();
                     let new_id = next_excerpt_id();
-                    self.snapshot
-                        .get_mut()
-                        .replaced_excerpts
+                    Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts)
                         .insert(existing_id, new_id);
                     to_remove.push(existing_id);
                     let mut range = new_iter.next().unwrap();

crates/open_ai/src/open_ai.rs 🔗

@@ -88,6 +88,8 @@ pub enum Model {
     FivePointTwo,
     #[serde(rename = "gpt-5.2-codex")]
     FivePointTwoCodex,
+    #[serde(rename = "gpt-5.3-codex")]
+    FivePointThreeCodex,
     #[serde(rename = "custom")]
     Custom {
         name: String,
@@ -128,6 +130,7 @@ impl Model {
             "gpt-5.1" => Ok(Self::FivePointOne),
             "gpt-5.2" => Ok(Self::FivePointTwo),
             "gpt-5.2-codex" => Ok(Self::FivePointTwoCodex),
+            "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex),
             invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
         }
     }
@@ -149,6 +152,7 @@ impl Model {
             Self::FivePointOne => "gpt-5.1",
             Self::FivePointTwo => "gpt-5.2",
             Self::FivePointTwoCodex => "gpt-5.2-codex",
+            Self::FivePointThreeCodex => "gpt-5.3-codex",
             Self::Custom { name, .. } => name,
         }
     }
@@ -170,6 +174,7 @@ impl Model {
             Self::FivePointOne => "gpt-5.1",
             Self::FivePointTwo => "gpt-5.2",
             Self::FivePointTwoCodex => "gpt-5.2-codex",
+            Self::FivePointThreeCodex => "gpt-5.3-codex",
             Self::Custom { display_name, .. } => display_name.as_deref().unwrap_or(&self.id()),
         }
     }
@@ -191,6 +196,7 @@ impl Model {
             Self::FivePointOne => 400_000,
             Self::FivePointTwo => 400_000,
             Self::FivePointTwoCodex => 400_000,
+            Self::FivePointThreeCodex => 400_000,
             Self::Custom { max_tokens, .. } => *max_tokens,
         }
     }
@@ -215,6 +221,7 @@ impl Model {
             Self::FivePointOne => Some(128_000),
             Self::FivePointTwo => Some(128_000),
             Self::FivePointTwoCodex => Some(128_000),
+            Self::FivePointThreeCodex => Some(128_000),
         }
     }
 
@@ -223,6 +230,7 @@ impl Model {
             Self::Custom {
                 reasoning_effort, ..
             } => reasoning_effort.to_owned(),
+            Self::FivePointThreeCodex => Some(ReasoningEffort::Medium),
             _ => None,
         }
     }
@@ -233,7 +241,7 @@ impl Model {
                 supports_chat_completions,
                 ..
             } => *supports_chat_completions,
-            Self::FiveCodex | Self::FivePointTwoCodex => false,
+            Self::FiveCodex | Self::FivePointTwoCodex | Self::FivePointThreeCodex => false,
             _ => true,
         }
     }
@@ -254,6 +262,7 @@ impl Model {
             | Self::FivePointOne
             | Self::FivePointTwo
             | Self::FivePointTwoCodex
+            | Self::FivePointThreeCodex
             | Self::FiveNano => true,
             Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. } => false,
         }

crates/paths/src/paths.rs 🔗

@@ -419,12 +419,6 @@ pub fn copilot_dir() -> &'static PathBuf {
     COPILOT_DIR.get_or_init(|| data_dir().join("copilot"))
 }
 
-/// Returns the path to the Supermaven directory.
-pub fn supermaven_dir() -> &'static PathBuf {
-    static SUPERMAVEN_DIR: OnceLock<PathBuf> = OnceLock::new();
-    SUPERMAVEN_DIR.get_or_init(|| data_dir().join("supermaven"))
-}
-
 /// Returns the path to the default Prettier directory.
 pub fn default_prettier_dir() -> &'static PathBuf {
     static DEFAULT_PRETTIER_DIR: OnceLock<PathBuf> = OnceLock::new();

crates/picker/Cargo.toml 🔗

@@ -28,8 +28,6 @@ workspace.workspace = true
 zed_actions.workspace = true
 
 [dev-dependencies]
-ctor.workspace = true
 editor = { workspace = true, features = ["test-support"] }
-env_logger.workspace = true
 gpui = { workspace = true, features = ["test-support"] }
-serde_json.workspace = true
+settings.workspace = true

crates/picker/src/picker.rs 🔗

@@ -114,7 +114,7 @@ pub trait PickerDelegate: Sized + 'static {
         None
     }
     fn can_select(
-        &mut self,
+        &self,
         _ix: usize,
         _window: &mut Window,
         _cx: &mut Context<Picker<Self>>,
@@ -619,6 +619,9 @@ impl<D: PickerDelegate> Picker<D> {
     ) {
         cx.stop_propagation();
         window.prevent_default();
+        if !self.delegate.can_select(ix, window, cx) {
+            return;
+        }
         self.set_selected_index(ix, None, false, window, cx);
         self.do_confirm(secondary, window, cx)
     }
@@ -753,10 +756,11 @@ impl<D: PickerDelegate> Picker<D> {
         ix: usize,
     ) -> impl IntoElement + use<D> {
         let item_bounds = self.item_bounds.clone();
+        let selectable = self.delegate.can_select(ix, window, cx);
 
         div()
             .id(("item", ix))
-            .cursor_pointer()
+            .when(selectable, |this| this.cursor_pointer())
             .child(
                 canvas(
                     move |bounds, _window, _cx| {
@@ -850,6 +854,175 @@ impl<D: PickerDelegate> Picker<D> {
     }
 }
 
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use gpui::TestAppContext;
+    use std::cell::Cell;
+
+    struct TestDelegate {
+        items: Vec<bool>,
+        selected_index: usize,
+        confirmed_index: Rc<Cell<Option<usize>>>,
+    }
+
+    impl TestDelegate {
+        fn new(items: Vec<bool>) -> Self {
+            Self {
+                items,
+                selected_index: 0,
+                confirmed_index: Rc::new(Cell::new(None)),
+            }
+        }
+    }
+
+    impl PickerDelegate for TestDelegate {
+        type ListItem = ui::ListItem;
+
+        fn match_count(&self) -> usize {
+            self.items.len()
+        }
+
+        fn selected_index(&self) -> usize {
+            self.selected_index
+        }
+
+        fn set_selected_index(
+            &mut self,
+            ix: usize,
+            _window: &mut Window,
+            _cx: &mut Context<Picker<Self>>,
+        ) {
+            self.selected_index = ix;
+        }
+
+        fn can_select(
+            &self,
+            ix: usize,
+            _window: &mut Window,
+            _cx: &mut Context<Picker<Self>>,
+        ) -> bool {
+            self.items.get(ix).copied().unwrap_or(false)
+        }
+
+        fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+            "Test".into()
+        }
+
+        fn update_matches(
+            &mut self,
+            _query: String,
+            _window: &mut Window,
+            _cx: &mut Context<Picker<Self>>,
+        ) -> Task<()> {
+            Task::ready(())
+        }
+
+        fn confirm(
+            &mut self,
+            _secondary: bool,
+            _window: &mut Window,
+            _cx: &mut Context<Picker<Self>>,
+        ) {
+            self.confirmed_index.set(Some(self.selected_index));
+        }
+
+        fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {}
+
+        fn render_match(
+            &self,
+            ix: usize,
+            selected: bool,
+            _window: &mut Window,
+            _cx: &mut Context<Picker<Self>>,
+        ) -> Option<Self::ListItem> {
+            Some(
+                ui::ListItem::new(ix)
+                    .inset(true)
+                    .toggle_state(selected)
+                    .child(ui::Label::new(format!("Item {ix}"))),
+            )
+        }
+    }
+
+    fn init_test(cx: &mut TestAppContext) {
+        cx.update(|cx| {
+            let store = settings::SettingsStore::test(cx);
+            cx.set_global(store);
+            theme::init(theme::LoadThemes::JustBase, cx);
+            editor::init(cx);
+        });
+    }
+
+    #[gpui::test]
+    async fn test_clicking_non_selectable_item_does_not_confirm(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let confirmed_index = Rc::new(Cell::new(None));
+        let (picker, cx) = cx.add_window_view(|window, cx| {
+            let mut delegate = TestDelegate::new(vec![true, false, true]);
+            delegate.confirmed_index = confirmed_index.clone();
+            Picker::uniform_list(delegate, window, cx)
+        });
+
+        picker.update(cx, |picker, _cx| {
+            assert_eq!(picker.delegate.selected_index(), 0);
+        });
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.handle_click(1, false, window, cx);
+        });
+        assert!(
+            confirmed_index.get().is_none(),
+            "clicking a non-selectable item should not confirm"
+        );
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.handle_click(0, false, window, cx);
+        });
+        assert_eq!(
+            confirmed_index.get(),
+            Some(0),
+            "clicking a selectable item should confirm"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_keyboard_navigation_skips_non_selectable_items(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let (picker, cx) = cx.add_window_view(|window, cx| {
+            Picker::uniform_list(TestDelegate::new(vec![true, false, true]), window, cx)
+        });
+
+        picker.update(cx, |picker, _cx| {
+            assert_eq!(picker.delegate.selected_index(), 0);
+        });
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.select_next(&menu::SelectNext, window, cx);
+        });
+        picker.update(cx, |picker, _cx| {
+            assert_eq!(
+                picker.delegate.selected_index(),
+                2,
+                "select_next should skip non-selectable item at index 1"
+            );
+        });
+
+        picker.update_in(cx, |picker, window, cx| {
+            picker.select_previous(&menu::SelectPrevious, window, cx);
+        });
+        picker.update(cx, |picker, _cx| {
+            assert_eq!(
+                picker.delegate.selected_index(),
+                0,
+                "select_previous should skip non-selectable item at index 1"
+            );
+        });
+    }
+}
+
 impl<D: PickerDelegate> EventEmitter<DismissEvent> for Picker<D> {}
 impl<D: PickerDelegate> ModalView for Picker<D> {}
 

crates/platform_title_bar/Cargo.toml 🔗

@@ -15,6 +15,7 @@ doctest = false
 [dependencies]
 feature_flags.workspace = true
 gpui.workspace = true
+project.workspace = true
 settings.workspace = true
 smallvec.workspace = true
 theme.workspace = true

crates/platform_title_bar/src/platform_title_bar.rs 🔗

@@ -7,6 +7,8 @@ use gpui::{
     MouseButton, ParentElement, StatefulInteractiveElement, Styled, Window, WindowControlArea, div,
     px,
 };
+use project::DisableAiSettings;
+use settings::Settings;
 use smallvec::SmallVec;
 use std::mem;
 use ui::{
@@ -95,7 +97,7 @@ impl PlatformTitleBar {
     }
 
     pub fn is_multi_workspace_enabled(cx: &App) -> bool {
-        cx.has_flag::<AgentV2FeatureFlag>()
+        cx.has_flag::<AgentV2FeatureFlag>() && !DisableAiSettings::get_global(cx).disable_ai
     }
 }
 

crates/project/src/agent_registry_store.rs 🔗

@@ -9,9 +9,9 @@ use futures::AsyncReadExt;
 use gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task};
 use http_client::{AsyncBody, HttpClient};
 use serde::Deserialize;
-use settings::Settings;
+use settings::Settings as _;
 
-use crate::agent_server_store::AllAgentServersSettings;
+use crate::DisableAiSettings;
 
 const REGISTRY_URL: &str = "https://cdn.agentclientprotocol.com/registry/v1/latest/registry.json";
 const REFRESH_THROTTLE_DURATION: Duration = Duration::from_secs(60 * 60);
@@ -129,13 +129,11 @@ impl AgentRegistryStore {
         let store = cx.new(|cx| Self::new(fs, http_client, cx));
         cx.set_global(GlobalAgentRegistryStore(store.clone()));
 
-        if AllAgentServersSettings::get_global(cx).has_registry_agents() {
-            store.update(cx, |store, cx| {
-                if store.agents.is_empty() {
-                    store.refresh(cx);
-                }
-            });
-        }
+        store.update(cx, |store, cx| {
+            if store.agents.is_empty() {
+                store.refresh(cx);
+            }
+        });
 
         store
     }
@@ -173,6 +171,10 @@ impl AgentRegistryStore {
             return;
         }
 
+        if DisableAiSettings::get_global(cx).disable_ai {
+            return;
+        }
+
         self.is_fetching = true;
         self.fetch_error = None;
         self.last_refresh = Some(Instant::now());
@@ -249,6 +251,10 @@ impl AgentRegistryStore {
         http_client: Arc<dyn HttpClient>,
         cx: &mut Context<Self>,
     ) {
+        if DisableAiSettings::get_global(cx).disable_ai {
+            return;
+        }
+
         cx.spawn(async move |this, cx| -> Result<()> {
             let cache_path = registry_cache_path();
             if !fs.is_file(&cache_path).await {

crates/project/src/agent_server_store.rs 🔗

@@ -1,20 +1,15 @@
 use remote::Interactive;
 use std::{
     any::Any,
-    borrow::Borrow,
     path::{Path, PathBuf},
-    str::FromStr as _,
     sync::Arc,
     time::Duration,
 };
 
 use anyhow::{Context as _, Result, bail};
 use collections::HashMap;
-use fs::{Fs, RemoveOptions, RenameOptions};
-use futures::StreamExt as _;
-use gpui::{
-    AppContext as _, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
-};
+use fs::Fs;
+use gpui::{AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task};
 use http_client::{HttpClient, github::AssetKind};
 use node_runtime::NodeRuntime;
 use remote::RemoteClient;
@@ -23,10 +18,10 @@ use rpc::{
     proto::{self, ExternalExtensionAgent},
 };
 use schemars::JsonSchema;
-use semver::Version;
 use serde::{Deserialize, Serialize};
 use settings::{RegisterSetting, SettingsStore};
-use task::{Shell, SpawnInTerminal};
+use sha2::{Digest, Sha256};
+use task::Shell;
 use util::{ResultExt as _, debug_panic};
 
 use crate::ProjectEnvironment;
@@ -66,7 +61,7 @@ impl std::fmt::Debug for AgentServerCommand {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct ExternalAgentServerName(pub SharedString);
 
 impl std::fmt::Display for ExternalAgentServerName {
@@ -87,7 +82,7 @@ impl From<ExternalAgentServerName> for SharedString {
     }
 }
 
-impl Borrow<str> for ExternalAgentServerName {
+impl std::borrow::Borrow<str> for ExternalAgentServerName {
     fn borrow(&self) -> &str {
         &self.0
     }
@@ -95,7 +90,6 @@ impl Borrow<str> for ExternalAgentServerName {
 
 #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
 pub enum ExternalAgentSource {
-    Builtin,
     #[default]
     Custom,
     Extension,
@@ -105,12 +99,11 @@ pub enum ExternalAgentSource {
 pub trait ExternalAgentServer {
     fn get_command(
         &mut self,
-        root_dir: Option<&str>,
         extra_env: HashMap<String, String>,
         status_tx: Option<watch::Sender<SharedString>>,
         new_version_available_tx: Option<watch::Sender<Option<String>>>,
         cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>>;
+    ) -> Task<Result<AgentServerCommand>>;
 
     fn as_any_mut(&mut self) -> &mut dyn Any;
 }
@@ -410,86 +403,13 @@ impl AgentServerStore {
 
         // If we don't have agents from the registry loaded yet, trigger a
         // refresh, which will cause this function to be called again
+        let registry_store = AgentRegistryStore::try_global(cx);
         if new_settings.has_registry_agents()
-            && let Some(registry) = AgentRegistryStore::try_global(cx)
+            && let Some(registry) = registry_store.as_ref()
         {
             registry.update(cx, |registry, cx| registry.refresh_if_stale(cx));
         }
 
-        self.external_agents.clear();
-        self.external_agents.insert(
-            GEMINI_NAME.into(),
-            ExternalAgentEntry::new(
-                Box::new(LocalGemini {
-                    fs: fs.clone(),
-                    node_runtime: node_runtime.clone(),
-                    project_environment: project_environment.clone(),
-                    custom_command: new_settings
-                        .gemini
-                        .clone()
-                        .and_then(|settings| settings.custom_command()),
-                    settings_env: new_settings
-                        .gemini
-                        .as_ref()
-                        .and_then(|settings| settings.env.clone()),
-                    ignore_system_version: new_settings
-                        .gemini
-                        .as_ref()
-                        .and_then(|settings| settings.ignore_system_version)
-                        .unwrap_or(true),
-                }),
-                ExternalAgentSource::Builtin,
-                None,
-                None,
-            ),
-        );
-        self.external_agents.insert(
-            CODEX_NAME.into(),
-            ExternalAgentEntry::new(
-                Box::new(LocalCodex {
-                    fs: fs.clone(),
-                    project_environment: project_environment.clone(),
-                    custom_command: new_settings
-                        .codex
-                        .clone()
-                        .and_then(|settings| settings.custom_command()),
-                    settings_env: new_settings
-                        .codex
-                        .as_ref()
-                        .and_then(|settings| settings.env.clone()),
-                    http_client: http_client.clone(),
-                    no_browser: downstream_client
-                        .as_ref()
-                        .is_some_and(|(_, client)| !client.has_wsl_interop()),
-                }),
-                ExternalAgentSource::Builtin,
-                None,
-                None,
-            ),
-        );
-        self.external_agents.insert(
-            CLAUDE_AGENT_NAME.into(),
-            ExternalAgentEntry::new(
-                Box::new(LocalClaudeCode {
-                    fs: fs.clone(),
-                    node_runtime: node_runtime.clone(),
-                    project_environment: project_environment.clone(),
-                    custom_command: new_settings
-                        .claude
-                        .clone()
-                        .and_then(|settings| settings.custom_command()),
-                    settings_env: new_settings
-                        .claude
-                        .as_ref()
-                        .and_then(|settings| settings.env.clone()),
-                }),
-                ExternalAgentSource::Builtin,
-                None,
-                None,
-            ),
-        );
-
-        let registry_store = AgentRegistryStore::try_global(cx);
         let registry_agents_by_id = registry_store
             .as_ref()
             .map(|store| {
@@ -503,13 +423,14 @@ impl AgentServerStore {
             })
             .unwrap_or_default();
 
+        self.external_agents.clear();
+
         // Insert extension agents before custom/registry so registry entries override extensions.
         for (agent_name, ext_id, targets, env, icon_path, display_name) in extension_agents.iter() {
             let name = ExternalAgentServerName(agent_name.clone().into());
             let mut env = env.clone();
             if let Some(settings_env) =
                 new_settings
-                    .custom
                     .get(agent_name.as_ref())
                     .and_then(|settings| match settings {
                         CustomAgentServerSettings::Extension { env, .. } => Some(env.clone()),
@@ -542,7 +463,7 @@ impl AgentServerStore {
             );
         }
 
-        for (name, settings) in &new_settings.custom {
+        for (name, settings) in new_settings.iter() {
             match settings {
                 CustomAgentServerSettings::Custom { command, .. } => {
                     let agent_name = ExternalAgentServerName(name.clone().into());
@@ -672,7 +593,7 @@ impl AgentServerStore {
                 extension_agents: vec![],
                 _subscriptions: subscriptions,
             },
-            external_agents: Default::default(),
+            external_agents: HashMap::default(),
         };
         if let Some(_events) = extension::ExtensionEvents::try_global(cx) {}
         this.agent_servers_settings_changed(cx);
@@ -680,70 +601,19 @@ impl AgentServerStore {
     }
 
     pub(crate) fn remote(project_id: u64, upstream_client: Entity<RemoteClient>) -> Self {
-        // Set up the builtin agents here so they're immediately available in
-        // remote projects--we know that the HeadlessProject on the other end
-        // will have them.
-        let external_agents: [(ExternalAgentServerName, ExternalAgentEntry); 3] = [
-            (
-                CLAUDE_AGENT_NAME.into(),
-                ExternalAgentEntry::new(
-                    Box::new(RemoteExternalAgentServer {
-                        project_id,
-                        upstream_client: upstream_client.clone(),
-                        name: CLAUDE_AGENT_NAME.into(),
-                        status_tx: None,
-                        new_version_available_tx: None,
-                    }) as Box<dyn ExternalAgentServer>,
-                    ExternalAgentSource::Builtin,
-                    None,
-                    None,
-                ),
-            ),
-            (
-                CODEX_NAME.into(),
-                ExternalAgentEntry::new(
-                    Box::new(RemoteExternalAgentServer {
-                        project_id,
-                        upstream_client: upstream_client.clone(),
-                        name: CODEX_NAME.into(),
-                        status_tx: None,
-                        new_version_available_tx: None,
-                    }) as Box<dyn ExternalAgentServer>,
-                    ExternalAgentSource::Builtin,
-                    None,
-                    None,
-                ),
-            ),
-            (
-                GEMINI_NAME.into(),
-                ExternalAgentEntry::new(
-                    Box::new(RemoteExternalAgentServer {
-                        project_id,
-                        upstream_client: upstream_client.clone(),
-                        name: GEMINI_NAME.into(),
-                        status_tx: None,
-                        new_version_available_tx: None,
-                    }) as Box<dyn ExternalAgentServer>,
-                    ExternalAgentSource::Builtin,
-                    None,
-                    None,
-                ),
-            ),
-        ];
-
         Self {
             state: AgentServerStoreState::Remote {
                 project_id,
                 upstream_client,
             },
-            external_agents: external_agents.into_iter().collect(),
+            external_agents: HashMap::default(),
         }
     }
 
     pub fn collab() -> Self {
         Self {
             state: AgentServerStoreState::Collab,
-            external_agents: Default::default(),
+            external_agents: HashMap::default(),
         }
     }
 
@@ -790,6 +660,17 @@ impl AgentServerStore {
             .map(|entry| entry.server.as_mut())
     }
 
+    pub fn no_browser(&self) -> bool {
+        match &self.state {
+            AgentServerStoreState::Local {
+                downstream_client, ..
+            } => downstream_client
+                .as_ref()
+                .is_some_and(|(_, client)| !client.has_wsl_interop()),
+            _ => false,
+        }
+    }
+
     pub fn external_agents(&self) -> impl Iterator<Item = &ExternalAgentServerName> {
         self.external_agents.keys()
     }
@@ -799,7 +680,7 @@ impl AgentServerStore {
         envelope: TypedEnvelope<proto::GetAgentServerCommand>,
         mut cx: AsyncApp,
     ) -> Result<proto::AgentServerCommand> {
-        let (command, root_dir, login_command) = this
+        let command = this
             .update(&mut cx, |this, cx| {
                 let AgentServerStoreState::Local {
                     downstream_client, ..
@@ -808,6 +689,7 @@ impl AgentServerStore {
                     debug_panic!("should not receive GetAgentServerCommand in a non-local project");
                     bail!("unexpected GetAgentServerCommand request in a non-local project");
                 };
+                let no_browser = this.no_browser();
                 let agent = this
                     .external_agents
                     .get_mut(&*envelope.payload.name)
@@ -857,9 +739,12 @@ impl AgentServerStore {
                         (status_tx, new_version_available_tx)
                     })
                     .unzip();
+                let mut extra_env = HashMap::default();
+                if no_browser {
+                    extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned());
+                }
                 anyhow::Ok(agent.get_command(
-                    envelope.payload.root_dir.as_deref(),
-                    HashMap::default(),
+                    extra_env,
                     status_tx,
                     new_version_available_tx,
                     &mut cx.to_async(),
@@ -873,8 +758,9 @@ impl AgentServerStore {
                 .env
                 .map(|env| env.into_iter().collect())
                 .unwrap_or_default(),
-            root_dir: root_dir,
-            login: login_command.map(|cmd| cmd.to_proto()),
+            // root_dir and login are no longer used, but returned for backwards compatibility
+            root_dir: paths::home_dir().to_string_lossy().to_string(),
+            login: None,
         })
     }
 
@@ -915,13 +801,7 @@ impl AgentServerStore {
                 .names
                 .into_iter()
                 .map(|name| {
-                    let agent_name = ExternalAgentServerName(name.clone().into());
-                    let fallback_source =
-                        if name == GEMINI_NAME || name == CLAUDE_AGENT_NAME || name == CODEX_NAME {
-                            ExternalAgentSource::Builtin
-                        } else {
-                            ExternalAgentSource::Custom
-                        };
+                    let agent_name = ExternalAgentServerName(name.into());
                     let (icon, display_name, source) = metadata
                         .remove(&agent_name)
                         .or_else(|| {
@@ -935,12 +815,7 @@ impl AgentServerStore {
                                     )
                                 })
                         })
-                        .unwrap_or((None, None, fallback_source));
-                    let source = if fallback_source == ExternalAgentSource::Builtin {
-                        ExternalAgentSource::Builtin
-                    } else {
-                        source
-                    };
+                        .unwrap_or((None, None, ExternalAgentSource::default()));
                     let agent = RemoteExternalAgentServer {
                         project_id: *project_id,
                         upstream_client: upstream_client.clone(),
@@ -1057,192 +932,6 @@ impl AgentServerStore {
     }
 }
 
-fn get_or_npm_install_builtin_agent(
-    binary_name: SharedString,
-    package_name: SharedString,
-    entrypoint_path: PathBuf,
-    minimum_version: Option<semver::Version>,
-    status_tx: Option<watch::Sender<SharedString>>,
-    new_version_available: Option<watch::Sender<Option<String>>>,
-    fs: Arc<dyn Fs>,
-    node_runtime: NodeRuntime,
-    cx: &mut AsyncApp,
-) -> Task<std::result::Result<AgentServerCommand, anyhow::Error>> {
-    cx.spawn(async move |cx| {
-        let node_path = node_runtime.binary_path().await?;
-        let dir = paths::external_agents_dir().join(binary_name.as_str());
-        fs.create_dir(&dir).await?;
-
-        let mut stream = fs.read_dir(&dir).await?;
-        let mut versions = Vec::new();
-        let mut to_delete = Vec::new();
-        while let Some(entry) = stream.next().await {
-            let Ok(entry) = entry else { continue };
-            let Some(file_name) = entry.file_name() else {
-                continue;
-            };
-
-            if let Some(name) = file_name.to_str()
-                && let Some(version) = semver::Version::from_str(name).ok()
-                && fs
-                    .is_file(&dir.join(file_name).join(&entrypoint_path))
-                    .await
-            {
-                versions.push((version, file_name.to_owned()));
-            } else {
-                to_delete.push(file_name.to_owned())
-            }
-        }
-
-        versions.sort();
-        let newest_version = if let Some((version, _)) = versions.last().cloned()
-            && minimum_version.is_none_or(|minimum_version| version >= minimum_version)
-        {
-            versions.pop()
-        } else {
-            None
-        };
-        log::debug!("existing version of {package_name}: {newest_version:?}");
-        to_delete.extend(versions.into_iter().map(|(_, file_name)| file_name));
-
-        cx.background_spawn({
-            let fs = fs.clone();
-            let dir = dir.clone();
-            async move {
-                for file_name in to_delete {
-                    fs.remove_dir(
-                        &dir.join(file_name),
-                        RemoveOptions {
-                            recursive: true,
-                            ignore_if_not_exists: false,
-                        },
-                    )
-                    .await
-                    .ok();
-                }
-            }
-        })
-        .detach();
-
-        let version = if let Some((version, file_name)) = newest_version {
-            cx.background_spawn({
-                let dir = dir.clone();
-                let fs = fs.clone();
-                async move {
-                    let latest_version = node_runtime
-                        .npm_package_latest_version(&package_name)
-                        .await
-                        .ok();
-                    if let Some(latest_version) = latest_version
-                        && latest_version != version
-                    {
-                        let download_result = download_latest_version(
-                            fs,
-                            dir.clone(),
-                            node_runtime,
-                            package_name.clone(),
-                        )
-                        .await
-                        .log_err();
-                        if let Some(mut new_version_available) = new_version_available
-                            && download_result.is_some()
-                        {
-                            new_version_available
-                                .send(Some(latest_version.to_string()))
-                                .ok();
-                        }
-                    }
-                }
-            })
-            .detach();
-            file_name
-        } else {
-            if let Some(mut status_tx) = status_tx {
-                status_tx.send("Installing…".into()).ok();
-            }
-            let dir = dir.clone();
-            cx.background_spawn(download_latest_version(
-                fs.clone(),
-                dir.clone(),
-                node_runtime,
-                package_name.clone(),
-            ))
-            .await?
-            .to_string()
-            .into()
-        };
-
-        let agent_server_path = dir.join(version).join(entrypoint_path);
-        let agent_server_path_exists = fs.is_file(&agent_server_path).await;
-        anyhow::ensure!(
-            agent_server_path_exists,
-            "Missing entrypoint path {} after installation",
-            agent_server_path.to_string_lossy()
-        );
-
-        anyhow::Ok(AgentServerCommand {
-            path: node_path,
-            args: vec![agent_server_path.to_string_lossy().into_owned()],
-            env: None,
-        })
-    })
-}
-
-fn find_bin_in_path(
-    bin_name: SharedString,
-    root_dir: PathBuf,
-    env: HashMap<String, String>,
-    cx: &mut AsyncApp,
-) -> Task<Option<PathBuf>> {
-    cx.background_executor().spawn(async move {
-        let which_result = if cfg!(windows) {
-            which::which(bin_name.as_str())
-        } else {
-            let shell_path = env.get("PATH").cloned();
-            which::which_in(bin_name.as_str(), shell_path.as_ref(), &root_dir)
-        };
-
-        if let Err(which::Error::CannotFindBinaryPath) = which_result {
-            return None;
-        }
-
-        which_result.log_err()
-    })
-}
-
-async fn download_latest_version(
-    fs: Arc<dyn Fs>,
-    dir: PathBuf,
-    node_runtime: NodeRuntime,
-    package_name: SharedString,
-) -> Result<Version> {
-    log::debug!("downloading latest version of {package_name}");
-
-    let tmp_dir = tempfile::tempdir_in(&dir)?;
-
-    node_runtime
-        .npm_install_packages(tmp_dir.path(), &[(&package_name, "latest")])
-        .await?;
-
-    let version = node_runtime
-        .npm_package_installed_version(tmp_dir.path(), &package_name)
-        .await?
-        .context("expected package to be installed")?;
-
-    fs.rename(
-        &tmp_dir.keep(),
-        &dir.join(version.to_string()),
-        RenameOptions {
-            ignore_if_exists: true,
-            overwrite: true,
-            create_parents: false,
-        },
-    )
-    .await?;
-
-    anyhow::Ok(version)
-}
-
 struct RemoteExternalAgentServer {
     project_id: u64,
     upstream_client: Entity<RemoteClient>,
@@ -1254,16 +943,14 @@ struct RemoteExternalAgentServer {
 impl ExternalAgentServer for RemoteExternalAgentServer {
     fn get_command(
         &mut self,
-        root_dir: Option<&str>,
         extra_env: HashMap<String, String>,
         status_tx: Option<watch::Sender<SharedString>>,
         new_version_available_tx: Option<watch::Sender<Option<String>>>,
         cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+    ) -> Task<Result<AgentServerCommand>> {
         let project_id = self.project_id;
         let name = self.name.to_string();
         let upstream_client = self.upstream_client.downgrade();
-        let root_dir = root_dir.map(|root_dir| root_dir.to_owned());
         self.status_tx = status_tx;
         self.new_version_available_tx = new_version_available_tx;
         cx.spawn(async move |cx| {
@@ -1274,7 +961,7 @@ impl ExternalAgentServer for RemoteExternalAgentServer {
                         .request(proto::GetAgentServerCommand {
                             project_id,
                             name,
-                            root_dir: root_dir.clone(),
+                            root_dir: None,
                         })
                 })?
                 .await?;
@@ -1290,381 +977,11 @@ impl ExternalAgentServer for RemoteExternalAgentServer {
                     Interactive::No,
                 )
             })??;
-            Ok((
-                AgentServerCommand {
-                    path: command.program.into(),
-                    args: command.args,
-                    env: Some(command.env),
-                },
-                root_dir,
-                response.login.map(SpawnInTerminal::from_proto),
-            ))
-        })
-    }
-
-    fn as_any_mut(&mut self) -> &mut dyn Any {
-        self
-    }
-}
-
-struct LocalGemini {
-    fs: Arc<dyn Fs>,
-    node_runtime: NodeRuntime,
-    project_environment: Entity<ProjectEnvironment>,
-    custom_command: Option<AgentServerCommand>,
-    settings_env: Option<HashMap<String, String>>,
-    ignore_system_version: bool,
-}
-
-impl ExternalAgentServer for LocalGemini {
-    fn get_command(
-        &mut self,
-        root_dir: Option<&str>,
-        extra_env: HashMap<String, String>,
-        status_tx: Option<watch::Sender<SharedString>>,
-        new_version_available_tx: Option<watch::Sender<Option<String>>>,
-        cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-        let fs = self.fs.clone();
-        let node_runtime = self.node_runtime.clone();
-        let project_environment = self.project_environment.downgrade();
-        let custom_command = self.custom_command.clone();
-        let settings_env = self.settings_env.clone();
-        let ignore_system_version = self.ignore_system_version;
-        let root_dir: Arc<Path> = root_dir
-            .map(|root_dir| Path::new(root_dir))
-            .unwrap_or(paths::home_dir())
-            .into();
-
-        cx.spawn(async move |cx| {
-            let mut env = project_environment
-                .update(cx, |project_environment, cx| {
-                    project_environment.local_directory_environment(
-                        &Shell::System,
-                        root_dir.clone(),
-                        cx,
-                    )
-                })?
-                .await
-                .unwrap_or_default();
-
-            env.extend(settings_env.unwrap_or_default());
-
-            let mut command = if let Some(mut custom_command) = custom_command {
-                custom_command.env = Some(env);
-                custom_command
-            } else if !ignore_system_version
-                && let Some(bin) =
-                    find_bin_in_path("gemini".into(), root_dir.to_path_buf(), env.clone(), cx).await
-            {
-                AgentServerCommand {
-                    path: bin,
-                    args: Vec::new(),
-                    env: Some(env),
-                }
-            } else {
-                let mut command = get_or_npm_install_builtin_agent(
-                    GEMINI_NAME.into(),
-                    "@google/gemini-cli".into(),
-                    "node_modules/@google/gemini-cli/dist/index.js".into(),
-                    if cfg!(windows) {
-                        // v0.8.x on Windows has a bug that causes the initialize request to hang forever
-                        Some("0.9.0".parse().unwrap())
-                    } else {
-                        Some("0.2.1".parse().unwrap())
-                    },
-                    status_tx,
-                    new_version_available_tx,
-                    fs,
-                    node_runtime,
-                    cx,
-                )
-                .await?;
-                command.env = Some(env);
-                command
-            };
-
-            // Gemini CLI doesn't seem to have a dedicated invocation for logging in--we just run it normally without any arguments.
-            let login = task::SpawnInTerminal {
-                command: Some(command.path.to_string_lossy().into_owned()),
-                args: command.args.clone(),
-                env: command.env.clone().unwrap_or_default(),
-                label: "gemini /auth".into(),
-                ..Default::default()
-            };
-
-            command.env.get_or_insert_default().extend(extra_env);
-            command.args.push("--experimental-acp".into());
-            Ok((
-                command,
-                root_dir.to_string_lossy().into_owned(),
-                Some(login),
-            ))
-        })
-    }
-
-    fn as_any_mut(&mut self) -> &mut dyn Any {
-        self
-    }
-}
-
-struct LocalClaudeCode {
-    fs: Arc<dyn Fs>,
-    node_runtime: NodeRuntime,
-    project_environment: Entity<ProjectEnvironment>,
-    custom_command: Option<AgentServerCommand>,
-    settings_env: Option<HashMap<String, String>>,
-}
-
-impl ExternalAgentServer for LocalClaudeCode {
-    fn get_command(
-        &mut self,
-        root_dir: Option<&str>,
-        extra_env: HashMap<String, String>,
-        status_tx: Option<watch::Sender<SharedString>>,
-        new_version_available_tx: Option<watch::Sender<Option<String>>>,
-        cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-        let fs = self.fs.clone();
-        let node_runtime = self.node_runtime.clone();
-        let project_environment = self.project_environment.downgrade();
-        let custom_command = self.custom_command.clone();
-        let settings_env = self.settings_env.clone();
-        let root_dir: Arc<Path> = root_dir
-            .map(|root_dir| Path::new(root_dir))
-            .unwrap_or(paths::home_dir())
-            .into();
-
-        cx.spawn(async move |cx| {
-            let mut env = project_environment
-                .update(cx, |project_environment, cx| {
-                    project_environment.local_directory_environment(
-                        &Shell::System,
-                        root_dir.clone(),
-                        cx,
-                    )
-                })?
-                .await
-                .unwrap_or_default();
-            env.insert("ANTHROPIC_API_KEY".into(), "".into());
-
-            env.extend(settings_env.unwrap_or_default());
-
-            let (mut command, login_command) = if let Some(mut custom_command) = custom_command {
-                custom_command.env = Some(env);
-                (custom_command, None)
-            } else {
-                let mut command = get_or_npm_install_builtin_agent(
-                    "claude-agent-acp".into(),
-                    "@zed-industries/claude-agent-acp".into(),
-                    "node_modules/@zed-industries/claude-agent-acp/dist/index.js".into(),
-                    Some("0.17.0".parse().unwrap()),
-                    status_tx,
-                    new_version_available_tx,
-                    fs,
-                    node_runtime,
-                    cx,
-                )
-                .await?;
-                command.env = Some(env);
-
-                (command, None)
-            };
-
-            command.env.get_or_insert_default().extend(extra_env);
-            Ok((
-                command,
-                root_dir.to_string_lossy().into_owned(),
-                login_command,
-            ))
-        })
-    }
-
-    fn as_any_mut(&mut self) -> &mut dyn Any {
-        self
-    }
-}
-
-struct LocalCodex {
-    fs: Arc<dyn Fs>,
-    project_environment: Entity<ProjectEnvironment>,
-    http_client: Arc<dyn HttpClient>,
-    custom_command: Option<AgentServerCommand>,
-    settings_env: Option<HashMap<String, String>>,
-    no_browser: bool,
-}
-
-impl ExternalAgentServer for LocalCodex {
-    fn get_command(
-        &mut self,
-        root_dir: Option<&str>,
-        extra_env: HashMap<String, String>,
-        mut status_tx: Option<watch::Sender<SharedString>>,
-        _new_version_available_tx: Option<watch::Sender<Option<String>>>,
-        cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-        let fs = self.fs.clone();
-        let project_environment = self.project_environment.downgrade();
-        let http = self.http_client.clone();
-        let custom_command = self.custom_command.clone();
-        let settings_env = self.settings_env.clone();
-        let root_dir: Arc<Path> = root_dir
-            .map(|root_dir| Path::new(root_dir))
-            .unwrap_or(paths::home_dir())
-            .into();
-        let no_browser = self.no_browser;
-
-        cx.spawn(async move |cx| {
-            let mut env = project_environment
-                .update(cx, |project_environment, cx| {
-                    project_environment.local_directory_environment(
-                        &Shell::System,
-                        root_dir.clone(),
-                        cx,
-                    )
-                })?
-                .await
-                .unwrap_or_default();
-            if no_browser {
-                env.insert("NO_BROWSER".to_owned(), "1".to_owned());
-            }
-
-            env.extend(settings_env.unwrap_or_default());
-
-            let mut command = if let Some(mut custom_command) = custom_command {
-                custom_command.env = Some(env);
-                custom_command
-            } else {
-                let dir = paths::external_agents_dir().join(CODEX_NAME);
-                fs.create_dir(&dir).await?;
-
-                let bin_name = if cfg!(windows) {
-                    "codex-acp.exe"
-                } else {
-                    "codex-acp"
-                };
-
-                let find_latest_local_version = async || -> Option<PathBuf> {
-                    let mut local_versions: Vec<(semver::Version, String)> = Vec::new();
-                    let mut stream = fs.read_dir(&dir).await.ok()?;
-                    while let Some(entry) = stream.next().await {
-                        let Ok(entry) = entry else { continue };
-                        let Some(file_name) = entry.file_name() else {
-                            continue;
-                        };
-                        let version_path = dir.join(&file_name);
-                        if fs.is_file(&version_path.join(bin_name)).await {
-                            let version_str = file_name.to_string_lossy();
-                            if let Ok(version) =
-                                semver::Version::from_str(version_str.trim_start_matches('v'))
-                            {
-                                local_versions.push((version, version_str.into_owned()));
-                            }
-                        }
-                    }
-                    local_versions.sort_by(|(a, _), (b, _)| a.cmp(b));
-                    local_versions.last().map(|(_, v)| dir.join(v))
-                };
-
-                let fallback_to_latest_local_version =
-                    async |err: anyhow::Error| -> Result<PathBuf, anyhow::Error> {
-                        if let Some(local) = find_latest_local_version().await {
-                            log::info!(
-                                "Falling back to locally installed Codex version: {}",
-                                local.display()
-                            );
-                            Ok(local)
-                        } else {
-                            Err(err)
-                        }
-                    };
-
-                let version_dir = match ::http_client::github::latest_github_release(
-                    CODEX_ACP_REPO,
-                    true,
-                    false,
-                    http.clone(),
-                )
-                .await
-                {
-                    Ok(release) => {
-                        let version_dir = dir.join(&release.tag_name);
-                        if !fs.is_dir(&version_dir).await {
-                            if let Some(ref mut status_tx) = status_tx {
-                                status_tx.send("Installing…".into()).ok();
-                            }
-
-                            let tag = release.tag_name.clone();
-                            let version_number = tag.trim_start_matches('v');
-                            let asset_name = asset_name(version_number)
-                                .context("codex acp is not supported for this architecture")?;
-                            let asset = release
-                                .assets
-                                .into_iter()
-                                .find(|asset| asset.name == asset_name)
-                                .with_context(|| {
-                                    format!("no asset found matching `{asset_name:?}`")
-                                })?;
-                            // Strip "sha256:" prefix from digest if present (GitHub API format)
-                            let digest = asset
-                                .digest
-                                .as_deref()
-                                .and_then(|d| d.strip_prefix("sha256:").or(Some(d)));
-                            match ::http_client::github_download::download_server_binary(
-                                &*http,
-                                &asset.browser_download_url,
-                                digest,
-                                &version_dir,
-                                if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") {
-                                    AssetKind::Zip
-                                } else {
-                                    AssetKind::TarGz
-                                },
-                            )
-                            .await
-                            {
-                                Ok(()) => {
-                                    // remove older versions
-                                    util::fs::remove_matching(&dir, |entry| entry != version_dir)
-                                        .await;
-                                    version_dir
-                                }
-                                Err(err) => {
-                                    log::error!(
-                                        "Failed to download Codex release {}: {err:#}",
-                                        release.tag_name
-                                    );
-                                    fallback_to_latest_local_version(err).await?
-                                }
-                            }
-                        } else {
-                            version_dir
-                        }
-                    }
-                    Err(err) => {
-                        log::error!("Failed to fetch Codex latest release: {err:#}");
-                        fallback_to_latest_local_version(err).await?
-                    }
-                };
-
-                let bin_path = version_dir.join(bin_name);
-                anyhow::ensure!(
-                    fs.is_file(&bin_path).await,
-                    "Missing Codex binary at {} after installation",
-                    bin_path.to_string_lossy()
-                );
-
-                let mut cmd = AgentServerCommand {
-                    path: bin_path,
-                    args: Vec::new(),
-                    env: None,
-                };
-                cmd.env = Some(env);
-                cmd
-            };
-
-            command.env.get_or_insert_default().extend(extra_env);
-            Ok((command, root_dir.to_string_lossy().into_owned(), None))
+            Ok(AgentServerCommand {
+                path: command.program.into(),
+                args: command.args,
+                env: Some(command.env),
+            })
         })
     }
 
@@ -1673,42 +990,6 @@ impl ExternalAgentServer for LocalCodex {
     }
 }
 
-pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp";
-
-fn get_platform_info() -> Option<(&'static str, &'static str, &'static str)> {
-    let arch = if cfg!(target_arch = "x86_64") {
-        "x86_64"
-    } else if cfg!(target_arch = "aarch64") {
-        "aarch64"
-    } else {
-        return None;
-    };
-
-    let platform = if cfg!(target_os = "macos") {
-        "apple-darwin"
-    } else if cfg!(target_os = "windows") {
-        "pc-windows-msvc"
-    } else if cfg!(target_os = "linux") {
-        "unknown-linux-gnu"
-    } else {
-        return None;
-    };
-
-    // Windows uses .zip in release assets
-    let ext = if cfg!(target_os = "windows") {
-        "zip"
-    } else {
-        "tar.gz"
-    };
-
-    Some((arch, platform, ext))
-}
-
-fn asset_name(version: &str) -> Option<String> {
-    let (arch, platform, ext) = get_platform_info()?;
-    Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}"))
-}
-
 pub struct LocalExtensionArchiveAgent {
     pub fs: Arc<dyn Fs>,
     pub http_client: Arc<dyn HttpClient>,

crates/project/src/buffer_store.rs 🔗

@@ -869,7 +869,6 @@ impl BufferStore {
 
                 entry
                     .insert(
-                        // todo(lw): hot foreground spawn
                         cx.spawn(async move |this, cx| {
                             let load_result = load_buffer.await;
                             this.update(cx, |this, _cx| {

crates/project/src/context_server_store.rs 🔗

@@ -8,7 +8,7 @@ use std::time::Duration;
 use anyhow::{Context as _, Result};
 use collections::{HashMap, HashSet};
 use context_server::{ContextServer, ContextServerCommand, ContextServerId};
-use futures::{FutureExt as _, future::join_all};
+use futures::{FutureExt as _, future::Either, future::join_all};
 use gpui::{App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, actions};
 use itertools::Itertools;
 use registry::ContextServerDescriptorRegistry;
@@ -141,6 +141,8 @@ impl ContextServerConfiguration {
         worktree_store: Entity<WorktreeStore>,
         cx: &AsyncApp,
     ) -> Option<Self> {
+        const EXTENSION_COMMAND_TIMEOUT: Duration = Duration::from_secs(30);
+
         match settings {
             ContextServerSettings::Stdio {
                 enabled: _,
@@ -155,18 +157,27 @@ impl ContextServerConfiguration {
                 let descriptor =
                     cx.update(|cx| registry.read(cx).context_server_descriptor(&id.0))?;
 
-                match descriptor.command(worktree_store, cx).await {
-                    Ok(command) => Some(ContextServerConfiguration::Extension {
+                let command_future = descriptor.command(worktree_store, cx);
+                let timeout_future = cx.background_executor().timer(EXTENSION_COMMAND_TIMEOUT);
+
+                match futures::future::select(command_future, timeout_future).await {
+                    Either::Left((Ok(command), _)) => Some(ContextServerConfiguration::Extension {
                         command,
                         settings,
                         remote,
                     }),
-                    Err(e) => {
+                    Either::Left((Err(e), _)) => {
                         log::error!(
                             "Failed to create context server configuration from settings: {e:#}"
                         );
                         None
                     }
+                    Either::Right(_) => {
+                        log::error!(
+                            "Timed out resolving command for extension context server {id}"
+                        );
+                        None
+                    }
                 }
             }
             ContextServerSettings::Http {
@@ -855,6 +866,7 @@ impl ContextServerStore {
 
                 this.update(cx, |this, cx| {
                     this.populate_server_ids(cx);
+                    cx.notify();
                     this.update_servers_task.take();
                     if this.needs_server_update {
                         this.available_context_servers_changed(cx);
@@ -959,11 +971,23 @@ impl ContextServerStore {
         })??;
 
         for (id, config) in servers_to_start {
-            let (server, config) =
-                Self::create_context_server(this.clone(), id, config, cx).await?;
-            this.update(cx, |this, cx| {
-                this.run_server(server, config, cx);
-            })?;
+            match Self::create_context_server(this.clone(), id.clone(), config, cx).await {
+                Ok((server, config)) => {
+                    this.update(cx, |this, cx| {
+                        this.run_server(server, config, cx);
+                    })?;
+                }
+                Err(err) => {
+                    log::error!("{id} context server failed to create: {err:#}");
+                    this.update(cx, |_this, cx| {
+                        cx.emit(ServerStatusChangedEvent {
+                            server_id: id,
+                            status: ContextServerStatus::Error(err.to_string().into()),
+                        });
+                        cx.notify();
+                    })?;
+                }
+            }
         }
 
         Ok(())

crates/project/src/debugger/session.rs 🔗

@@ -2645,10 +2645,40 @@ impl Session {
         self.fetch(
             command,
             move |this, variables, cx| {
-                let Some(variables) = variables.log_err() else {
+                let Some(mut variables) = variables.log_err() else {
                     return;
                 };
 
+                if this.adapter.0.as_ref() == "Debugpy" {
+                    for variable in variables.iter_mut() {
+                        if variable.type_ == Some("str".into()) {
+                            // reverse Python repr() escaping
+                            let mut unescaped = String::with_capacity(variable.value.len());
+                            let mut chars = variable.value.chars();
+                            while let Some(c) = chars.next() {
+                                if c != '\\' {
+                                    unescaped.push(c);
+                                } else {
+                                    match chars.next() {
+                                        Some('\\') => unescaped.push('\\'),
+                                        Some('n') => unescaped.push('\n'),
+                                        Some('t') => unescaped.push('\t'),
+                                        Some('r') => unescaped.push('\r'),
+                                        Some('\'') => unescaped.push('\''),
+                                        Some('"') => unescaped.push('"'),
+                                        Some(c) => {
+                                            unescaped.push('\\');
+                                            unescaped.push(c);
+                                        }
+                                        None => {}
+                                    }
+                                }
+                            }
+                            variable.value = unescaped;
+                        }
+                    }
+                }
+
                 this.active_snapshot
                     .variables
                     .insert(variables_reference, variables);

crates/project/src/git_store.rs 🔗

@@ -6,6 +6,9 @@ pub mod pending_op;
 use crate::{
     ProjectEnvironment, ProjectItem, ProjectPath,
     buffer_store::{BufferStore, BufferStoreEvent},
+    trusted_worktrees::{
+        PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
+    },
     worktree_store::{WorktreeStore, WorktreeStoreEvent},
 };
 use anyhow::{Context as _, Result, anyhow, bail};
@@ -21,7 +24,7 @@ use futures::{
         mpsc,
         oneshot::{self, Canceled},
     },
-    future::{self, Shared},
+    future::{self, BoxFuture, Shared},
     stream::FuturesOrdered,
 };
 use git::{
@@ -36,8 +39,8 @@ use git::{
     },
     stash::{GitStash, StashEntry},
     status::{
-        DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
-        UnmergedStatus, UnmergedStatusCode,
+        self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
+        TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
     },
 };
 use gpui::{
@@ -60,7 +63,7 @@ use settings::WorktreeId;
 use smol::future::yield_now;
 use std::{
     cmp::Ordering,
-    collections::{BTreeSet, HashSet, VecDeque},
+    collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
     future::Future,
     mem,
     ops::Range,
@@ -72,7 +75,7 @@ use std::{
     },
     time::Instant,
 };
-use sum_tree::{Edit, SumTree, TreeSet};
+use sum_tree::{Edit, SumTree, TreeMap};
 use task::Shell;
 use text::{Bias, BufferId};
 use util::{
@@ -192,6 +195,7 @@ pub struct GitStoreCheckpoint {
 pub struct StatusEntry {
     pub repo_path: RepoPath,
     pub status: FileStatus,
+    pub diff_stat: Option<DiffStat>,
 }
 
 impl StatusEntry {
@@ -213,6 +217,8 @@ impl StatusEntry {
             repo_path: self.repo_path.to_proto(),
             simple_status,
             status: Some(status_to_proto(self.status)),
+            diff_stat_added: self.diff_stat.map(|ds| ds.added),
+            diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
         }
     }
 }
@@ -223,7 +229,15 @@ impl TryFrom<proto::StatusEntry> for StatusEntry {
     fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
         let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
         let status = status_from_proto(value.simple_status, value.status)?;
-        Ok(Self { repo_path, status })
+        let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
+            (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
+            _ => None,
+        };
+        Ok(Self {
+            repo_path,
+            status,
+            diff_stat,
+        })
     }
 }
 
@@ -251,9 +265,8 @@ pub struct RepositoryId(pub u64);
 
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct MergeDetails {
-    pub conflicted_paths: TreeSet<RepoPath>,
+    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
     pub message: Option<SharedString>,
-    pub heads: Vec<Option<SharedString>>,
 }
 
 #[derive(Clone)]
@@ -267,6 +280,11 @@ pub struct RepositorySnapshot {
     pub id: RepositoryId,
     pub statuses_by_path: SumTree<StatusEntry>,
     pub work_directory_abs_path: Arc<Path>,
+    /// The working directory of the original repository. For a normal
+    /// checkout this equals `work_directory_abs_path`. For a git worktree
+    /// checkout, this is the original repo's working directory — used to
+    /// anchor new worktree creation so they don't nest.
+    pub original_repo_abs_path: Arc<Path>,
     pub path_style: PathStyle,
     pub branch: Option<Branch>,
     pub head_commit: Option<CommitDetails>,
@@ -296,6 +314,19 @@ enum GraphCommitHandlerState {
     Closed,
 }
 
+pub struct InitialGitGraphData {
+    fetch_task: Task<()>,
+    pub error: Option<SharedString>,
+    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
+    pub commit_oid_to_index: HashMap<Oid, usize>,
+}
+
+pub struct GraphDataResponse<'a> {
+    pub commits: &'a [Arc<InitialGraphCommitData>],
+    pub is_loading: bool,
+    pub error: Option<SharedString>,
+}
+
 pub struct Repository {
     this: WeakEntity<Self>,
     snapshot: RepositorySnapshot,
@@ -311,13 +342,7 @@ pub struct Repository {
     askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
     latest_askpass_id: u64,
     repository_state: Shared<Task<Result<RepositoryState, String>>>,
-    pub initial_graph_data: HashMap<
-        (LogOrder, LogSource),
-        (
-            Task<Result<(), SharedString>>,
-            Vec<Arc<InitialGraphCommitData>>,
-        ),
-    >,
+    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
     graph_commit_data_handler: GraphCommitHandlerState,
     commit_data: HashMap<Oid, CommitDataState>,
 }
@@ -343,6 +368,7 @@ impl LocalRepositoryState {
         dot_git_abs_path: Arc<Path>,
         project_environment: WeakEntity<ProjectEnvironment>,
         fs: Arc<dyn Fs>,
+        is_trusted: bool,
         cx: &mut AsyncApp,
     ) -> anyhow::Result<Self> {
         let environment = project_environment
@@ -370,6 +396,7 @@ impl LocalRepositoryState {
                 }
             })
             .await?;
+        backend.set_trusted(is_trusted);
         Ok(LocalRepositoryState {
             backend,
             environment: Arc::new(environment),
@@ -390,14 +417,20 @@ pub enum RepositoryState {
     Remote(RemoteRepositoryState),
 }
 
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum GitGraphEvent {
+    CountUpdated(usize),
+    FullyLoaded,
+    LoadingError,
+}
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum RepositoryEvent {
     StatusesChanged,
-    MergeHeadsChanged,
     BranchChanged,
     StashEntriesChanged,
     PendingOpsChanged { pending_ops: SumTree<PendingOps> },
-    GitGraphCountUpdated((LogOrder, LogSource), usize),
+    GraphEvent((LogSource, LogOrder), GitGraphEvent),
 }
 
 #[derive(Clone, Debug)]
@@ -478,11 +511,15 @@ impl GitStore {
         state: GitStoreState,
         cx: &mut Context<Self>,
     ) -> Self {
-        let _subscriptions = vec![
+        let mut _subscriptions = vec![
             cx.subscribe(&worktree_store, Self::on_worktree_store_event),
             cx.subscribe(&buffer_store, Self::on_buffer_store_event),
         ];
 
+        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
+            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
+        }
+
         GitStore {
             state,
             buffer_store,
@@ -1492,19 +1529,30 @@ impl GitStore {
                 new_work_directory_abs_path: Some(work_directory_abs_path),
                 dot_git_abs_path: Some(dot_git_abs_path),
                 repository_dir_abs_path: Some(_repository_dir_abs_path),
-                common_dir_abs_path: Some(_common_dir_abs_path),
+                common_dir_abs_path: Some(common_dir_abs_path),
                 ..
             } = update
             {
+                let original_repo_abs_path: Arc<Path> =
+                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
+                let is_trusted = TrustedWorktrees::try_get_global(cx)
+                    .map(|trusted_worktrees| {
+                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
+                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
+                        })
+                    })
+                    .unwrap_or(false);
                 let git_store = cx.weak_entity();
                 let repo = cx.new(|cx| {
                     let mut repo = Repository::local(
                         id,
                         work_directory_abs_path.clone(),
+                        original_repo_abs_path.clone(),
                         dot_git_abs_path.clone(),
                         project_environment.downgrade(),
                         fs.clone(),
+                        is_trusted,
                         git_store,
                         cx,
                     );
@@ -1545,6 +1593,39 @@ impl GitStore {
         }
     }
 
+    fn on_trusted_worktrees_event(
+        &mut self,
+        _: Entity<TrustedWorktreesStore>,
+        event: &TrustedWorktreesEvent,
+        cx: &mut Context<Self>,
+    ) {
+        if !matches!(self.state, GitStoreState::Local { .. }) {
+            return;
+        }
+
+        let (is_trusted, event_paths) = match event {
+            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
+            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
+        };
+
+        for (repo_id, worktree_ids) in &self.worktree_ids {
+            if worktree_ids
+                .iter()
+                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
+            {
+                if let Some(repo) = self.repositories.get(repo_id) {
+                    let repository_state = repo.read(cx).repository_state.clone();
+                    cx.background_spawn(async move {
+                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
+                            state.backend.set_trusted(is_trusted);
+                        }
+                    })
+                    .detach();
+                }
+            }
+        }
+    }
+
     fn on_buffer_store_event(
         &mut self,
         _: Entity<BufferStore>,
@@ -1827,6 +1908,11 @@ impl GitStore {
             let id = RepositoryId::from_proto(update.id);
             let client = this.upstream_client().context("no upstream client")?;
 
+            let original_repo_abs_path: Option<Arc<Path>> = update
+                .original_repo_abs_path
+                .as_deref()
+                .map(|p| Path::new(p).into());
+
             let mut repo_subscription = None;
             let repo = this.repositories.entry(id).or_insert_with(|| {
                 let git_store = cx.weak_entity();
@@ -1834,6 +1920,7 @@ impl GitStore {
                     Repository::remote(
                         id,
                         Path::new(&update.abs_path).into(),
+                        original_repo_abs_path.clone(),
                         path_style,
                         ProjectId(update.project_id),
                         client,
@@ -3429,10 +3516,17 @@ impl RepositoryId {
 }
 
 impl RepositorySnapshot {
-    fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
+    fn empty(
+        id: RepositoryId,
+        work_directory_abs_path: Arc<Path>,
+        original_repo_abs_path: Option<Arc<Path>>,
+        path_style: PathStyle,
+    ) -> Self {
         Self {
             id,
             statuses_by_path: Default::default(),
+            original_repo_abs_path: original_repo_abs_path
+                .unwrap_or_else(|| work_directory_abs_path.clone()),
             work_directory_abs_path,
             branch: None,
             head_commit: None,
@@ -3457,9 +3551,9 @@ impl RepositorySnapshot {
             removed_statuses: Default::default(),
             current_merge_conflicts: self
                 .merge
-                .conflicted_paths
+                .merge_heads_by_conflicted_path
                 .iter()
-                .map(|repo_path| repo_path.to_proto())
+                .map(|(repo_path, _)| repo_path.to_proto())
                 .collect(),
             merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
             project_id,
@@ -3476,6 +3570,9 @@ impl RepositorySnapshot {
                 .collect(),
             remote_upstream_url: self.remote_upstream_url.clone(),
             remote_origin_url: self.remote_origin_url.clone(),
+            original_repo_abs_path: Some(
+                self.original_repo_abs_path.to_string_lossy().into_owned(),
+            ),
         }
     }
 
@@ -3497,7 +3594,9 @@ impl RepositorySnapshot {
                             current_new_entry = new_statuses.next();
                         }
                         Ordering::Equal => {
-                            if new_entry.status != old_entry.status {
+                            if new_entry.status != old_entry.status
+                                || new_entry.diff_stat != old_entry.diff_stat
+                            {
                                 updated_statuses.push(new_entry.to_proto());
                             }
                             current_old_entry = old_statuses.next();
@@ -3528,9 +3627,9 @@ impl RepositorySnapshot {
             removed_statuses,
             current_merge_conflicts: self
                 .merge
-                .conflicted_paths
+                .merge_heads_by_conflicted_path
                 .iter()
-                .map(|path| path.to_proto())
+                .map(|(path, _)| path.to_proto())
                 .collect(),
             merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
             project_id,
@@ -3547,6 +3646,9 @@ impl RepositorySnapshot {
                 .collect(),
             remote_upstream_url: self.remote_upstream_url.clone(),
             remote_origin_url: self.remote_origin_url.clone(),
+            original_repo_abs_path: Some(
+                self.original_repo_abs_path.to_string_lossy().into_owned(),
+            ),
         }
     }
 
@@ -3564,6 +3666,12 @@ impl RepositorySnapshot {
             .cloned()
     }
 
+    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
+        self.statuses_by_path
+            .get(&PathKey(path.as_ref().clone()), ())
+            .and_then(|entry| entry.diff_stat)
+    }
+
     pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
         Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
     }
@@ -3586,12 +3694,16 @@ impl RepositorySnapshot {
     }
 
     pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
-        self.merge.conflicted_paths.contains(repo_path)
+        self.merge
+            .merge_heads_by_conflicted_path
+            .contains_key(repo_path)
     }
 
     pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
-        let had_conflict_on_last_merge_head_change =
-            self.merge.conflicted_paths.contains(repo_path);
+        let had_conflict_on_last_merge_head_change = self
+            .merge
+            .merge_heads_by_conflicted_path
+            .contains_key(repo_path);
         let has_conflict_currently = self
             .status_for_path(repo_path)
             .is_some_and(|entry| entry.status.is_conflicted());
@@ -3630,13 +3742,13 @@ pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
 }
 
 impl MergeDetails {
-    async fn load(
+    async fn update(
+        &mut self,
         backend: &Arc<dyn GitRepository>,
-        status: &SumTree<StatusEntry>,
-        prev_snapshot: &RepositorySnapshot,
-    ) -> Result<(MergeDetails, bool)> {
+        current_conflicted_paths: Vec<RepoPath>,
+    ) -> Result<bool> {
         log::debug!("load merge details");
-        let message = backend.merge_message().await;
+        self.message = backend.merge_message().await.map(SharedString::from);
         let heads = backend
             .revparse_batch(vec![
                 "MERGE_HEAD".into(),
@@ -3651,48 +3763,42 @@ impl MergeDetails {
             .into_iter()
             .map(|opt| opt.map(SharedString::from))
             .collect::<Vec<_>>();
-        let merge_heads_changed = heads != prev_snapshot.merge.heads;
-        let conflicted_paths = if merge_heads_changed {
-            let current_conflicted_paths = TreeSet::from_ordered_entries(
-                status
-                    .iter()
-                    .filter(|entry| entry.status.is_conflicted())
-                    .map(|entry| entry.repo_path.clone()),
-            );
 
-            // It can happen that we run a scan while a lengthy merge is in progress
-            // that will eventually result in conflicts, but before those conflicts
-            // are reported by `git status`. Since for the moment we only care about
-            // the merge heads state for the purposes of tracking conflicts, don't update
-            // this state until we see some conflicts.
-            if heads.iter().any(Option::is_some)
-                && !prev_snapshot.merge.heads.iter().any(Option::is_some)
-                && current_conflicted_paths.is_empty()
-            {
-                log::debug!("not updating merge heads because no conflicts found");
-                return Ok((
-                    MergeDetails {
-                        message: message.map(SharedString::from),
-                        ..prev_snapshot.merge.clone()
-                    },
-                    false,
-                ));
+        let mut conflicts_changed = false;
+
+        // Record the merge state for newly conflicted paths
+        for path in &current_conflicted_paths {
+            if self.merge_heads_by_conflicted_path.get(&path).is_none() {
+                conflicts_changed = true;
+                self.merge_heads_by_conflicted_path
+                    .insert(path.clone(), heads.clone());
             }
+        }
 
-            current_conflicted_paths
-        } else {
-            prev_snapshot.merge.conflicted_paths.clone()
-        };
-        let details = MergeDetails {
-            conflicted_paths,
-            message: message.map(SharedString::from),
-            heads,
-        };
-        Ok((details, merge_heads_changed))
+        // Clear state for paths that are no longer conflicted and for which the merge heads have changed
+        self.merge_heads_by_conflicted_path
+            .retain(|path, old_merge_heads| {
+                let keep = current_conflicted_paths.contains(path)
+                    || (old_merge_heads == &heads
+                        && old_merge_heads.iter().any(|head| head.is_some()));
+                if !keep {
+                    conflicts_changed = true;
+                }
+                keep
+            });
+
+        Ok(conflicts_changed)
     }
 }
 
 impl Repository {
+    pub fn is_trusted(&self) -> bool {
+        match self.repository_state.peek() {
+            Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
+            _ => false,
+        }
+    }
+
     pub fn snapshot(&self) -> RepositorySnapshot {
         self.snapshot.clone()
     }
@@ -3714,14 +3820,20 @@ impl Repository {
     fn local(
         id: RepositoryId,
         work_directory_abs_path: Arc<Path>,
+        original_repo_abs_path: Arc<Path>,
         dot_git_abs_path: Arc<Path>,
         project_environment: WeakEntity<ProjectEnvironment>,
         fs: Arc<dyn Fs>,
+        is_trusted: bool,
         git_store: WeakEntity<GitStore>,
         cx: &mut Context<Self>,
     ) -> Self {
-        let snapshot =
-            RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
+        let snapshot = RepositorySnapshot::empty(
+            id,
+            work_directory_abs_path.clone(),
+            Some(original_repo_abs_path),
+            PathStyle::local(),
+        );
         let state = cx
             .spawn(async move |_, cx| {
                 LocalRepositoryState::new(
@@ -3729,6 +3841,7 @@ impl Repository {
                     dot_git_abs_path,
                     project_environment,
                     fs,
+                    is_trusted,
                     cx,
                 )
                 .await
@@ -3744,7 +3857,7 @@ impl Repository {
             .shared();
 
         cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
-            RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => {
+            RepositoryEvent::BranchChanged => {
                 if this.scan_id > 1 {
                     this.initial_graph_data.clear();
                 }
@@ -3775,13 +3888,19 @@ impl Repository {
     fn remote(
         id: RepositoryId,
         work_directory_abs_path: Arc<Path>,
+        original_repo_abs_path: Option<Arc<Path>>,
         path_style: PathStyle,
         project_id: ProjectId,
         client: AnyProtoClient,
         git_store: WeakEntity<GitStore>,
         cx: &mut Context<Self>,
     ) -> Self {
-        let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
+        let snapshot = RepositorySnapshot::empty(
+            id,
+            work_directory_abs_path,
+            original_repo_abs_path,
+            path_style,
+        );
         let repository_state = RemoteRepositoryState { project_id, client };
         let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
         let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
@@ -4053,6 +4172,10 @@ impl Repository {
         self.snapshot.status()
     }
 
+    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
+        self.snapshot.diff_stat_for_path(path)
+    }
+
     pub fn cached_stash(&self) -> GitStash {
         self.snapshot.stash_entries.clone()
     }
@@ -4364,47 +4487,82 @@ impl Repository {
         })
     }
 
+    pub fn get_graph_data(
+        &self,
+        log_source: LogSource,
+        log_order: LogOrder,
+    ) -> Option<&InitialGitGraphData> {
+        self.initial_graph_data.get(&(log_source, log_order))
+    }
+
     pub fn graph_data(
         &mut self,
         log_source: LogSource,
         log_order: LogOrder,
         range: Range<usize>,
         cx: &mut Context<Self>,
-    ) -> (&[Arc<InitialGraphCommitData>], bool) {
-        let (loading_task, initial_commit_data) = self
+    ) -> GraphDataResponse<'_> {
+        let initial_commit_data = self
             .initial_graph_data
-            .entry((log_order, log_source.clone()))
+            .entry((log_source.clone(), log_order))
             .or_insert_with(|| {
                 let state = self.repository_state.clone();
                 let log_source = log_source.clone();
-                (
-                    cx.spawn(async move |repository, cx| {
-                        let state = state.await;
-                        match state {
-                            Ok(RepositoryState::Local(LocalRepositoryState {
-                                backend, ..
-                            })) => {
-                                Self::local_git_graph_data(
-                                    repository, backend, log_source, log_order, cx,
-                                )
-                                .await
-                            }
-                            Ok(RepositoryState::Remote(_)) => {
-                                Err("Git graph is not supported for collab yet".into())
-                            }
-                            Err(e) => Err(SharedString::from(e)),
+
+                let fetch_task = cx.spawn(async move |repository, cx| {
+                    let state = state.await;
+                    let result = match state {
+                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
+                            Self::local_git_graph_data(
+                                repository.clone(),
+                                backend,
+                                log_source.clone(),
+                                log_order,
+                                cx,
+                            )
+                            .await
                         }
-                    }),
-                    vec![],
-                )
+                        Ok(RepositoryState::Remote(_)) => {
+                            Err("Git graph is not supported for collab yet".into())
+                        }
+                        Err(e) => Err(SharedString::from(e)),
+                    };
+
+                    if let Err(fetch_task_error) = result {
+                        repository
+                            .update(cx, |repository, _| {
+                                if let Some(data) = repository
+                                    .initial_graph_data
+                                    .get_mut(&(log_source, log_order))
+                                {
+                                    data.error = Some(fetch_task_error);
+                                } else {
+                                    debug_panic!(
+                                        "This task would be dropped if this entry doesn't exist"
+                                    );
+                                }
+                            })
+                            .ok();
+                    }
+                });
+
+                InitialGitGraphData {
+                    fetch_task,
+                    error: None,
+                    commit_data: Vec::new(),
+                    commit_oid_to_index: HashMap::default(),
+                }
             });
 
-        let max_start = initial_commit_data.len().saturating_sub(1);
-        let max_end = initial_commit_data.len();
-        (
-            &initial_commit_data[range.start.min(max_start)..range.end.min(max_end)],
-            !loading_task.is_ready(),
-        )
+        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
+        let max_end = initial_commit_data.commit_data.len();
+
+        GraphDataResponse {
+            commits: &initial_commit_data.commit_data
+                [range.start.min(max_start)..range.end.min(max_end)],
+            is_loading: !initial_commit_data.fetch_task.is_ready(),
+            error: initial_commit_data.error.clone(),
+        }
     }
 
     async fn local_git_graph_data(
@@ -4427,32 +4585,38 @@ impl Repository {
             }
         });
 
-        let graph_data_key = (log_order, log_source.clone());
+        let graph_data_key = (log_source, log_order);
 
         while let Ok(initial_graph_commit_data) = request_rx.recv().await {
             this.update(cx, |repository, cx| {
                 let graph_data = repository
                     .initial_graph_data
-                    .get_mut(&graph_data_key)
-                    .map(|(_, graph_data)| graph_data);
-                debug_assert!(
-                    graph_data.is_some(),
-                    "This task should be dropped if data doesn't exist"
-                );
+                    .entry(graph_data_key.clone())
+                    .and_modify(|graph_data| {
+                        for commit_data in initial_graph_commit_data {
+                            graph_data
+                                .commit_oid_to_index
+                                .insert(commit_data.sha, graph_data.commit_data.len());
+                            graph_data.commit_data.push(commit_data);
+
+                            cx.emit(RepositoryEvent::GraphEvent(
+                                graph_data_key.clone(),
+                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
+                            ));
+                        }
+                    });
 
-                if let Some(graph_data) = graph_data {
-                    graph_data.extend(initial_graph_commit_data);
-                    cx.emit(RepositoryEvent::GitGraphCountUpdated(
-                        graph_data_key.clone(),
-                        graph_data.len(),
-                    ));
+                match &graph_data {
+                    Entry::Occupied(_) => {}
+                    Entry::Vacant(_) => {
+                        debug_panic!("This task should be dropped if data doesn't exist");
+                    }
                 }
             })
             .ok();
         }
 
         task.await?;
-
         Ok(())
     }
 
@@ -4872,8 +5036,7 @@ impl Repository {
                                         .map(|repo_path| repo_path.to_proto())
                                         .collect(),
                                 })
-                                .await
-                                .context("sending stash request")?;
+                                .await?;
                             Ok(())
                         }
                     }
@@ -5082,8 +5245,7 @@ impl Repository {
                             }),
                             askpass_id,
                         })
-                        .await
-                        .context("sending commit request")?;
+                        .await?;
 
                     Ok(())
                 }
@@ -5122,8 +5284,7 @@ impl Repository {
                             askpass_id,
                             remote: fetch_options.to_proto(),
                         })
-                        .await
-                        .context("sending fetch request")?;
+                        .await?;
 
                     Ok(RemoteCommandOutput {
                         stdout: response.stdout,
@@ -5224,8 +5385,7 @@ impl Repository {
                                 }
                                     as i32),
                             })
-                            .await
-                            .context("sending push request")?;
+                            .await?;
 
                         Ok(RemoteCommandOutput {
                             stdout: response.stdout,
@@ -5291,8 +5451,7 @@ impl Repository {
                             branch_name: branch.as_ref().map(|b| b.to_string()),
                             remote_name: remote.to_string(),
                         })
-                        .await
-                        .context("sending pull request")?;
+                        .await?;
 
                     Ok(RemoteCommandOutput {
                         stdout: response.stdout,
@@ -5571,6 +5730,24 @@ impl Repository {
         )
     }
 
+    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
+        self.send_job(
+            Some(format!("git worktree remove: {}", path.display()).into()),
+            move |repo, _cx| async move {
+                match repo {
+                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+                        backend.remove_worktree(path, force).await
+                    }
+                    RepositoryState::Remote(_) => {
+                        anyhow::bail!(
+                            "Removing worktrees on remote repositories is not yet supported"
+                        )
+                    }
+                }
+            },
+        )
+    }
+
     pub fn default_branch(
         &mut self,
         include_remote_name: bool,
@@ -5852,12 +6029,10 @@ impl Repository {
         update: proto::UpdateRepository,
         cx: &mut Context<Self>,
     ) -> Result<()> {
-        let conflicted_paths = TreeSet::from_ordered_entries(
-            update
-                .current_merge_conflicts
-                .into_iter()
-                .filter_map(|path| RepoPath::from_proto(&path).log_err()),
-        );
+        if let Some(main_path) = &update.original_repo_abs_path {
+            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
+        }
+
         let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
         let new_head_commit = update
             .head_commit_details
@@ -5869,7 +6044,17 @@ impl Repository {
         self.snapshot.branch = new_branch;
         self.snapshot.head_commit = new_head_commit;
 
-        self.snapshot.merge.conflicted_paths = conflicted_paths;
+        // We don't store any merge head state for downstream projects; the upstream
+        // will track it and we will just get the updated conflicts
+        let new_merge_heads = TreeMap::from_ordered_entries(
+            update
+                .current_merge_conflicts
+                .into_iter()
+                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
+        );
+        let conflicts_changed =
+            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
+        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
         self.snapshot.merge.message = update.merge_message.map(SharedString::from);
         let new_stash_entries = GitStash {
             entries: update
@@ -5902,10 +6087,11 @@ impl Repository {
                     }),
             )
             .collect::<Vec<_>>();
-        if !edits.is_empty() {
+        if conflicts_changed || !edits.is_empty() {
             cx.emit(RepositoryEvent::StatusesChanged);
         }
         self.snapshot.statuses_by_path.edit(edits, ());
+
         if update.is_last_update {
             self.snapshot.scan_id = update.scan_id;
         }
@@ -5989,17 +6175,16 @@ impl Repository {
                 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                     bail!("not a local repository")
                 };
-                let (snapshot, events) = this
-                    .update(&mut cx, |this, _| {
-                        this.paths_needing_status_update.clear();
-                        compute_snapshot(
-                            this.id,
-                            this.work_directory_abs_path.clone(),
-                            this.snapshot.clone(),
-                            backend.clone(),
-                        )
-                    })
-                    .await?;
+                let compute_snapshot = this.update(&mut cx, |this, _| {
+                    this.paths_needing_status_update.clear();
+                    compute_snapshot(
+                        this.id,
+                        this.work_directory_abs_path.clone(),
+                        this.snapshot.clone(),
+                        backend.clone(),
+                    )
+                });
+                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                 this.update(&mut cx, |this, cx| {
                     this.snapshot = snapshot.clone();
                     this.clear_pending_ops(cx);
@@ -6221,22 +6406,43 @@ impl Repository {
                     return Ok(());
                 }
 
+                let has_head = prev_snapshot.head_commit.is_some();
+
                 let stash_entries = backend.stash_entries().await?;
                 let changed_path_statuses = cx
                     .background_spawn(async move {
                         let mut changed_paths =
                             changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
-                        let statuses = backend
-                            .status(&changed_paths.iter().cloned().collect::<Vec<_>>())
-                            .await?;
+                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();
+
+                        let status_task = backend.status(&changed_paths_vec);
+                        let diff_stat_future = if has_head {
+                            backend.diff_stat(&changed_paths_vec)
+                        } else {
+                            future::ready(Ok(status::GitDiffStat {
+                                entries: Arc::default(),
+                            }))
+                            .boxed()
+                        };
+
+                        let (statuses, diff_stats) =
+                            futures::future::try_join(status_task, diff_stat_future).await?;
+
+                        let diff_stats: HashMap<RepoPath, DiffStat> =
+                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());
+
                         let mut changed_path_statuses = Vec::new();
                         let prev_statuses = prev_snapshot.statuses_by_path.clone();
                         let mut cursor = prev_statuses.cursor::<PathProgress>(());
 
                         for (repo_path, status) in &*statuses.entries {
+                            let current_diff_stat = diff_stats.get(repo_path).copied();
+
                             changed_paths.remove(repo_path);
                             if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
-                                && cursor.item().is_some_and(|entry| entry.status == *status)
+                                && cursor.item().is_some_and(|entry| {
+                                    entry.status == *status && entry.diff_stat == current_diff_stat
+                                })
                             {
                                 continue;
                             }
@@ -6244,6 +6450,7 @@ impl Repository {
                             changed_path_statuses.push(Edit::Insert(StatusEntry {
                                 repo_path: repo_path.clone(),
                                 status: *status,
+                                diff_stat: current_diff_stat,
                             }));
                         }
                         let mut cursor = prev_statuses.cursor::<PathProgress>(());
@@ -6601,40 +6808,54 @@ async fn compute_snapshot(
     let mut events = Vec::new();
     let branches = backend.branches().await?;
     let branch = branches.into_iter().find(|branch| branch.is_head);
-    let statuses = backend
-        .status(&[RepoPath::from_rel_path(
+
+    // Useful when branch is None in detached head state
+    let head_commit = match backend.head_sha().await {
+        Some(head_sha) => backend.show(head_sha).await.log_err(),
+        None => None,
+    };
+
+    let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> = if head_commit.is_some() {
+        backend.diff_stat(&[])
+    } else {
+        future::ready(Ok(status::GitDiffStat {
+            entries: Arc::default(),
+        }))
+        .boxed()
+    };
+    let (statuses, diff_stats) = futures::future::try_join(
+        backend.status(&[RepoPath::from_rel_path(
             &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
-        )])
-        .await?;
+        )]),
+        diff_stat_future,
+    )
+    .await?;
+
+    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
+        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
     let stash_entries = backend.stash_entries().await?;
+    let mut conflicted_paths = Vec::new();
     let statuses_by_path = SumTree::from_iter(
-        statuses
-            .entries
-            .iter()
-            .map(|(repo_path, status)| StatusEntry {
+        statuses.entries.iter().map(|(repo_path, status)| {
+            if status.is_conflicted() {
+                conflicted_paths.push(repo_path.clone());
+            }
+            StatusEntry {
                 repo_path: repo_path.clone(),
                 status: *status,
-            }),
+                diff_stat: diff_stat_map.get(repo_path).copied(),
+            }
+        }),
         (),
     );
-    let (merge_details, merge_heads_changed) =
-        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
-    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
+    let mut merge_details = prev_snapshot.merge;
+    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
+    log::debug!("new merge details: {merge_details:?}");
 
-    if merge_heads_changed {
-        events.push(RepositoryEvent::MergeHeadsChanged);
-    }
-
-    if statuses_by_path != prev_snapshot.statuses_by_path {
+    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
         events.push(RepositoryEvent::StatusesChanged)
     }
 
-    // Useful when branch is None in detached head state
-    let head_commit = match backend.head_sha().await {
-        Some(head_sha) => backend.show(head_sha).await.log_err(),
-        None => None,
-    };
-
     if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
         events.push(RepositoryEvent::BranchChanged);
     }

crates/project/src/lsp_command.rs 🔗

@@ -533,7 +533,7 @@ impl LspCommand for PerformRename {
             .rename_provider
             .is_some_and(|capability| match capability {
                 OneOf::Left(enabled) => enabled,
-                OneOf::Right(_options) => true,
+                OneOf::Right(_) => true,
             })
     }
 

crates/project/src/lsp_store.rs 🔗

@@ -548,6 +548,7 @@ impl LocalLspStore {
                     let mut initialization_options = Self::initialization_options_for_adapter(
                         adapter.adapter.clone(),
                         &delegate,
+                        cx,
                     )
                     .await?;
 
@@ -3157,7 +3158,7 @@ impl LocalLspStore {
                 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                 .collect::<Vec<_>>();
 
-            lsp_edits.sort_by_key(|(range, _)| (range.start, range.end));
+            lsp_edits.sort_unstable_by_key(|(range, _)| (range.start, range.end));
 
             let mut lsp_edits = lsp_edits.into_iter().peekable();
             let mut edits = Vec::new();
@@ -3771,9 +3772,10 @@ impl LocalLspStore {
     async fn initialization_options_for_adapter(
         adapter: Arc<dyn LspAdapter>,
         delegate: &Arc<dyn LspAdapterDelegate>,
+        cx: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         let Some(mut initialization_config) =
-            adapter.clone().initialization_options(delegate).await?
+            adapter.clone().initialization_options(delegate, cx).await?
         else {
             return Ok(None);
         };
@@ -4999,10 +5001,6 @@ impl LspStore {
         };
 
         let status = request.status();
-        if !request.check_capabilities(language_server.adapter_server_capabilities()) {
-            return Task::ready(Ok(Default::default()));
-        }
-
         let request_timeout = ProjectSettings::get_global(cx)
             .global_lsp_settings
             .get_request_timeout();
@@ -5104,6 +5102,10 @@ impl LspStore {
             .clone();
         self.semantic_token_config
             .update_rules(new_semantic_token_rules);
+        // Always clear cached stylizers so that changes to language-specific
+        // semantic token rules (e.g. from extension install/uninstall) are
+        // picked up. Stylizers are recreated lazily, so this is cheap.
+        self.semantic_token_config.clear_stylizers();
 
         let new_global_semantic_tokens_mode =
             all_language_settings(None, cx).defaults.semantic_tokens;
@@ -7030,6 +7032,21 @@ impl LspStore {
                 .collect()
         } else {
             for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() {
+                // When a server refresh was requested, other servers' cached hints
+                // are unaffected by the refresh and must be included in the result.
+                // Otherwise apply_fetched_hints (with should_invalidate()=true)
+                // removes all visible hints but only adds back the requesting
+                // server's new hints, permanently losing other servers' hints.
+                let other_servers_cached: CacheInlayHints = if lsp_refresh_requested {
+                    lsp_data
+                        .inlay_hints
+                        .cached_hints(&chunk)
+                        .cloned()
+                        .unwrap_or_default()
+                } else {
+                    HashMap::default()
+                };
+
                 let next_hint_id = next_hint_id.clone();
                 let buffer = buffer.clone();
                 let query_version = query_version.clone();
@@ -7048,33 +7065,32 @@ impl LspStore {
                                         if update_cache {
                                             lsp_data.inlay_hints.invalidate_for_chunk(chunk);
                                         }
-                                        HashMap::default()
+                                        other_servers_cached
                                     } else {
-                                        new_hints_by_server
-                                            .into_iter()
-                                            .map(|(server_id, new_hints)| {
-                                                let new_hints = new_hints
-                                                    .into_iter()
-                                                    .map(|new_hint| {
-                                                        (
-                                                            InlayId::Hint(next_hint_id.fetch_add(
-                                                                1,
-                                                                atomic::Ordering::AcqRel,
-                                                            )),
-                                                            new_hint,
-                                                        )
-                                                    })
-                                                    .collect::<Vec<_>>();
-                                                if update_cache {
-                                                    lsp_data.inlay_hints.insert_new_hints(
-                                                        chunk,
-                                                        server_id,
-                                                        new_hints.clone(),
-                                                    );
-                                                }
-                                                (server_id, new_hints)
-                                            })
-                                            .collect()
+                                        let mut result = other_servers_cached;
+                                        for (server_id, new_hints) in new_hints_by_server {
+                                            let new_hints = new_hints
+                                                .into_iter()
+                                                .map(|new_hint| {
+                                                    (
+                                                        InlayId::Hint(next_hint_id.fetch_add(
+                                                            1,
+                                                            atomic::Ordering::AcqRel,
+                                                        )),
+                                                        new_hint,
+                                                    )
+                                                })
+                                                .collect::<Vec<_>>();
+                                            if update_cache {
+                                                lsp_data.inlay_hints.insert_new_hints(
+                                                    chunk,
+                                                    server_id,
+                                                    new_hints.clone(),
+                                                );
+                                            }
+                                            result.insert(server_id, new_hints);
+                                        }
+                                        result
                                     }
                                 })
                             })
@@ -11406,6 +11422,15 @@ impl LspStore {
 
                 let buffer_id = buffer.remote_id();
                 if local.registered_buffers.contains_key(&buffer_id) {
+                    let abs_path = file.abs_path(cx);
+                    let uri = match lsp::Uri::from_file_path(&abs_path) {
+                        Ok(uri) => uri,
+                        Err(()) => {
+                            log::error!("failed to convert path to URI: {:?}", abs_path);
+                            continue;
+                        }
+                    };
+
                     let versions = local
                         .buffer_snapshots
                         .entry(buffer_id)
@@ -11427,14 +11452,13 @@ impl LspStore {
                     let snapshot = versions.last().unwrap();
                     let version = snapshot.version;
                     let initial_snapshot = &snapshot.snapshot;
-                    let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap();
                     language_server.register_buffer(
                         uri,
                         adapter.language_id(&language.name()),
                         version,
                         initial_snapshot.text(),
                     );
-                    buffer_paths_registered.push((buffer_id, file.abs_path(cx)));
+                    buffer_paths_registered.push((buffer_id, abs_path));
                     local
                         .buffers_opened_in_servers
                         .entry(buffer_id)
@@ -13964,6 +13988,7 @@ impl LspAdapter for SshLspAdapter {
     async fn initialization_options(
         self: Arc<Self>,
         _: &Arc<dyn LspAdapterDelegate>,
+        _: &mut AsyncApp,
     ) -> Result<Option<serde_json::Value>> {
         let Some(options) = &self.initialization_options else {
             return Ok(None);

crates/project/src/lsp_store/lsp_ext_command.rs 🔗

@@ -211,10 +211,10 @@ impl LspCommand for OpenDocs {
         _: &Arc<LanguageServer>,
         _: &App,
     ) -> Result<OpenDocsParams> {
+        let uri = lsp::Uri::from_file_path(path)
+            .map_err(|()| anyhow::anyhow!("{path:?} is not a valid URI"))?;
         Ok(OpenDocsParams {
-            text_document: lsp::TextDocumentIdentifier {
-                uri: lsp::Uri::from_file_path(path).unwrap(),
-            },
+            text_document: lsp::TextDocumentIdentifier { uri },
             position: point_to_lsp(self.position),
         })
     }

crates/project/src/lsp_store/semantic_tokens.rs 🔗

@@ -12,8 +12,11 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedSt
 use language::{Buffer, LanguageName, language_settings::all_language_settings};
 use lsp::{AdapterServerCapabilities, LanguageServerId};
 use rpc::{TypedEnvelope, proto};
-use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore};
+use settings::{
+    DefaultSemanticTokenRules, SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore,
+};
 use smol::future::yield_now;
+
 use text::{Anchor, Bias, OffsetUtf16, PointUtf16, Unclipped};
 use util::ResultExt as _;
 
@@ -58,6 +61,15 @@ impl SemanticTokenConfig {
         }
     }
 
+    /// Clears all cached stylizers.
+    ///
+    /// This is called when settings change to ensure that any modifications to
+    /// language-specific semantic token rules (e.g. from extension install/uninstall)
+    /// are picked up. Stylizers are recreated lazily on next use.
+    pub(super) fn clear_stylizers(&mut self) {
+        self.stylizers.clear();
+    }
+
     pub(super) fn update_global_mode(&mut self, new_mode: settings::SemanticTokens) -> bool {
         if new_mode != self.global_mode {
             self.global_mode = new_mode;
@@ -462,6 +474,7 @@ impl SemanticTokenStylizer {
         let global_rules = &ProjectSettings::get_global(cx)
             .global_lsp_settings
             .semantic_token_rules;
+        let default_rules = cx.global::<DefaultSemanticTokenRules>();
 
         let rules_by_token_type = token_types
             .iter()
@@ -475,6 +488,7 @@ impl SemanticTokenStylizer {
                     .rules
                     .iter()
                     .chain(language_rules.into_iter().flat_map(|lr| &lr.rules))
+                    .chain(default_rules.0.rules.iter())
                     .rev()
                     .filter(filter)
                     .cloned()
@@ -653,8 +667,8 @@ impl ServerSemanticTokens {
 
     pub(crate) fn apply(&mut self, edits: &[SemanticTokensEdit]) {
         for edit in edits {
-            let start = edit.start as usize;
-            let end = start + edit.delete_count as usize;
+            let start = (edit.start as usize).min(self.data.len());
+            let end = (start + edit.delete_count as usize).min(self.data.len());
             self.data.splice(start..end, edit.data.iter().copied());
         }
     }
@@ -1000,4 +1014,38 @@ mod tests {
             ]
         );
     }
+
+    #[test]
+    fn applies_out_of_bounds_delta_edit_without_panic() {
+        let mut tokens = ServerSemanticTokens::from_full(vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0], None);
+
+        // start beyond data length
+        tokens.apply(&[SemanticTokensEdit {
+            start: 100,
+            delete_count: 5,
+            data: vec![1, 2, 3, 4, 5],
+        }]);
+        assert_eq!(
+            tokens.data,
+            vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0, 1, 2, 3, 4, 5]
+        );
+
+        // delete_count extends past data length
+        let mut tokens = ServerSemanticTokens::from_full(vec![2, 5, 3, 0, 3], None);
+        tokens.apply(&[SemanticTokensEdit {
+            start: 3,
+            delete_count: 100,
+            data: vec![9, 9],
+        }]);
+        assert_eq!(tokens.data, vec![2, 5, 3, 9, 9]);
+
+        // empty data
+        let mut tokens = ServerSemanticTokens::from_full(Vec::new(), None);
+        tokens.apply(&[SemanticTokensEdit {
+            start: 0,
+            delete_count: 5,
+            data: vec![1, 2, 3, 4, 5],
+        }]);
+        assert_eq!(tokens.data, vec![1, 2, 3, 4, 5]);
+    }
 }

crates/project/src/project.rs 🔗

@@ -1942,6 +1942,11 @@ impl Project {
         }
     }
 
+    #[cfg(feature = "test-support")]
+    pub fn client_subscriptions(&self) -> &Vec<client::Subscription> {
+        &self.client_subscriptions
+    }
+
     #[cfg(feature = "test-support")]
     pub async fn example(
         root_paths: impl IntoIterator<Item = &Path>,
@@ -2741,6 +2746,7 @@ impl Project {
         } = &mut self.client_state
         {
             *sharing_has_stopped = true;
+            self.client_subscriptions.clear();
             self.collaborators.clear();
             self.worktree_store.update(cx, |store, cx| {
                 store.disconnected_from_host(cx);

crates/project/src/project_settings.rs 🔗

@@ -1407,35 +1407,38 @@ impl SettingsObserver {
         let (mut user_tasks_file_rx, watcher_task) =
             watch_config_file(cx.background_executor(), fs, file_path.clone());
         let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next());
-        let weak_entry = cx.weak_entity();
         cx.spawn(async move |settings_observer, cx| {
             let _watcher_task = watcher_task;
             let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| {
-                settings_observer.task_store.clone()
+                settings_observer.task_store.downgrade()
             }) else {
                 return;
             };
             if let Some(user_tasks_content) = user_tasks_content {
-                task_store.update(cx, |task_store, cx| {
-                    task_store
-                        .update_user_tasks(
-                            TaskSettingsLocation::Global(&file_path),
-                            Some(&user_tasks_content),
-                            cx,
-                        )
-                        .log_err();
-                });
+                task_store
+                    .update(cx, |task_store, cx| {
+                        task_store
+                            .update_user_tasks(
+                                TaskSettingsLocation::Global(&file_path),
+                                Some(&user_tasks_content),
+                                cx,
+                            )
+                            .log_err();
+                    })
+                    .ok();
             }
             while let Some(user_tasks_content) = user_tasks_file_rx.next().await {
-                let result = task_store.update(cx, |task_store, cx| {
+                let Ok(result) = task_store.update(cx, |task_store, cx| {
                     task_store.update_user_tasks(
                         TaskSettingsLocation::Global(&file_path),
                         Some(&user_tasks_content),
                         cx,
                     )
-                });
+                }) else {
+                    continue;
+                };
 
-                weak_entry
+                settings_observer
                     .update(cx, |_, cx| match result {
                         Ok(()) => cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok(
                             file_path.clone()
@@ -1459,35 +1462,38 @@ impl SettingsObserver {
         let (mut user_tasks_file_rx, watcher_task) =
             watch_config_file(cx.background_executor(), fs, file_path.clone());
         let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next());
-        let weak_entry = cx.weak_entity();
         cx.spawn(async move |settings_observer, cx| {
             let _watcher_task = watcher_task;
             let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| {
-                settings_observer.task_store.clone()
+                settings_observer.task_store.downgrade()
             }) else {
                 return;
             };
             if let Some(user_tasks_content) = user_tasks_content {
-                task_store.update(cx, |task_store, cx| {
-                    task_store
-                        .update_user_debug_scenarios(
-                            TaskSettingsLocation::Global(&file_path),
-                            Some(&user_tasks_content),
-                            cx,
-                        )
-                        .log_err();
-                });
+                task_store
+                    .update(cx, |task_store, cx| {
+                        task_store
+                            .update_user_debug_scenarios(
+                                TaskSettingsLocation::Global(&file_path),
+                                Some(&user_tasks_content),
+                                cx,
+                            )
+                            .log_err();
+                    })
+                    .ok();
             }
             while let Some(user_tasks_content) = user_tasks_file_rx.next().await {
-                let result = task_store.update(cx, |task_store, cx| {
+                let Ok(result) = task_store.update(cx, |task_store, cx| {
                     task_store.update_user_debug_scenarios(
                         TaskSettingsLocation::Global(&file_path),
                         Some(&user_tasks_content),
                         cx,
                     )
-                });
+                }) else {
+                    continue;
+                };
 
-                weak_entry
+                settings_observer
                     .update(cx, |_, cx| match result {
                         Ok(()) => cx.emit(SettingsObserverEvent::LocalDebugScenariosUpdated(Ok(
                             file_path.clone(),

crates/project/tests/integration/ext_agent_tests.rs 🔗

@@ -9,21 +9,16 @@ struct NoopExternalAgent;
 impl ExternalAgentServer for NoopExternalAgent {
     fn get_command(
         &mut self,
-        _root_dir: Option<&str>,
         _extra_env: HashMap<String, String>,
         _status_tx: Option<watch::Sender<SharedString>>,
         _new_version_available_tx: Option<watch::Sender<Option<String>>>,
         _cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-        Task::ready(Ok((
-            AgentServerCommand {
-                path: PathBuf::from("noop"),
-                args: Vec::new(),
-                env: None,
-            },
-            "".to_string(),
-            None,
-        )))
+    ) -> Task<Result<AgentServerCommand>> {
+        Task::ready(Ok(AgentServerCommand {
+            path: PathBuf::from("noop"),
+            args: Vec::new(),
+            env: None,
+        }))
     }
 
     fn as_any_mut(&mut self) -> &mut dyn Any {

crates/project/tests/integration/extension_agent_tests.rs 🔗

@@ -25,21 +25,16 @@ struct NoopExternalAgent;
 impl ExternalAgentServer for NoopExternalAgent {
     fn get_command(
         &mut self,
-        _root_dir: Option<&str>,
         _extra_env: HashMap<String, String>,
         _status_tx: Option<watch::Sender<SharedString>>,
         _new_version_available_tx: Option<watch::Sender<Option<String>>>,
         _cx: &mut AsyncApp,
-    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-        Task::ready(Ok((
-            AgentServerCommand {
-                path: PathBuf::from("noop"),
-                args: Vec::new(),
-                env: None,
-            },
-            "".to_string(),
-            None,
-        )))
+    ) -> Task<Result<AgentServerCommand>> {
+        Task::ready(Ok(AgentServerCommand {
+            path: PathBuf::from("noop"),
+            args: Vec::new(),
+            env: None,
+        }))
     }
 
     fn as_any_mut(&mut self) -> &mut dyn Any {
@@ -301,26 +296,6 @@ async fn test_commands_run_in_extraction_directory(cx: &mut TestAppContext) {
 
 #[test]
 fn test_tilde_expansion_in_settings() {
-    let settings = settings::BuiltinAgentServerSettings {
-        path: Some(PathBuf::from("~/bin/agent")),
-        args: Some(vec!["--flag".into()]),
-        env: None,
-        ignore_system_version: None,
-        default_mode: None,
-        default_model: None,
-        favorite_models: vec![],
-        default_config_options: Default::default(),
-        favorite_config_option_values: Default::default(),
-    };
-
-    let BuiltinAgentServerSettings { path, .. } = settings.into();
-
-    let path = path.unwrap();
-    assert!(
-        !path.to_string_lossy().starts_with("~"),
-        "Tilde should be expanded for builtin agent path"
-    );
-
     let settings = settings::CustomAgentServerSettings::Custom {
         path: PathBuf::from("~/custom/agent"),
         args: vec!["serve".into()],

crates/project/tests/integration/git_store.rs 🔗

@@ -336,7 +336,7 @@ mod conflict_set_tests {
                     second_head: UnmergedStatusCode::Updated,
                 },
             );
-            // Cause the repository to emit MergeHeadsChanged.
+            // Cause the repository to update cached conflicts
             state.refs.insert("MERGE_HEAD".into(), "123".into())
         })
         .unwrap();
@@ -461,6 +461,168 @@ mod conflict_set_tests {
             assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0));
         });
     }
+
+    #[gpui::test]
+    async fn test_conflict_updates_with_delayed_merge_head_conflicts(
+        executor: BackgroundExecutor,
+        cx: &mut TestAppContext,
+    ) {
+        zlog::init_test();
+        cx.update(|cx| {
+            settings::init(cx);
+        });
+
+        let initial_text = "
+            one
+            two
+            three
+            four
+        "
+        .unindent();
+
+        let conflicted_text = "
+            one
+            <<<<<<< HEAD
+            two
+            =======
+            TWO
+            >>>>>>> branch
+            three
+            four
+        "
+        .unindent();
+
+        let resolved_text = "
+            one
+            TWO
+            three
+            four
+        "
+        .unindent();
+
+        let fs = FakeFs::new(executor);
+        fs.insert_tree(
+            path!("/project"),
+            json!({
+                ".git": {},
+                "a.txt": initial_text,
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
+        let (git_store, buffer) = project.update(cx, |project, cx| {
+            (
+                project.git_store().clone(),
+                project.open_local_buffer(path!("/project/a.txt"), cx),
+            )
+        });
+        let buffer = buffer.await.unwrap();
+        let conflict_set = git_store.update(cx, |git_store, cx| {
+            git_store.open_conflict_set(buffer.clone(), cx)
+        });
+
+        let (events_tx, events_rx) = mpsc::channel::<ConflictSetUpdate>();
+        let _conflict_set_subscription = cx.update(|cx| {
+            cx.subscribe(&conflict_set, move |_, event, _| {
+                events_tx.send(event.clone()).ok();
+            })
+        });
+
+        cx.run_until_parked();
+        events_rx
+            .try_recv()
+            .expect_err("conflict set should start empty");
+
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.refs.insert("MERGE_HEAD".into(), "123".into())
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        events_rx
+            .try_recv()
+            .expect_err("merge head without conflicted paths should not publish conflicts");
+        conflict_set.update(cx, |conflict_set, _| {
+            assert!(!conflict_set.has_conflict);
+            assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
+        });
+
+        buffer.update(cx, |buffer, cx| {
+            buffer.set_text(conflicted_text.clone(), cx);
+        });
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.unmerged_paths.insert(
+                repo_path("a.txt"),
+                UnmergedStatus {
+                    first_head: UnmergedStatusCode::Updated,
+                    second_head: UnmergedStatusCode::Updated,
+                },
+            );
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        let update = events_rx
+            .try_recv()
+            .expect("conflicts should appear once conflicted paths are visible");
+        assert_eq!(update.old_range, 0..0);
+        assert_eq!(update.new_range, 0..1);
+        conflict_set.update(cx, |conflict_set, cx| {
+            assert!(conflict_set.has_conflict);
+            let conflict_range = conflict_set.snapshot().conflicts[0]
+                .range
+                .to_point(buffer.read(cx));
+            assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0));
+        });
+
+        buffer.update(cx, |buffer, cx| {
+            buffer.set_text(resolved_text.clone(), cx);
+        });
+
+        cx.run_until_parked();
+        let update = events_rx
+            .try_recv()
+            .expect("resolved buffer text should clear visible conflict markers");
+        assert_eq!(update.old_range, 0..1);
+        assert_eq!(update.new_range, 0..0);
+        conflict_set.update(cx, |conflict_set, _| {
+            assert!(conflict_set.has_conflict);
+            assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
+        });
+
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.refs.insert("MERGE_HEAD".into(), "456".into());
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        events_rx.try_recv().expect_err(
+            "merge-head change without unmerged-path changes should not emit marker updates",
+        );
+        conflict_set.update(cx, |conflict_set, _| {
+            assert!(conflict_set.has_conflict);
+            assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
+        });
+
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.unmerged_paths.remove(&repo_path("a.txt"));
+            state.refs.remove("MERGE_HEAD");
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        let update = events_rx.try_recv().expect(
+            "status catch-up should emit a no-op update when clearing stale conflict state",
+        );
+        assert_eq!(update.old_range, 0..0);
+        assert_eq!(update.new_range, 0..0);
+        assert!(update.buffer_range.is_none());
+        conflict_set.update(cx, |conflict_set, _| {
+            assert!(!conflict_set.has_conflict);
+            assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
+        });
+    }
 }
 
 mod git_traversal {
@@ -1012,3 +1174,327 @@ mod git_traversal {
         pretty_assertions::assert_eq!(found_statuses, expected_statuses);
     }
 }
+
+mod git_worktrees {
+    //! Integration tests for listing and creating git worktrees through the
+    //! `Repository` entity, backed by `FakeFs`'s fake git state.
+
+    use std::path::PathBuf;
+
+    use fs::FakeFs;
+    use gpui::TestAppContext;
+    use serde_json::json;
+    use settings::SettingsStore;
+    use util::path;
+
+    // Installs the minimal global state (a test `SettingsStore`) that
+    // `Project::test` requires.
+    fn init_test(cx: &mut gpui::TestAppContext) {
+        zlog::init_test();
+
+        cx.update(|cx| {
+            let settings_store = SettingsStore::test(cx);
+            cx.set_global(settings_store);
+        });
+    }
+
+    #[gpui::test]
+    async fn test_git_worktrees_list_and_create(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/root"),
+            json!({
+                ".git": {},
+                "file.txt": "content",
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        // A freshly-opened repository should list exactly one worktree: the
+        // primary checkout at the repository root.
+        let worktrees = cx
+            .update(|cx| repository.update(cx, |repository, _| repository.worktrees()))
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(worktrees.len(), 1);
+        assert_eq!(worktrees[0].path, PathBuf::from(path!("/root")));
+
+        // Create a linked worktree pinned to an explicit commitish.
+        let worktree_directory = PathBuf::from(path!("/root"));
+        cx.update(|cx| {
+            repository.update(cx, |repository, _| {
+                repository.create_worktree(
+                    "feature-branch".to_string(),
+                    worktree_directory.clone(),
+                    Some("abc123".to_string()),
+                )
+            })
+        })
+        .await
+        .unwrap()
+        .unwrap();
+
+        cx.executor().run_until_parked();
+
+        // The new worktree should appear under `<dir>/<branch>` with a branch
+        // ref and the SHA we requested.
+        let worktrees = cx
+            .update(|cx| repository.update(cx, |repository, _| repository.worktrees()))
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(worktrees.len(), 2);
+        assert_eq!(worktrees[0].path, PathBuf::from(path!("/root")));
+        assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch"));
+        assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch");
+        assert_eq!(worktrees[1].sha.as_ref(), "abc123");
+
+        // Create a second worktree without a commitish.
+        cx.update(|cx| {
+            repository.update(cx, |repository, _| {
+                repository.create_worktree(
+                    "bugfix-branch".to_string(),
+                    worktree_directory.clone(),
+                    None,
+                )
+            })
+        })
+        .await
+        .unwrap()
+        .unwrap();
+
+        cx.executor().run_until_parked();
+
+        // List worktrees — should now have main + two created
+        let worktrees = cx
+            .update(|cx| repository.update(cx, |repository, _| repository.worktrees()))
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(worktrees.len(), 3);
+
+        let feature_worktree = worktrees
+            .iter()
+            .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch")
+            .expect("should find feature-branch worktree");
+        assert_eq!(
+            feature_worktree.path,
+            worktree_directory.join("feature-branch")
+        );
+
+        let bugfix_worktree = worktrees
+            .iter()
+            .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/bugfix-branch")
+            .expect("should find bugfix-branch worktree");
+        assert_eq!(
+            bugfix_worktree.path,
+            worktree_directory.join("bugfix-branch")
+        );
+        // "fake-sha" is presumably the placeholder SHA the fake git backend
+        // assigns when no commitish is given — TODO confirm against FakeFs.
+        assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha");
+    }
+
+    // NOTE(review): this import conventionally belongs with the other `use`
+    // items at the top of the module (as `trust_tests` below does).
+    use crate::Project;
+}
+
+mod trust_tests {
+    //! Tests for the worktree-trust system: repositories default to untrusted,
+    //! become trusted when their worktree is trusted, and can be restricted
+    //! and re-trusted again.
+
+    use collections::HashSet;
+    use fs::FakeFs;
+    use gpui::TestAppContext;
+    use project::trusted_worktrees::*;
+
+    use serde_json::json;
+    use settings::SettingsStore;
+    use util::path;
+
+    use crate::Project;
+
+    // Installs the minimal global state (a test `SettingsStore`) that
+    // `Project::test` requires.
+    fn init_test(cx: &mut TestAppContext) {
+        zlog::init_test();
+
+        cx.update(|cx| {
+            let settings_store = SettingsStore::test(cx);
+            cx.set_global(settings_store);
+        });
+    }
+
+    #[gpui::test]
+    async fn test_repository_defaults_to_untrusted_without_trust_system(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/project"),
+            json!({
+                ".git": {},
+                "a.txt": "hello",
+            }),
+        )
+        .await;
+
+        // Create project without trust system — repos should default to untrusted.
+        let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        repository.read_with(cx, |repo, _| {
+            assert!(
+                !repo.is_trusted(),
+                "repository should default to untrusted when no trust system is initialized"
+            );
+        });
+    }
+
+    #[gpui::test]
+    async fn test_multiple_repos_trust_with_single_worktree(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        // Two nested repositories (`/project/.git` and `/project/sub/.git`)
+        // inside a single project worktree.
+        fs.insert_tree(
+            path!("/project"),
+            json!({
+                ".git": {},
+                "a.txt": "hello",
+                "sub": {
+                    ".git": {},
+                    "b.txt": "world",
+                },
+            }),
+        )
+        .await;
+
+        cx.update(|cx| {
+            init(DbTrustedPaths::default(), cx);
+        });
+
+        let project =
+            Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+        let worktree_id = worktree_store.read_with(cx, |store, cx| {
+            store.worktrees().next().unwrap().read(cx).id()
+        });
+
+        let repos = project.read_with(cx, |project, cx| {
+            project
+                .repositories(cx)
+                .values()
+                .cloned()
+                .collect::<Vec<_>>()
+        });
+        assert_eq!(repos.len(), 2, "should have two repositories");
+        for repo in &repos {
+            repo.read_with(cx, |repo, _| {
+                assert!(
+                    !repo.is_trusted(),
+                    "all repos should be untrusted initially"
+                );
+            });
+        }
+
+        // Trusting the single containing worktree should propagate trust to
+        // both repositories within it.
+        let trusted_worktrees = cx
+            .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set"));
+        trusted_worktrees.update(cx, |store, cx| {
+            store.trust(
+                &worktree_store,
+                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+                cx,
+            );
+        });
+        cx.executor().run_until_parked();
+
+        for repo in &repos {
+            repo.read_with(cx, |repo, _| {
+                assert!(
+                    repo.is_trusted(),
+                    "all repos should be trusted after worktree is trusted"
+                );
+            });
+        }
+    }
+
+    // Exercises the full trust → restrict → trust cycle for one repository.
+    #[gpui::test]
+    async fn test_repository_trust_restrict_trust_cycle(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/project"),
+            json!({
+                ".git": {},
+                "a.txt": "hello",
+            }),
+        )
+        .await;
+
+        cx.update(|cx| {
+            // NOTE(review): `init` is already in scope via the glob import at
+            // the top of this module — the fully-qualified path here is
+            // redundant (compare the unqualified call in the test above).
+            project::trusted_worktrees::init(DbTrustedPaths::default(), cx);
+        });
+
+        let project =
+            Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+        let worktree_id = worktree_store.read_with(cx, |store, cx| {
+            store.worktrees().next().unwrap().read(cx).id()
+        });
+
+        let repository = project.read_with(cx, |project, cx| {
+            project.repositories(cx).values().next().unwrap().clone()
+        });
+
+        repository.read_with(cx, |repo, _| {
+            assert!(!repo.is_trusted(), "repository should start untrusted");
+        });
+
+        let trusted_worktrees = cx
+            .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set"));
+
+        // Trust the worktree…
+        trusted_worktrees.update(cx, |store, cx| {
+            store.trust(
+                &worktree_store,
+                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+                cx,
+            );
+        });
+        cx.executor().run_until_parked();
+
+        repository.read_with(cx, |repo, _| {
+            assert!(
+                repo.is_trusted(),
+                "repository should be trusted after worktree is trusted"
+            );
+        });
+
+        // …restrict it again…
+        trusted_worktrees.update(cx, |store, cx| {
+            store.restrict(
+                worktree_store.downgrade(),
+                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+                cx,
+            );
+        });
+        cx.executor().run_until_parked();
+
+        repository.read_with(cx, |repo, _| {
+            assert!(
+                !repo.is_trusted(),
+                "repository should be untrusted after worktree is restricted"
+            );
+        });
+
+        // …and verify a second trust round-trips back to trusted.
+        trusted_worktrees.update(cx, |store, cx| {
+            store.trust(
+                &worktree_store,
+                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+                cx,
+            );
+        });
+        cx.executor().run_until_parked();
+
+        repository.read_with(cx, |repo, _| {
+            assert!(
+                repo.is_trusted(),
+                "repository should be trusted again after second trust"
+            );
+        });
+    }
+}

crates/project/tests/integration/project_tests.rs 🔗

@@ -31,7 +31,7 @@ use futures::{StreamExt, future};
 use git::{
     GitHostingProviderRegistry,
     repository::{RepoPath, repo_path},
-    status::{FileStatus, StatusCode, TrackedStatus},
+    status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
 };
 use git2::RepositoryInitOptions;
 use gpui::{
@@ -5359,6 +5359,52 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
     });
 }
 
+// Regression test: after a directory is deleted and then recreated at the same
+// path, the worktree must still pick up filesystem events for children created
+// inside the recreated directory. Linux-only and retried, presumably because
+// the underlying filesystem watcher's re-watch timing is racy — TODO confirm.
+#[cfg(target_os = "linux")]
+#[gpui::test(retries = 5)]
+async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+    // Uses the real filesystem (not FakeFs), so parking must be allowed while
+    // waiting on actual fs events.
+    cx.executor().allow_parking();
+
+    let dir = TempTree::new(json!({}));
+    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
+    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
+
+    tree.flush_fs_events(cx).await;
+
+    // Create the directory and confirm the worktree sees it.
+    let repro_dir = dir.path().join("repro");
+    std::fs::create_dir(&repro_dir).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    cx.update(|cx| {
+        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
+    });
+
+    // Delete it and confirm the entry is gone.
+    std::fs::remove_dir_all(&repro_dir).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    cx.update(|cx| {
+        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
+    });
+
+    // Recreate the same path.
+    std::fs::create_dir(&repro_dir).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    cx.update(|cx| {
+        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
+    });
+
+    // The crux: a file created inside the *recreated* directory must still
+    // produce an event that reaches the worktree.
+    std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
+    tree.flush_fs_events(cx).await;
+
+    cx.update(|cx| {
+        assert!(
+            tree.read(cx)
+                .entry_for_path(rel_path("repro/repro-marker"))
+                .is_some()
+        );
+    });
+}
+
 #[gpui::test(iterations = 10)]
 async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
     init_test(cx);
@@ -9207,14 +9253,23 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
                 StatusEntry {
                     repo_path: repo_path("a.txt"),
                     status: StatusCode::Modified.worktree(),
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 },
                 StatusEntry {
                     repo_path: repo_path("b.txt"),
                     status: FileStatus::Untracked,
+                    diff_stat: None,
                 },
                 StatusEntry {
                     repo_path: repo_path("d.txt"),
                     status: StatusCode::Deleted.worktree(),
+                    diff_stat: Some(DiffStat {
+                        added: 0,
+                        deleted: 1,
+                    }),
                 },
             ]
         );
@@ -9236,18 +9291,31 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
                 StatusEntry {
                     repo_path: repo_path("a.txt"),
                     status: StatusCode::Modified.worktree(),
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 },
                 StatusEntry {
                     repo_path: repo_path("b.txt"),
                     status: FileStatus::Untracked,
+                    diff_stat: None,
                 },
                 StatusEntry {
                     repo_path: repo_path("c.txt"),
                     status: StatusCode::Modified.worktree(),
+                    diff_stat: Some(DiffStat {
+                        added: 1,
+                        deleted: 1,
+                    }),
                 },
                 StatusEntry {
                     repo_path: repo_path("d.txt"),
                     status: StatusCode::Deleted.worktree(),
+                    diff_stat: Some(DiffStat {
+                        added: 0,
+                        deleted: 1,
+                    }),
                 },
             ]
         );
@@ -9281,6 +9349,10 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
             [StatusEntry {
                 repo_path: repo_path("a.txt"),
                 status: StatusCode::Deleted.worktree(),
+                diff_stat: Some(DiffStat {
+                    added: 0,
+                    deleted: 1,
+                }),
             }]
         );
     });
@@ -9345,6 +9417,7 @@ async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
                     worktree_status: StatusCode::Added
                 }
                 .into(),
+                diff_stat: None,
             }]
         )
     });
@@ -9547,6 +9620,10 @@ async fn test_repository_pending_ops_staging(
                     worktree_status: StatusCode::Unmodified
                 }
                 .into(),
+                diff_stat: Some(DiffStat {
+                    added: 1,
+                    deleted: 0,
+                }),
             }]
         );
     });
@@ -9653,6 +9730,10 @@ async fn test_repository_pending_ops_long_running_staging(
                     worktree_status: StatusCode::Unmodified
                 }
                 .into(),
+                diff_stat: Some(DiffStat {
+                    added: 1,
+                    deleted: 0,
+                }),
             }]
         );
     });
@@ -9777,10 +9858,12 @@ async fn test_repository_pending_ops_stage_all(
                 StatusEntry {
                     repo_path: repo_path("a.txt"),
                     status: FileStatus::Untracked,
+                    diff_stat: None,
                 },
                 StatusEntry {
                     repo_path: repo_path("b.txt"),
                     status: FileStatus::Untracked,
+                    diff_stat: None,
                 },
             ]
         );
@@ -10409,10 +10492,7 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
 
     assert_eq!(
         repository_updates.lock().drain(..).collect::<Vec<_>>(),
-        vec![
-            RepositoryEvent::StatusesChanged,
-            RepositoryEvent::MergeHeadsChanged,
-        ],
+        vec![RepositoryEvent::StatusesChanged,],
         "Initial worktree scan should produce a repo update event"
     );
     assert_eq!(
@@ -10579,7 +10659,6 @@ async fn test_odd_events_for_ignored_dirs(
     assert_eq!(
         repository_updates.lock().drain(..).collect::<Vec<_>>(),
         vec![
-            RepositoryEvent::MergeHeadsChanged,
             RepositoryEvent::BranchChanged,
             RepositoryEvent::StatusesChanged,
             RepositoryEvent::StatusesChanged,

crates/project_panel/src/project_panel.rs 🔗

@@ -46,6 +46,7 @@ use settings::{
     update_settings_file,
 };
 use smallvec::SmallVec;
+use std::ops::Neg;
 use std::{any::TypeId, time::Instant};
 use std::{
     cell::OnceCell,
@@ -146,6 +147,7 @@ pub struct ProjectPanel {
     width: Option<Pixels>,
     pending_serialization: Task<Option<()>>,
     diagnostics: HashMap<(WorktreeId, Arc<RelPath>), DiagnosticSeverity>,
+    diagnostic_counts: HashMap<(WorktreeId, Arc<RelPath>), DiagnosticCount>,
     diagnostic_summary_update: Task<()>,
     // We keep track of the mouse down state on entries so we don't flash the UI
     // in case a user clicks to open a file.
@@ -232,6 +234,30 @@ enum ClipboardEntry {
     Cut(BTreeSet<SelectedEntry>),
 }
 
+/// Per-entry diagnostic totals (errors and warnings) rendered as badge labels
+/// in the project panel.
+#[derive(Debug, Default, PartialEq, Eq, Clone, Copy)]
+struct DiagnosticCount {
+    // Number of error diagnostics for the entry.
+    error_count: usize,
+    // Number of warning diagnostics for the entry.
+    warning_count: usize,
+}
+
+impl DiagnosticCount {
+    /// Error count formatted for display, capped at "99+".
+    fn capped_error_count(&self) -> String {
+        Self::capped_count(self.error_count)
+    }
+
+    /// Warning count formatted for display, capped at "99+".
+    fn capped_warning_count(&self) -> String {
+        Self::capped_count(self.warning_count)
+    }
+
+    /// Renders a count as a string, collapsing anything above 99 to "99+" so
+    /// badges stay a bounded width.
+    fn capped_count(count: usize) -> String {
+        if count > 99 {
+            "99+".to_string()
+        } else {
+            count.to_string()
+        }
+    }
+}
+
 #[derive(Debug, PartialEq, Eq, Clone)]
 struct EntryDetails {
     filename: String,
@@ -249,6 +275,7 @@ struct EntryDetails {
     sticky: Option<StickyDetails>,
     filename_text_color: Color,
     diagnostic_severity: Option<DiagnosticSeverity>,
+    diagnostic_count: Option<DiagnosticCount>,
     git_status: GitSummary,
     is_private: bool,
     worktree_id: WorktreeId,
@@ -847,6 +874,7 @@ impl ProjectPanel {
                 width: None,
                 pending_serialization: Task::ready(None),
                 diagnostics: Default::default(),
+                diagnostic_counts: Default::default(),
                 diagnostic_summary_update: Task::ready(()),
                 scroll_handle,
                 mouse_down: false,
@@ -1029,6 +1057,26 @@ impl ProjectPanel {
                 });
         }
         self.diagnostics = diagnostics;
+
+        let diagnostic_badges = ProjectPanelSettings::get_global(cx).diagnostic_badges;
+        self.diagnostic_counts =
+            if diagnostic_badges && show_diagnostics_setting != ShowDiagnostics::Off {
+                self.project.read(cx).diagnostic_summaries(false, cx).fold(
+                    HashMap::default(),
+                    |mut counts, (project_path, _, summary)| {
+                        let entry = counts
+                            .entry((project_path.worktree_id, project_path.path))
+                            .or_default();
+                        entry.error_count += summary.error_count;
+                        if show_diagnostics_setting == ShowDiagnostics::All {
+                            entry.warning_count += summary.warning_count;
+                        }
+                        counts
+                    },
+                )
+            } else {
+                Default::default()
+            };
     }
 
     fn update_strongest_diagnostic_severity(
@@ -5044,6 +5092,7 @@ impl ProjectPanel {
 
         let filename_text_color = details.filename_text_color;
         let diagnostic_severity = details.diagnostic_severity;
+        let diagnostic_count = details.diagnostic_count;
         let item_colors = get_item_color(is_sticky, cx);
 
         let canonical_path = details
@@ -5482,22 +5531,55 @@ impl ProjectPanel {
                         ProjectPanelEntrySpacing::Standard => ListItemSpacing::ExtraDense,
                     })
                     .selectable(false)
-                    .when_some(canonical_path, |this, path| {
-                        this.end_slot::<AnyElement>(
-                            div()
-                                .id("symlink_icon")
-                                .pr_3()
-                                .tooltip(move |_window, cx| {
-                                    Tooltip::with_meta(path.to_string(), None, "Symbolic Link", cx)
-                                })
-                                .child(
-                                    Icon::new(IconName::ArrowUpRight)
-                                        .size(IconSize::Indicator)
-                                        .color(filename_text_color),
-                                )
-                                .into_any_element(),
-                        )
-                    })
+                    .when(
+                        canonical_path.is_some() || diagnostic_count.is_some(),
+                        |this| {
+                            let symlink_element = canonical_path.map(|path| {
+                                div()
+                                    .id("symlink_icon")
+                                    .tooltip(move |_window, cx| {
+                                        Tooltip::with_meta(
+                                            path.to_string(),
+                                            None,
+                                            "Symbolic Link",
+                                            cx,
+                                        )
+                                    })
+                                    .child(
+                                        Icon::new(IconName::ArrowUpRight)
+                                            .size(IconSize::Indicator)
+                                            .color(filename_text_color),
+                                    )
+                            });
+                            this.end_slot::<AnyElement>(
+                                h_flex()
+                                    .gap_1()
+                                    .flex_none()
+                                    .pr_3()
+                                    .when_some(diagnostic_count, |this, count| {
+                                        this.when(count.error_count > 0, |this| {
+                                            this.child(
+                                                Label::new(count.capped_error_count())
+                                                    .size(LabelSize::Small)
+                                                    .color(Color::Error),
+                                            )
+                                        })
+                                        .when(
+                                            count.warning_count > 0,
+                                            |this| {
+                                                this.child(
+                                                    Label::new(count.capped_warning_count())
+                                                        .size(LabelSize::Small)
+                                                        .color(Color::Warning),
+                                                )
+                                            },
+                                        )
+                                    })
+                                    .when_some(symlink_element, |this, el| this.child(el))
+                                    .into_any_element(),
+                            )
+                        },
+                    )
                     .child(if let Some(icon) = &icon {
                         if let Some((_, decoration_color)) =
                             entry_diagnostic_aware_icon_decoration_and_color(diagnostic_severity)
@@ -5907,6 +5989,11 @@ impl ProjectPanel {
             .get(&(worktree_id, entry.path.clone()))
             .cloned();
 
+        let diagnostic_count = self
+            .diagnostic_counts
+            .get(&(worktree_id, entry.path.clone()))
+            .copied();
+
         let filename_text_color =
             entry_git_aware_label_color(git_status, entry.is_ignored, is_marked);
 
@@ -5931,6 +6018,7 @@ impl ProjectPanel {
             sticky,
             filename_text_color,
             diagnostic_severity,
+            diagnostic_count,
             git_status,
             is_private: entry.is_private,
             worktree_id,
@@ -5966,12 +6054,27 @@ impl ProjectPanel {
             .worktree_for_entry(entry_id, cx)
             .context("can't reveal a non-existent entry in the project panel")?;
         let worktree = worktree.read(cx);
-        if skip_ignored
-            && worktree
-                .entry_for_id(entry_id)
-                .is_none_or(|entry| entry.is_ignored && !entry.is_always_included)
-        {
-            anyhow::bail!("can't reveal an ignored entry in the project panel");
+        let worktree_id = worktree.id();
+        let is_ignored = worktree
+            .entry_for_id(entry_id)
+            .is_none_or(|entry| entry.is_ignored && !entry.is_always_included);
+        if skip_ignored && is_ignored {
+            if self.index_for_entry(entry_id, worktree_id).is_none() {
+                anyhow::bail!("can't reveal an ignored entry in the project panel");
+            }
+
+            self.selection = Some(SelectedEntry {
+                worktree_id,
+                entry_id,
+            });
+            self.marked_entries.clear();
+            self.marked_entries.push(SelectedEntry {
+                worktree_id,
+                entry_id,
+            });
+            self.autoscroll(cx);
+            cx.notify();
+            return Ok(());
         }
         let is_active_item_file_diff_view = self
             .workspace
@@ -5983,7 +6086,6 @@ impl ProjectPanel {
             return Ok(());
         }
 
-        let worktree_id = worktree.id();
         self.expand_entry(worktree_id, entry_id, cx);
         self.update_visible_entries(Some((worktree_id, entry_id)), false, true, window, cx);
         self.marked_entries.clear();
@@ -6356,11 +6458,14 @@ impl Render for ProjectPanel {
                             el.on_action(cx.listener(Self::trash))
                         })
                 })
-                .when(project.is_local(), |el| {
-                    el.on_action(cx.listener(Self::reveal_in_finder))
-                        .on_action(cx.listener(Self::open_system))
-                        .on_action(cx.listener(Self::open_in_terminal))
-                })
+                .when(
+                    project.is_local() || project.is_via_wsl_with_host_interop(cx),
+                    |el| {
+                        el.on_action(cx.listener(Self::reveal_in_finder))
+                            .on_action(cx.listener(Self::open_system))
+                            .on_action(cx.listener(Self::open_in_terminal))
+                    },
+                )
                 .when(project.is_via_remote_server(), |el| {
                     el.on_action(cx.listener(Self::open_in_terminal))
                         .on_action(cx.listener(Self::download_from_remote))
@@ -6587,6 +6692,24 @@ impl Render for ProjectPanel {
                                 .id("project-panel-blank-area")
                                 .block_mouse_except_scroll()
                                 .flex_grow()
+                                .on_scroll_wheel({
+                                    let scroll_handle = self.scroll_handle.clone();
+                                    let entity_id = cx.entity().entity_id();
+                                    move |event, window, cx| {
+                                        let state = scroll_handle.0.borrow();
+                                        let base_handle = &state.base_handle;
+                                        let current_offset = base_handle.offset();
+                                        let max_offset = base_handle.max_offset();
+                                        let delta = event.delta.pixel_delta(window.line_height());
+                                        let new_offset = (current_offset + delta)
+                                            .clamp(&max_offset.neg(), &Point::default());
+
+                                        if new_offset != current_offset {
+                                            base_handle.set_offset(new_offset);
+                                            cx.notify(entity_id);
+                                        }
+                                    }
+                                })
                                 .when(
                                     self.drag_target_entry.as_ref().is_some_and(
                                         |entry| match entry {
@@ -6756,14 +6879,17 @@ impl Render for ProjectPanel {
                     Button::new("open_project", "Open Project")
                         .full_width()
                         .key_binding(KeyBinding::for_action_in(
-                            &workspace::Open,
+                            &workspace::Open::default(),
                             &focus_handle,
                             cx,
                         ))
                         .on_click(cx.listener(|this, _, window, cx| {
                             this.workspace
                                 .update(cx, |_, cx| {
-                                    window.dispatch_action(workspace::Open.boxed_clone(), cx);
+                                    window.dispatch_action(
+                                        workspace::Open::default().boxed_clone(),
+                                        cx,
+                                    );
                                 })
                                 .log_err();
                         })),

crates/project_panel/src/project_panel_settings.rs 🔗

@@ -35,6 +35,7 @@ pub struct ProjectPanelSettings {
     pub drag_and_drop: bool,
     pub auto_open: AutoOpenSettings,
     pub sort_mode: ProjectPanelSortMode,
+    pub diagnostic_badges: bool,
 }
 
 #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -125,9 +126,8 @@ impl Settings for ProjectPanelSettings {
                     on_drop: auto_open.on_drop.unwrap(),
                 }
             },
-            sort_mode: project_panel
-                .sort_mode
-                .unwrap_or(ProjectPanelSortMode::DirectoriesFirst),
+            sort_mode: project_panel.sort_mode.unwrap(),
+            diagnostic_badges: project_panel.diagnostic_badges.unwrap(),
         }
     }
 }

crates/project_panel/src/project_panel_tests.rs 🔗

@@ -4843,6 +4843,64 @@ async fn test_autoreveal_and_gitignored_files(cx: &mut gpui::TestAppContext) {
         ],
         "When a gitignored entry is explicitly revealed, it should be shown in the project tree"
     );
+
+    panel.update(cx, |panel, cx| {
+        panel.project.update(cx, |_, cx| {
+            cx.emit(project::Event::ActiveEntryChanged(Some(dir_2_file)))
+        })
+    });
+    cx.run_until_parked();
+    assert_eq!(
+        visible_entries_as_strings(&panel, 0..20, cx),
+        &[
+            "v project_root",
+            "    > .git",
+            "    v dir_1",
+            "        v gitignored_dir",
+            "              file_a.py",
+            "              file_b.py",
+            "              file_c.py",
+            "          file_1.py",
+            "          file_2.py",
+            "          file_3.py",
+            "    v dir_2",
+            "          file_1.py  <== selected  <== marked",
+            "          file_2.py",
+            "          file_3.py",
+            "      .gitignore",
+        ],
+        "After switching to dir_2_file, it should be selected and marked"
+    );
+
+    panel.update(cx, |panel, cx| {
+        panel.project.update(cx, |_, cx| {
+            cx.emit(project::Event::ActiveEntryChanged(Some(
+                gitignored_dir_file,
+            )))
+        })
+    });
+    cx.run_until_parked();
+    assert_eq!(
+        visible_entries_as_strings(&panel, 0..20, cx),
+        &[
+            "v project_root",
+            "    > .git",
+            "    v dir_1",
+            "        v gitignored_dir",
+            "              file_a.py  <== selected  <== marked",
+            "              file_b.py",
+            "              file_c.py",
+            "          file_1.py",
+            "          file_2.py",
+            "          file_3.py",
+            "    v dir_2",
+            "          file_1.py",
+            "          file_2.py",
+            "          file_3.py",
+            "      .gitignore",
+        ],
+        "When a gitignored entry is already visible, auto reveal should mark it as selected"
+    );
 }
 
 #[gpui::test]

crates/proto/proto/ai.proto 🔗

@@ -5,245 +5,245 @@ import "buffer.proto";
 import "task.proto";
 
 message Context {
-    repeated ContextOperation operations = 1;
+  repeated ContextOperation operations = 1;
 }
 
 message ContextMetadata {
-    string context_id = 1;
-    optional string summary = 2;
+  string context_id = 1;
+  optional string summary = 2;
 }
 
 message ContextMessageStatus {
-    oneof variant {
-        Done done = 1;
-        Pending pending = 2;
-        Error error = 3;
-        Canceled canceled = 4;
-    }
+  oneof variant {
+    Done done = 1;
+    Pending pending = 2;
+    Error error = 3;
+    Canceled canceled = 4;
+  }
 
-    message Done {}
+  message Done {}
 
-    message Pending {}
+  message Pending {}
 
-    message Error {
-        string message = 1;
-    }
+  message Error {
+    string message = 1;
+  }
 
-    message Canceled {}
+  message Canceled {}
 }
 
 message ContextMessage {
-    LamportTimestamp id = 1;
-    Anchor start = 2;
-    LanguageModelRole role = 3;
-    ContextMessageStatus status = 4;
+  LamportTimestamp id = 1;
+  Anchor start = 2;
+  LanguageModelRole role = 3;
+  ContextMessageStatus status = 4;
 }
 
 message SlashCommandOutputSection {
-    AnchorRange range = 1;
-    string icon_name = 2;
-    string label = 3;
-    optional string metadata = 4;
+  AnchorRange range = 1;
+  string icon_name = 2;
+  string label = 3;
+  optional string metadata = 4;
 }
 
 message ThoughtProcessOutputSection {
-    AnchorRange range = 1;
+  AnchorRange range = 1;
 }
 
 message ContextOperation {
-    oneof variant {
-        InsertMessage insert_message = 1;
-        UpdateMessage update_message = 2;
-        UpdateSummary update_summary = 3;
-        BufferOperation buffer_operation = 5;
-        SlashCommandStarted slash_command_started = 6;
-        SlashCommandOutputSectionAdded slash_command_output_section_added = 7;
-        SlashCommandCompleted slash_command_completed = 8;
-        ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9;
-    }
-
-    reserved 4;
-
-    message InsertMessage {
-        ContextMessage message = 1;
-        repeated VectorClockEntry version = 2;
-    }
-
-    message UpdateMessage {
-        LamportTimestamp message_id = 1;
-        LanguageModelRole role = 2;
-        ContextMessageStatus status = 3;
-        LamportTimestamp timestamp = 4;
-        repeated VectorClockEntry version = 5;
-    }
-
-    message UpdateSummary {
-        string summary = 1;
-        bool done = 2;
-        LamportTimestamp timestamp = 3;
-        repeated VectorClockEntry version = 4;
-    }
-
-    message SlashCommandStarted {
-        LamportTimestamp id = 1;
-        AnchorRange output_range = 2;
-        string name = 3;
-        repeated VectorClockEntry version = 4;
-    }
-
-    message SlashCommandOutputSectionAdded {
-        LamportTimestamp timestamp = 1;
-        SlashCommandOutputSection section = 2;
-        repeated VectorClockEntry version = 3;
-    }
-
-    message SlashCommandCompleted {
-        LamportTimestamp id = 1;
-        LamportTimestamp timestamp = 3;
-        optional string error_message = 4;
-        repeated VectorClockEntry version = 5;
-    }
-
-    message ThoughtProcessOutputSectionAdded {
-        LamportTimestamp timestamp = 1;
-        ThoughtProcessOutputSection section = 2;
-        repeated VectorClockEntry version = 3;
-    }
-
-    message BufferOperation {
-        Operation operation = 1;
-    }
+  oneof variant {
+    InsertMessage insert_message = 1;
+    UpdateMessage update_message = 2;
+    UpdateSummary update_summary = 3;
+    BufferOperation buffer_operation = 5;
+    SlashCommandStarted slash_command_started = 6;
+    SlashCommandOutputSectionAdded slash_command_output_section_added = 7;
+    SlashCommandCompleted slash_command_completed = 8;
+    ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9;
+  }
+
+  reserved 4;
+
+  message InsertMessage {
+    ContextMessage message = 1;
+    repeated VectorClockEntry version = 2;
+  }
+
+  message UpdateMessage {
+    LamportTimestamp message_id = 1;
+    LanguageModelRole role = 2;
+    ContextMessageStatus status = 3;
+    LamportTimestamp timestamp = 4;
+    repeated VectorClockEntry version = 5;
+  }
+
+  message UpdateSummary {
+    string summary = 1;
+    bool done = 2;
+    LamportTimestamp timestamp = 3;
+    repeated VectorClockEntry version = 4;
+  }
+
+  message SlashCommandStarted {
+    LamportTimestamp id = 1;
+    AnchorRange output_range = 2;
+    string name = 3;
+    repeated VectorClockEntry version = 4;
+  }
+
+  message SlashCommandOutputSectionAdded {
+    LamportTimestamp timestamp = 1;
+    SlashCommandOutputSection section = 2;
+    repeated VectorClockEntry version = 3;
+  }
+
+  message SlashCommandCompleted {
+    LamportTimestamp id = 1;
+    LamportTimestamp timestamp = 3;
+    optional string error_message = 4;
+    repeated VectorClockEntry version = 5;
+  }
+
+  message ThoughtProcessOutputSectionAdded {
+    LamportTimestamp timestamp = 1;
+    ThoughtProcessOutputSection section = 2;
+    repeated VectorClockEntry version = 3;
+  }
+
+  message BufferOperation {
+    Operation operation = 1;
+  }
 }
 
 message AdvertiseContexts {
-    uint64 project_id = 1;
-    repeated ContextMetadata contexts = 2;
+  uint64 project_id = 1;
+  repeated ContextMetadata contexts = 2;
 }
 
 message OpenContext {
-    uint64 project_id = 1;
-    string context_id = 2;
+  uint64 project_id = 1;
+  string context_id = 2;
 }
 
 message OpenContextResponse {
-    Context context = 1;
+  Context context = 1;
 }
 
 message CreateContext {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
 message CreateContextResponse {
-    string context_id = 1;
-    Context context = 2;
+  string context_id = 1;
+  Context context = 2;
 }
 
 message UpdateContext {
-    uint64 project_id = 1;
-    string context_id = 2;
-    ContextOperation operation = 3;
+  uint64 project_id = 1;
+  string context_id = 2;
+  ContextOperation operation = 3;
 }
 
 message ContextVersion {
-    string context_id = 1;
-    repeated VectorClockEntry context_version = 2;
-    repeated VectorClockEntry buffer_version = 3;
+  string context_id = 1;
+  repeated VectorClockEntry context_version = 2;
+  repeated VectorClockEntry buffer_version = 3;
 }
 
 message SynchronizeContexts {
-    uint64 project_id = 1;
-    repeated ContextVersion contexts = 2;
+  uint64 project_id = 1;
+  repeated ContextVersion contexts = 2;
 }
 
 message SynchronizeContextsResponse {
-    repeated ContextVersion contexts = 1;
+  repeated ContextVersion contexts = 1;
 }
 
 enum LanguageModelRole {
-    LanguageModelUser = 0;
-    LanguageModelAssistant = 1;
-    LanguageModelSystem = 2;
-    reserved 3;
+  LanguageModelUser = 0;
+  LanguageModelAssistant = 1;
+  LanguageModelSystem = 2;
+  reserved 3;
 }
 
 message GetAgentServerCommand {
-    uint64 project_id = 1;
-    string name = 2;
-    optional string root_dir = 3;
+  uint64 project_id = 1;
+  string name = 2;
+  optional string root_dir = 3;
 }
 
 message GetContextServerCommand {
-    uint64 project_id = 1;
-    string server_id = 2;
-    optional string root_dir = 3;
+  uint64 project_id = 1;
+  string server_id = 2;
+  optional string root_dir = 3;
 }
 
 message ContextServerCommand {
-    string path = 1;
-    repeated string args = 2;
-    map<string, string> env = 3;
+  string path = 1;
+  repeated string args = 2;
+  map<string, string> env = 3;
 }
 
 message AgentServerCommand {
-    string path = 1;
-    repeated string args = 2;
-    map<string, string> env = 3;
-    string root_dir = 4;
+  string path = 1;
+  repeated string args = 2;
+  map<string, string> env = 3;
+  string root_dir = 4;
 
-    optional SpawnInTerminal login = 5;
+  optional SpawnInTerminal login = 5;
 }
 
 message ExternalAgentsUpdated {
-    uint64 project_id = 1;
-    repeated string names = 2;
+  uint64 project_id = 1;
+  repeated string names = 2;
 }
 
 message ExternalExtensionAgentTarget {
-    string archive = 1;
-    string cmd = 2;
-    repeated string args = 3;
-    optional string sha256 = 4;
-    map<string, string> env = 5;
+  string archive = 1;
+  string cmd = 2;
+  repeated string args = 3;
+  optional string sha256 = 4;
+  map<string, string> env = 5;
 }
 
 message ExternalExtensionAgent {
-    string name = 1;
-    optional string icon_path = 2;
-    string extension_id = 3;
-    map<string, ExternalExtensionAgentTarget> targets = 4;
-    map<string, string> env = 5;
+  string name = 1;
+  optional string icon_path = 2;
+  string extension_id = 3;
+  map<string, ExternalExtensionAgentTarget> targets = 4;
+  map<string, string> env = 5;
 }
 
 message ExternalExtensionAgentsUpdated {
-    uint64 project_id = 1;
-    repeated ExternalExtensionAgent agents = 2;
+  uint64 project_id = 1;
+  repeated ExternalExtensionAgent agents = 2;
 }
 
 message ExternalAgentLoadingStatusUpdated {
-    uint64 project_id = 1;
-    string name = 2;
-    string status = 3;
+  uint64 project_id = 1;
+  string name = 2;
+  string status = 3;
 }
 
 message NewExternalAgentVersionAvailable {
-    uint64 project_id = 1;
-    string name = 2;
-    string version = 3;
+  uint64 project_id = 1;
+  string name = 2;
+  string version = 3;
 }
 
 message ShareAgentThread {
-    string session_id = 1; // Client-generated UUID (acp::SessionId)
-    string title = 2;
-    bytes thread_data = 3;
+  string session_id = 1; // Client-generated UUID (acp::SessionId)
+  string title = 2;
+  bytes thread_data = 3;
 }
 
 message GetSharedAgentThread {
-    string session_id = 1; // UUID string
+  string session_id = 1; // UUID string
 }
 
 message GetSharedAgentThreadResponse {
-    string title = 1;
-    bytes thread_data = 2;
-    string sharer_username = 3;
-    string created_at = 4;
+  string title = 1;
+  bytes thread_data = 2;
+  string sharer_username = 3;
+  string created_at = 4;
 }

crates/proto/proto/app.proto 🔗

@@ -4,60 +4,59 @@ package zed.messages;
 message ShutdownRemoteServer {}
 
 message Toast {
-    uint64 project_id = 1;
-    string notification_id = 2;
-    string message = 3;
+  uint64 project_id = 1;
+  string notification_id = 2;
+  string message = 3;
 }
 
 message HideToast {
-    uint64 project_id = 1;
-    string notification_id = 2;
+  uint64 project_id = 1;
+  string notification_id = 2;
 }
 
 message OpenServerSettings {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
-message GetCrashFiles {
-}
+message GetCrashFiles {}
 
 message GetCrashFilesResponse {
-    repeated CrashReport crashes = 1;
-    reserved 2; // old panics
+  repeated CrashReport crashes = 1;
+  reserved 2; // old panics
 }
 
 message CrashReport {
-    reserved 1, 2;
-    string metadata = 3;
-    bytes minidump_contents = 4;
+  reserved 1, 2;
+  string metadata = 3;
+  bytes minidump_contents = 4;
 }
 
 message Extension {
-    string id = 1;
-    string version = 2;
-    bool dev = 3;
+  string id = 1;
+  string version = 2;
+  bool dev = 3;
 }
 
 message SyncExtensions {
-    repeated Extension extensions = 1;
+  repeated Extension extensions = 1;
 }
 
 message SyncExtensionsResponse {
-    string tmp_dir = 1;
-    repeated Extension missing_extensions = 2;
+  string tmp_dir = 1;
+  repeated Extension missing_extensions = 2;
 }
 
 message InstallExtension {
-    Extension extension = 1;
-    string tmp_dir = 2;
+  Extension extension = 1;
+  string tmp_dir = 2;
 }
 
 message AskPassRequest {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    uint64 askpass_id = 4;
-    string prompt = 5;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  uint64 askpass_id = 4;
+  string prompt = 5;
 }
 
 message AskPassResponse {
@@ -65,29 +64,29 @@ message AskPassResponse {
 }
 
 message GetRemoteProfilingData {
-    uint64 project_id = 1;
-    bool foreground_only = 2;
+  uint64 project_id = 1;
+  bool foreground_only = 2;
 }
 
 message GetRemoteProfilingDataResponse {
-    repeated RemoteProfilingThread threads = 1;
-    uint64 now_nanos = 2;
+  repeated RemoteProfilingThread threads = 1;
+  uint64 now_nanos = 2;
 }
 
 message RemoteProfilingThread {
-    optional string thread_name = 1;
-    uint64 thread_id = 2;
-    repeated RemoteProfilingTiming timings = 3;
+  optional string thread_name = 1;
+  uint64 thread_id = 2;
+  repeated RemoteProfilingTiming timings = 3;
 }
 
 message RemoteProfilingTiming {
-    RemoteProfilingLocation location = 1;
-    uint64 start_nanos = 2;
-    uint64 duration_nanos = 3;
+  RemoteProfilingLocation location = 1;
+  uint64 start_nanos = 2;
+  uint64 duration_nanos = 3;
 }
 
 message RemoteProfilingLocation {
-    string file = 1;
-    uint32 line = 2;
-    uint32 column = 3;
+  string file = 1;
+  uint32 line = 2;
+  uint32 column = 3;
 }

crates/proto/proto/buf.yaml 🔗

@@ -2,3 +2,13 @@ version: v1
 breaking:
   use:
     - WIRE
+lint:
+  except:
+    # Since we use post_build instead of buf this doesn't matter
+    - PACKAGE_DIRECTORY_MATCH
+    # This is internal to Zed only so we don't enforce versions
+    - PACKAGE_VERSION_SUFFIX
+    # Style rules we don't enforce
+    - ENUM_VALUE_PREFIX
+    - ENUM_VALUE_UPPER_SNAKE_CASE
+    - ENUM_ZERO_VALUE_SUFFIX

crates/proto/proto/buffer.proto 🔗

@@ -5,313 +5,312 @@ import "core.proto";
 import "worktree.proto";
 
 message OpenNewBuffer {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
 message OpenBufferResponse {
-    uint64 buffer_id = 1;
+  uint64 buffer_id = 1;
 }
 
 message CreateBufferForPeer {
-    uint64 project_id = 1;
-    PeerId peer_id = 2;
-    oneof variant {
-        BufferState state = 3;
-        BufferChunk chunk = 4;
-    }
+  uint64 project_id = 1;
+  PeerId peer_id = 2;
+  oneof variant {
+    BufferState state = 3;
+    BufferChunk chunk = 4;
+  }
 }
 
 message UpdateBuffer {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    repeated Operation operations = 3;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  repeated Operation operations = 3;
 }
 
 message OpenBufferByPath {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    string path = 3;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  string path = 3;
 }
 
 message OpenBufferById {
-    uint64 project_id = 1;
-    uint64 id = 2;
+  uint64 project_id = 1;
+  uint64 id = 2;
 }
 
 message UpdateBufferFile {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    File file = 3;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  File file = 3;
 }
 
 message SaveBuffer {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    repeated VectorClockEntry version = 3;
-    optional ProjectPath new_path = 4;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  repeated VectorClockEntry version = 3;
+  optional ProjectPath new_path = 4;
 }
 
 message CloseBuffer {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
 }
 
 message BufferSaved {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    repeated VectorClockEntry version = 3;
-    Timestamp mtime = 4;
-    reserved 5;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  repeated VectorClockEntry version = 3;
+  Timestamp mtime = 4;
+  reserved 5;
 }
 
 message BufferReloaded {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    repeated VectorClockEntry version = 3;
-    Timestamp mtime = 4;
-    reserved 5;
-    LineEnding line_ending = 6;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  repeated VectorClockEntry version = 3;
+  Timestamp mtime = 4;
+  reserved 5;
+  LineEnding line_ending = 6;
 }
 
 message ReloadBuffers {
-    uint64 project_id = 1;
-    repeated uint64 buffer_ids = 2;
+  uint64 project_id = 1;
+  repeated uint64 buffer_ids = 2;
 }
 
 message ReloadBuffersResponse {
-    ProjectTransaction transaction = 1;
+  ProjectTransaction transaction = 1;
 }
 
 message SynchronizeBuffers {
-    uint64 project_id = 1;
-    repeated BufferVersion buffers = 2;
+  uint64 project_id = 1;
+  repeated BufferVersion buffers = 2;
 }
 
 message SynchronizeBuffersResponse {
-    repeated BufferVersion buffers = 1;
+  repeated BufferVersion buffers = 1;
 }
 
 message BufferVersion {
-    uint64 id = 1;
-    repeated VectorClockEntry version = 2;
+  uint64 id = 1;
+  repeated VectorClockEntry version = 2;
 }
 
 message BufferState {
-    uint64 id = 1;
-    optional File file = 2;
-    string base_text = 3;
-    LineEnding line_ending = 5;
-    repeated VectorClockEntry saved_version = 6;
-    Timestamp saved_mtime = 8;
+  uint64 id = 1;
+  optional File file = 2;
+  string base_text = 3;
+  LineEnding line_ending = 5;
+  repeated VectorClockEntry saved_version = 6;
+  Timestamp saved_mtime = 8;
 
-    reserved 7;
-    reserved 4;
+  reserved 7;
+  reserved 4;
 }
 
 message BufferChunk {
-    uint64 buffer_id = 1;
-    repeated Operation operations = 2;
-    bool is_last = 3;
+  uint64 buffer_id = 1;
+  repeated Operation operations = 2;
+  bool is_last = 3;
 }
 
 enum LineEnding {
-    Unix = 0;
-    Windows = 1;
+  Unix = 0;
+  Windows = 1;
 }
 
 message VectorClockEntry {
-    uint32 replica_id = 1;
-    uint32 timestamp = 2;
+  uint32 replica_id = 1;
+  uint32 timestamp = 2;
 }
 
 message UndoMapEntry {
-    uint32 replica_id = 1;
-    uint32 local_timestamp = 2;
-    repeated UndoCount counts = 3;
+  uint32 replica_id = 1;
+  uint32 local_timestamp = 2;
+  repeated UndoCount counts = 3;
 }
 
 message UndoCount {
-    uint32 replica_id = 1;
-    uint32 lamport_timestamp = 2;
-    uint32 count = 3;
+  uint32 replica_id = 1;
+  uint32 lamport_timestamp = 2;
+  uint32 count = 3;
 }
 
 message Operation {
-    oneof variant {
-        Edit edit = 1;
-        Undo undo = 2;
-        UpdateSelections update_selections = 3;
-        UpdateDiagnostics update_diagnostics = 4;
-        UpdateCompletionTriggers update_completion_triggers = 5;
-        UpdateLineEnding update_line_ending = 6;
-    }
-
-    message Edit {
-        uint32 replica_id = 1;
-        uint32 lamport_timestamp = 2;
-        repeated VectorClockEntry version = 3;
-        repeated Range ranges = 4;
-        repeated string new_text = 5;
-    }
-
-    message Undo {
-        uint32 replica_id = 1;
-        uint32 lamport_timestamp = 2;
-        repeated VectorClockEntry version = 3;
-        repeated UndoCount counts = 4;
-    }
-
-    message UpdateSelections {
-        uint32 replica_id = 1;
-        uint32 lamport_timestamp = 2;
-        repeated Selection selections = 3;
-        bool line_mode = 4;
-        CursorShape cursor_shape = 5;
-    }
-
-    message UpdateCompletionTriggers {
-        uint32 replica_id = 1;
-        uint32 lamport_timestamp = 2;
-        repeated string triggers = 3;
-        uint64 language_server_id = 4;
-    }
-
-    message UpdateLineEnding {
-        uint32 replica_id = 1;
-        uint32 lamport_timestamp = 2;
-        LineEnding line_ending = 3;
-    }
+  oneof variant {
+    Edit edit = 1;
+    Undo undo = 2;
+    UpdateSelections update_selections = 3;
+    UpdateDiagnostics update_diagnostics = 4;
+    UpdateCompletionTriggers update_completion_triggers = 5;
+    UpdateLineEnding update_line_ending = 6;
+  }
+
+  message Edit {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    repeated VectorClockEntry version = 3;
+    repeated Range ranges = 4;
+    repeated string new_text = 5;
+  }
+
+  message Undo {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    repeated VectorClockEntry version = 3;
+    repeated UndoCount counts = 4;
+  }
+
+  message UpdateSelections {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    repeated Selection selections = 3;
+    bool line_mode = 4;
+    CursorShape cursor_shape = 5;
+  }
+
+  message UpdateCompletionTriggers {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    repeated string triggers = 3;
+    uint64 language_server_id = 4;
+  }
+
+  message UpdateLineEnding {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    LineEnding line_ending = 3;
+  }
 }
 
 message ProjectTransaction {
-    repeated uint64 buffer_ids = 1;
-    repeated Transaction transactions = 2;
+  repeated uint64 buffer_ids = 1;
+  repeated Transaction transactions = 2;
 }
 
 message Transaction {
-    LamportTimestamp id = 1;
-    repeated LamportTimestamp edit_ids = 2;
-    repeated VectorClockEntry start = 3;
+  LamportTimestamp id = 1;
+  repeated LamportTimestamp edit_ids = 2;
+  repeated VectorClockEntry start = 3;
 }
 
 message LamportTimestamp {
-    uint32 replica_id = 1;
-    uint32 value = 2;
+  uint32 replica_id = 1;
+  uint32 value = 2;
 }
 
 message Range {
-    uint64 start = 1;
-    uint64 end = 2;
+  uint64 start = 1;
+  uint64 end = 2;
 }
 
 message Selection {
-    uint64 id = 1;
-    EditorAnchor start = 2;
-    EditorAnchor end = 3;
-    bool reversed = 4;
+  uint64 id = 1;
+  EditorAnchor start = 2;
+  EditorAnchor end = 3;
+  bool reversed = 4;
 }
 
 message EditorAnchor {
-    uint64 excerpt_id = 1;
-    Anchor anchor = 2;
+  uint64 excerpt_id = 1;
+  Anchor anchor = 2;
 }
 
 enum CursorShape {
-    CursorBar = 0;
-    CursorBlock = 1;
-    CursorUnderscore = 2;
-    CursorHollow = 3;
+  CursorBar = 0;
+  CursorBlock = 1;
+  CursorUnderscore = 2;
+  CursorHollow = 3;
 }
 
 message UpdateDiagnostics {
-    uint32 replica_id = 1;
-    uint32 lamport_timestamp = 2;
-    uint64 server_id = 3;
-    repeated Diagnostic diagnostics = 4;
+  uint32 replica_id = 1;
+  uint32 lamport_timestamp = 2;
+  uint64 server_id = 3;
+  repeated Diagnostic diagnostics = 4;
 }
 
 message Anchor {
-    uint32 replica_id = 1;
-    uint32 timestamp = 2;
-    uint64 offset = 3;
-    Bias bias = 4;
-    optional uint64 buffer_id = 5;
+  uint32 replica_id = 1;
+  uint32 timestamp = 2;
+  uint64 offset = 3;
+  Bias bias = 4;
+  optional uint64 buffer_id = 5;
 }
 
 message AnchorRange {
-    Anchor start = 1;
-    Anchor end = 2;
+  Anchor start = 1;
+  Anchor end = 2;
 }
 
 message Location {
-    uint64 buffer_id = 1;
-    Anchor start = 2;
-    Anchor end = 3;
+  uint64 buffer_id = 1;
+  Anchor start = 2;
+  Anchor end = 3;
 }
 
 enum Bias {
-    Left = 0;
-    Right = 1;
+  Left = 0;
+  Right = 1;
 }
 
 message Diagnostic {
-    Anchor start = 1;
-    Anchor end = 2;
-    optional string source = 3;
-    optional string registration_id = 17;
-
-    enum SourceKind {
-        Pulled = 0;
-        Pushed = 1;
-        Other = 2;
-    }
-
-    SourceKind source_kind = 16;
-    Severity severity = 4;
-    string message = 5;
-    optional string code = 6;
-    uint64 group_id = 7;
-    bool is_primary = 8;
-
-    reserved 9;
-
-    bool is_disk_based = 10;
-    bool is_unnecessary = 11;
-    bool underline = 15;
-
-    enum Severity {
-        None = 0;
-        Error = 1;
-        Warning = 2;
-        Information = 3;
-        Hint = 4;
-    }
-    optional string data = 12;
-    optional string code_description = 13;
-    optional string markdown = 14;
+  Anchor start = 1;
+  Anchor end = 2;
+  optional string source = 3;
+  optional string registration_id = 17;
+
+  enum SourceKind {
+    Pulled = 0;
+    Pushed = 1;
+    Other = 2;
+  }
+
+  SourceKind source_kind = 16;
+  Severity severity = 4;
+  string message = 5;
+  optional string code = 6;
+  uint64 group_id = 7;
+  bool is_primary = 8;
+
+  reserved 9;
+
+  bool is_disk_based = 10;
+  bool is_unnecessary = 11;
+  bool underline = 15;
+
+  enum Severity {
+    None = 0;
+    Error = 1;
+    Warning = 2;
+    Information = 3;
+    Hint = 4;
+  }
+  optional string data = 12;
+  optional string code_description = 13;
+  optional string markdown = 14;
 }
 
 message SearchQuery {
-    string query = 2;
-    bool regex = 3;
-    bool whole_word = 4;
-    bool case_sensitive = 5;
-    repeated string files_to_include = 10;
-    repeated string files_to_exclude = 11;
-    bool match_full_paths = 9;
-    bool include_ignored = 8;
-    string files_to_include_legacy = 6;
-    string files_to_exclude_legacy = 7;
+  string query = 2;
+  bool regex = 3;
+  bool whole_word = 4;
+  bool case_sensitive = 5;
+  repeated string files_to_include = 10;
+  repeated string files_to_exclude = 11;
+  bool match_full_paths = 9;
+  bool include_ignored = 8;
+  string files_to_include_legacy = 6;
+  string files_to_exclude_legacy = 7;
 }
 
 message FindSearchCandidates {
-    uint64 project_id = 1;
-    SearchQuery query = 2;
-    uint64 limit = 3;
-    uint64 handle = 4;
+  uint64 project_id = 1;
+  SearchQuery query = 2;
+  uint64 limit = 3;
+  uint64 handle = 4;
 }
 
-
 message FindSearchCandidatesDone {}
 
 message FindSearchCandidatesMatches {
@@ -330,6 +329,6 @@ message FindSearchCandidatesChunk {
 }
 
 message FindSearchCandidatesCancelled {
-    uint64 project_id = 1;
-    uint64 handle = 2;
+  uint64 project_id = 1;
+  uint64 handle = 2;
 }

crates/proto/proto/call.proto 🔗

@@ -1,424 +1,424 @@
 syntax = "proto3";
 package zed.messages;
 
-import "core.proto";
-import "worktree.proto";
 import "buffer.proto";
-import "lsp.proto";
 import "channel.proto";
+import "core.proto";
 import "git.proto";
+import "lsp.proto";
+import "worktree.proto";
 
 message CreateRoom {}
 
 message CreateRoomResponse {
-    Room room = 1;
-    optional LiveKitConnectionInfo live_kit_connection_info = 2;
+  Room room = 1;
+  optional LiveKitConnectionInfo live_kit_connection_info = 2;
 }
 
 message JoinRoom {
-    uint64 id = 1;
+  uint64 id = 1;
 }
 
 message JoinRoomResponse {
-    Room room = 1;
-    optional uint64 channel_id = 2;
-    optional LiveKitConnectionInfo live_kit_connection_info = 3;
+  Room room = 1;
+  optional uint64 channel_id = 2;
+  optional LiveKitConnectionInfo live_kit_connection_info = 3;
 }
 
 message RejoinRoom {
-    uint64 id = 1;
-    repeated UpdateProject reshared_projects = 2;
-    repeated RejoinProject rejoined_projects = 3;
+  uint64 id = 1;
+  repeated UpdateProject reshared_projects = 2;
+  repeated RejoinProject rejoined_projects = 3;
 }
 
 message RejoinRemoteProjects {
-    repeated RejoinProject rejoined_projects = 1;
+  repeated RejoinProject rejoined_projects = 1;
 }
 
 message RejoinRemoteProjectsResponse {
-    repeated RejoinedProject rejoined_projects = 1;
+  repeated RejoinedProject rejoined_projects = 1;
 }
 
 message RejoinProject {
-    uint64 id = 1;
-    repeated RejoinWorktree worktrees = 2;
-    repeated RejoinRepository repositories = 3;
+  uint64 id = 1;
+  repeated RejoinWorktree worktrees = 2;
+  repeated RejoinRepository repositories = 3;
 }
 
 message RejoinWorktree {
-    uint64 id = 1;
-    uint64 scan_id = 2;
+  uint64 id = 1;
+  uint64 scan_id = 2;
 }
 
 message RejoinRepository {
-    uint64 id = 1;
-    uint64 scan_id = 2;
+  uint64 id = 1;
+  uint64 scan_id = 2;
 }
 
 message RejoinRoomResponse {
-    Room room = 1;
-    repeated ResharedProject reshared_projects = 2;
-    repeated RejoinedProject rejoined_projects = 3;
+  Room room = 1;
+  repeated ResharedProject reshared_projects = 2;
+  repeated RejoinedProject rejoined_projects = 3;
 }
 
 message ResharedProject {
-    uint64 id = 1;
-    repeated Collaborator collaborators = 2;
+  uint64 id = 1;
+  repeated Collaborator collaborators = 2;
 }
 
 message RejoinedProject {
-    uint64 id = 1;
-    repeated WorktreeMetadata worktrees = 2;
-    repeated Collaborator collaborators = 3;
-    repeated LanguageServer language_servers = 4;
-    repeated string language_server_capabilities = 5;
+  uint64 id = 1;
+  repeated WorktreeMetadata worktrees = 2;
+  repeated Collaborator collaborators = 3;
+  repeated LanguageServer language_servers = 4;
+  repeated string language_server_capabilities = 5;
 }
 
 message LeaveRoom {}
 
 message Room {
-    uint64 id = 1;
-    repeated Participant participants = 2;
-    repeated PendingParticipant pending_participants = 3;
-    repeated Follower followers = 4;
-    string livekit_room = 5;
+  uint64 id = 1;
+  repeated Participant participants = 2;
+  repeated PendingParticipant pending_participants = 3;
+  repeated Follower followers = 4;
+  string livekit_room = 5;
 }
 
 message Participant {
-    uint64 user_id = 1;
-    PeerId peer_id = 2;
-    repeated ParticipantProject projects = 3;
-    ParticipantLocation location = 4;
-    uint32 participant_index = 5;
-    ChannelRole role = 6;
-    reserved 7;
+  uint64 user_id = 1;
+  PeerId peer_id = 2;
+  repeated ParticipantProject projects = 3;
+  ParticipantLocation location = 4;
+  uint32 participant_index = 5;
+  ChannelRole role = 6;
+  reserved 7;
 }
 
 message PendingParticipant {
-    uint64 user_id = 1;
-    uint64 calling_user_id = 2;
-    optional uint64 initial_project_id = 3;
+  uint64 user_id = 1;
+  uint64 calling_user_id = 2;
+  optional uint64 initial_project_id = 3;
 }
 
 message ParticipantProject {
-    uint64 id = 1;
-    repeated string worktree_root_names = 2;
+  uint64 id = 1;
+  repeated string worktree_root_names = 2;
 }
 
 message Follower {
-    PeerId leader_id = 1;
-    PeerId follower_id = 2;
-    uint64 project_id = 3;
+  PeerId leader_id = 1;
+  PeerId follower_id = 2;
+  uint64 project_id = 3;
 }
 
 message ParticipantLocation {
-    oneof variant {
-        SharedProject shared_project = 1;
-        UnsharedProject unshared_project = 2;
-        External external = 3;
-    }
+  oneof variant {
+    SharedProject shared_project = 1;
+    UnsharedProject unshared_project = 2;
+    External external = 3;
+  }
 
-    message SharedProject {
-        uint64 id = 1;
-    }
+  message SharedProject {
+    uint64 id = 1;
+  }
 
-    message UnsharedProject {}
+  message UnsharedProject {}
 
-    message External {}
+  message External {}
 }
 
 message Call {
-    uint64 room_id = 1;
-    uint64 called_user_id = 2;
-    optional uint64 initial_project_id = 3;
+  uint64 room_id = 1;
+  uint64 called_user_id = 2;
+  optional uint64 initial_project_id = 3;
 }
 
 message IncomingCall {
-    uint64 room_id = 1;
-    uint64 calling_user_id = 2;
-    repeated uint64 participant_user_ids = 3;
-    optional ParticipantProject initial_project = 4;
+  uint64 room_id = 1;
+  uint64 calling_user_id = 2;
+  repeated uint64 participant_user_ids = 3;
+  optional ParticipantProject initial_project = 4;
 }
 
 message CallCanceled {
-    uint64 room_id = 1;
+  uint64 room_id = 1;
 }
 
 message CancelCall {
-    uint64 room_id = 1;
-    uint64 called_user_id = 2;
+  uint64 room_id = 1;
+  uint64 called_user_id = 2;
 }
 
 message DeclineCall {
-    uint64 room_id = 1;
+  uint64 room_id = 1;
 }
 
 message UpdateParticipantLocation {
-    uint64 room_id = 1;
-    ParticipantLocation location = 2;
+  uint64 room_id = 1;
+  ParticipantLocation location = 2;
 }
 
 message RoomUpdated {
-    Room room = 1;
+  Room room = 1;
 }
 
 message LiveKitConnectionInfo {
-    string server_url = 1;
-    string token = 2;
-    bool can_publish = 3;
+  string server_url = 1;
+  string token = 2;
+  bool can_publish = 3;
 }
 
 message ShareProject {
-    uint64 room_id = 1;
-    repeated WorktreeMetadata worktrees = 2;
-    reserved 3;
-    bool is_ssh_project = 4;
-    optional bool windows_paths = 5;
+  uint64 room_id = 1;
+  repeated WorktreeMetadata worktrees = 2;
+  reserved 3;
+  bool is_ssh_project = 4;
+  optional bool windows_paths = 5;
 }
 
 message ShareProjectResponse {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
 message UnshareProject {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
 message UpdateProject {
-    uint64 project_id = 1;
-    repeated WorktreeMetadata worktrees = 2;
+  uint64 project_id = 1;
+  repeated WorktreeMetadata worktrees = 2;
 }
 
 message JoinProject {
-    uint64 project_id = 1;
-    optional string committer_email = 2;
-    optional string committer_name = 3;
+  uint64 project_id = 1;
+  optional string committer_email = 2;
+  optional string committer_name = 3;
 }
 
 message JoinProjectResponse {
-    uint64 project_id = 5;
-    uint32 replica_id = 1;
-    repeated WorktreeMetadata worktrees = 2;
-    repeated Collaborator collaborators = 3;
-    repeated LanguageServer language_servers = 4;
-    repeated string language_server_capabilities = 8;
-    ChannelRole role = 6;
-    bool windows_paths = 9;
-    reserved 7;
+  uint64 project_id = 5;
+  uint32 replica_id = 1;
+  repeated WorktreeMetadata worktrees = 2;
+  repeated Collaborator collaborators = 3;
+  repeated LanguageServer language_servers = 4;
+  repeated string language_server_capabilities = 8;
+  ChannelRole role = 6;
+  bool windows_paths = 9;
+  reserved 7;
 }
 
 message LeaveProject {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
 message UpdateWorktree {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    string root_name = 3;
-    repeated Entry updated_entries = 4;
-    repeated uint64 removed_entries = 5;
-    repeated RepositoryEntry updated_repositories = 6; // deprecated
-    repeated uint64 removed_repositories = 7; // deprecated
-    uint64 scan_id = 8;
-    bool is_last_update = 9;
-    string abs_path = 10;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  string root_name = 3;
+  repeated Entry updated_entries = 4;
+  repeated uint64 removed_entries = 5;
+  repeated RepositoryEntry updated_repositories = 6; // deprecated
+  repeated uint64 removed_repositories = 7; // deprecated
+  uint64 scan_id = 8;
+  bool is_last_update = 9;
+  string abs_path = 10;
 }
 
 // deprecated
 message RepositoryEntry {
-    uint64 repository_id = 1;
-    reserved 2;
-    repeated StatusEntry updated_statuses = 3;
-    repeated string removed_statuses = 4;
-    repeated string current_merge_conflicts = 5;
-    optional Branch branch_summary = 6;
+  uint64 repository_id = 1;
+  reserved 2;
+  repeated StatusEntry updated_statuses = 3;
+  repeated string removed_statuses = 4;
+  repeated string current_merge_conflicts = 5;
+  optional Branch branch_summary = 6;
 }
 
 message AddProjectCollaborator {
-    uint64 project_id = 1;
-    Collaborator collaborator = 2;
+  uint64 project_id = 1;
+  Collaborator collaborator = 2;
 }
 
 message UpdateProjectCollaborator {
-    uint64 project_id = 1;
-    PeerId old_peer_id = 2;
-    PeerId new_peer_id = 3;
+  uint64 project_id = 1;
+  PeerId old_peer_id = 2;
+  PeerId new_peer_id = 3;
 }
 
 message RemoveProjectCollaborator {
-    uint64 project_id = 1;
-    PeerId peer_id = 2;
+  uint64 project_id = 1;
+  PeerId peer_id = 2;
 }
 
 message GetUsers {
-    repeated uint64 user_ids = 1;
+  repeated uint64 user_ids = 1;
 }
 
 message FuzzySearchUsers {
-    string query = 1;
+  string query = 1;
 }
 
 message UsersResponse {
-    repeated User users = 1;
+  repeated User users = 1;
 }
 
 message RequestContact {
-    uint64 responder_id = 1;
+  uint64 responder_id = 1;
 }
 
 message RemoveContact {
-    uint64 user_id = 1;
+  uint64 user_id = 1;
 }
 
 message RespondToContactRequest {
-    uint64 requester_id = 1;
-    ContactRequestResponse response = 2;
+  uint64 requester_id = 1;
+  ContactRequestResponse response = 2;
 }
 
 enum ContactRequestResponse {
-    Accept = 0;
-    Decline = 1;
-    Block = 2;
-    Dismiss = 3;
+  Accept = 0;
+  Decline = 1;
+  Block = 2;
+  Dismiss = 3;
 }
 
 message UpdateContacts {
-    repeated Contact contacts = 1;
-    repeated uint64 remove_contacts = 2;
-    repeated IncomingContactRequest incoming_requests = 3;
-    repeated uint64 remove_incoming_requests = 4;
-    repeated uint64 outgoing_requests = 5;
-    repeated uint64 remove_outgoing_requests = 6;
+  repeated Contact contacts = 1;
+  repeated uint64 remove_contacts = 2;
+  repeated IncomingContactRequest incoming_requests = 3;
+  repeated uint64 remove_incoming_requests = 4;
+  repeated uint64 outgoing_requests = 5;
+  repeated uint64 remove_outgoing_requests = 6;
 }
 
 message ShowContacts {}
 
 message IncomingContactRequest {
-    uint64 requester_id = 1;
+  uint64 requester_id = 1;
 }
 
 message Follow {
-    uint64 room_id = 1;
-    optional uint64 project_id = 2;
-    PeerId leader_id = 3;
+  uint64 room_id = 1;
+  optional uint64 project_id = 2;
+  PeerId leader_id = 3;
 }
 
 message FollowResponse {
-    View active_view = 3;
-    reserved 1;
-    repeated View views = 2;
+  View active_view = 3;
+  reserved 1;
+  repeated View views = 2;
 }
 
 message UpdateFollowers {
-    uint64 room_id = 1;
-    optional uint64 project_id = 2;
-    reserved 3;
-    oneof variant {
-        View create_view = 5;
-        UpdateActiveView update_active_view = 4;
-        UpdateView update_view = 6;
-    }
+  uint64 room_id = 1;
+  optional uint64 project_id = 2;
+  reserved 3;
+  oneof variant {
+    View create_view = 5;
+    UpdateActiveView update_active_view = 4;
+    UpdateView update_view = 6;
+  }
 }
 
 message Unfollow {
-    uint64 room_id = 1;
-    optional uint64 project_id = 2;
-    PeerId leader_id = 3;
+  uint64 room_id = 1;
+  optional uint64 project_id = 2;
+  PeerId leader_id = 3;
 }
 
 message ViewId {
-    PeerId creator = 1;
-    uint64 id = 2;
+  PeerId creator = 1;
+  uint64 id = 2;
 }
 
 message UpdateActiveView {
-    reserved 1, 2;
-    View view = 3;
+  reserved 1, 2;
+  View view = 3;
 }
 
 enum PanelId {
-    AssistantPanel = 0;
-    DebugPanel = 1;
+  AssistantPanel = 0;
+  DebugPanel = 1;
 }
 
 message UpdateView {
-    ViewId id = 1;
-    optional PeerId leader_id = 2;
-
-    oneof variant {
-        Editor editor = 3;
-    }
-
-    message Editor {
-        repeated ExcerptInsertion inserted_excerpts = 1;
-        repeated uint64 deleted_excerpts = 2;
-        repeated Selection selections = 3;
-        optional Selection pending_selection = 4;
-        EditorAnchor scroll_top_anchor = 5;
-        reserved 6;
-        reserved 7;
-        double scroll_x = 8;
-        double scroll_y = 9;
-    }
+  ViewId id = 1;
+  optional PeerId leader_id = 2;
+
+  oneof variant {
+    Editor editor = 3;
+  }
+
+  message Editor {
+    repeated ExcerptInsertion inserted_excerpts = 1;
+    repeated uint64 deleted_excerpts = 2;
+    repeated Selection selections = 3;
+    optional Selection pending_selection = 4;
+    EditorAnchor scroll_top_anchor = 5;
+    reserved 6;
+    reserved 7;
+    double scroll_x = 8;
+    double scroll_y = 9;
+  }
 }
 
 message View {
-    ViewId id = 1;
-    optional PeerId leader_id = 2;
-    optional PanelId panel_id = 6;
-
-    oneof variant {
-        Editor editor = 3;
-        ChannelView channel_view = 4;
-        ContextEditor context_editor = 5;
-    }
-
-    message Editor {
-        bool singleton = 1;
-        optional string title = 2;
-        repeated Excerpt excerpts = 3;
-        repeated Selection selections = 4;
-        optional Selection pending_selection = 5;
-        EditorAnchor scroll_top_anchor = 6;
-        reserved 7;
-        reserved 8;
-        double scroll_x = 9;
-        double scroll_y = 10;
-    }
-
-    message ChannelView {
-        uint64 channel_id = 1;
-        Editor editor = 2;
-    }
-
-    message ContextEditor {
-        string context_id = 1;
-        Editor editor = 2;
-    }
+  ViewId id = 1;
+  optional PeerId leader_id = 2;
+  optional PanelId panel_id = 6;
+
+  oneof variant {
+    Editor editor = 3;
+    ChannelView channel_view = 4;
+    ContextEditor context_editor = 5;
+  }
+
+  message Editor {
+    bool singleton = 1;
+    optional string title = 2;
+    repeated Excerpt excerpts = 3;
+    repeated Selection selections = 4;
+    optional Selection pending_selection = 5;
+    EditorAnchor scroll_top_anchor = 6;
+    reserved 7;
+    reserved 8;
+    double scroll_x = 9;
+    double scroll_y = 10;
+  }
+
+  message ChannelView {
+    uint64 channel_id = 1;
+    Editor editor = 2;
+  }
+
+  message ContextEditor {
+    string context_id = 1;
+    Editor editor = 2;
+  }
 }
 
 message ExcerptInsertion {
-    Excerpt excerpt = 1;
-    optional uint64 previous_excerpt_id = 2;
+  Excerpt excerpt = 1;
+  optional uint64 previous_excerpt_id = 2;
 }
 
 message Excerpt {
-    uint64 id = 1;
-    uint64 buffer_id = 2;
-    Anchor context_start = 3;
-    Anchor context_end = 4;
-    Anchor primary_start = 5;
-    Anchor primary_end = 6;
+  uint64 id = 1;
+  uint64 buffer_id = 2;
+  Anchor context_start = 3;
+  Anchor context_end = 4;
+  Anchor primary_start = 5;
+  Anchor primary_end = 6;
 }
 
 message Contact {
-    uint64 user_id = 1;
-    bool online = 2;
-    bool busy = 3;
+  uint64 user_id = 1;
+  bool online = 2;
+  bool busy = 3;
 }
 
 message SetRoomParticipantRole {
-    uint64 room_id = 1;
-    uint64 user_id = 2;
-    ChannelRole role = 3;
+  uint64 room_id = 1;
+  uint64 user_id = 2;
+  ChannelRole role = 3;
 }

crates/proto/proto/channel.proto 🔗

@@ -1,294 +1,294 @@
 syntax = "proto3";
 package zed.messages;
 
-import "core.proto";
 import "buffer.proto";
+import "core.proto";
 
 message Channel {
-    uint64 id = 1;
-    string name = 2;
-    ChannelVisibility visibility = 3;
-    int32 channel_order = 4;
-    repeated uint64 parent_path = 5;
+  uint64 id = 1;
+  string name = 2;
+  ChannelVisibility visibility = 3;
+  int32 channel_order = 4;
+  repeated uint64 parent_path = 5;
 }
 
 enum ChannelVisibility {
-    Public = 0;
-    Members = 1;
+  Public = 0;
+  Members = 1;
 }
 
 message UpdateChannels {
-    repeated Channel channels = 1;
-    repeated uint64 delete_channels = 4;
-    repeated Channel channel_invitations = 5;
-    repeated uint64 remove_channel_invitations = 6;
-    repeated ChannelParticipants channel_participants = 7;
-    repeated ChannelBufferVersion latest_channel_buffer_versions = 9;
+  repeated Channel channels = 1;
+  repeated uint64 delete_channels = 4;
+  repeated Channel channel_invitations = 5;
+  repeated uint64 remove_channel_invitations = 6;
+  repeated ChannelParticipants channel_participants = 7;
+  repeated ChannelBufferVersion latest_channel_buffer_versions = 9;
 
-    reserved 8;
-    reserved 10 to 15;
+  reserved 8;
+  reserved 10 to 15;
 }
 
 message UpdateUserChannels {
-    repeated ChannelBufferVersion observed_channel_buffer_version = 2;
-    repeated ChannelMembership channel_memberships = 3;
+  repeated ChannelBufferVersion observed_channel_buffer_version = 2;
+  repeated ChannelMembership channel_memberships = 3;
 
-    reserved 1;
+  reserved 1;
 }
 
 message ChannelMembership {
-    uint64 channel_id = 1;
-    ChannelRole role = 2;
+  uint64 channel_id = 1;
+  ChannelRole role = 2;
 }
 
 message ChannelMessageId {
-    uint64 channel_id = 1;
-    uint64 message_id = 2;
+  uint64 channel_id = 1;
+  uint64 message_id = 2;
 }
 
 message ChannelPermission {
-    uint64 channel_id = 1;
-    ChannelRole role = 3;
+  uint64 channel_id = 1;
+  ChannelRole role = 3;
 }
 
 message ChannelParticipants {
-    uint64 channel_id = 1;
-    repeated uint64 participant_user_ids = 2;
+  uint64 channel_id = 1;
+  repeated uint64 participant_user_ids = 2;
 }
 
 message JoinChannel {
-    uint64 channel_id = 1;
+  uint64 channel_id = 1;
 }
 
 message DeleteChannel {
-    uint64 channel_id = 1;
+  uint64 channel_id = 1;
 }
 
 message GetChannelMembers {
-    uint64 channel_id = 1;
-    string query = 2;
-    uint64 limit = 3;
+  uint64 channel_id = 1;
+  string query = 2;
+  uint64 limit = 3;
 }
 
 message GetChannelMembersResponse {
-    repeated ChannelMember members = 1;
-    repeated User users = 2;
+  repeated ChannelMember members = 1;
+  repeated User users = 2;
 }
 
 message ChannelMember {
-    uint64 user_id = 1;
-    Kind kind = 3;
-    ChannelRole role = 4;
+  uint64 user_id = 1;
+  Kind kind = 3;
+  ChannelRole role = 4;
 
-    enum Kind {
-        Member = 0;
-        Invitee = 1;
-    }
+  enum Kind {
+    Member = 0;
+    Invitee = 1;
+  }
 }
 
 message SubscribeToChannels {}
 
 message CreateChannel {
-    string name = 1;
-    optional uint64 parent_id = 2;
+  string name = 1;
+  optional uint64 parent_id = 2;
 }
 
 message CreateChannelResponse {
-    Channel channel = 1;
-    optional uint64 parent_id = 2;
+  Channel channel = 1;
+  optional uint64 parent_id = 2;
 }
 
 message InviteChannelMember {
-    uint64 channel_id = 1;
-    uint64 user_id = 2;
-    ChannelRole role = 4;
+  uint64 channel_id = 1;
+  uint64 user_id = 2;
+  ChannelRole role = 4;
 }
 
 message RemoveChannelMember {
-    uint64 channel_id = 1;
-    uint64 user_id = 2;
+  uint64 channel_id = 1;
+  uint64 user_id = 2;
 }
 
 enum ChannelRole {
-    Admin = 0;
-    Member = 1;
-    Guest = 2;
-    Banned = 3;
-    Talker = 4;
+  Admin = 0;
+  Member = 1;
+  Guest = 2;
+  Banned = 3;
+  Talker = 4;
 }
 
 message SetChannelMemberRole {
-    uint64 channel_id = 1;
-    uint64 user_id = 2;
-    ChannelRole role = 3;
+  uint64 channel_id = 1;
+  uint64 user_id = 2;
+  ChannelRole role = 3;
 }
 
 message SetChannelVisibility {
-    uint64 channel_id = 1;
-    ChannelVisibility visibility = 2;
+  uint64 channel_id = 1;
+  ChannelVisibility visibility = 2;
 }
 
 message RenameChannel {
-    uint64 channel_id = 1;
-    string name = 2;
+  uint64 channel_id = 1;
+  string name = 2;
 }
 
 message RenameChannelResponse {
-    Channel channel = 1;
+  Channel channel = 1;
 }
 
 message JoinChannelChat {
-    uint64 channel_id = 1;
+  uint64 channel_id = 1;
 }
 
 message JoinChannelChatResponse {
-    repeated ChannelMessage messages = 1;
-    bool done = 2;
+  repeated ChannelMessage messages = 1;
+  bool done = 2;
 }
 
 message LeaveChannelChat {
-    uint64 channel_id = 1;
+  uint64 channel_id = 1;
 }
 
 message SendChannelMessage {
-    uint64 channel_id = 1;
-    string body = 2;
-    Nonce nonce = 3;
-    repeated ChatMention mentions = 4;
-    optional uint64 reply_to_message_id = 5;
+  uint64 channel_id = 1;
+  string body = 2;
+  Nonce nonce = 3;
+  repeated ChatMention mentions = 4;
+  optional uint64 reply_to_message_id = 5;
 }
 
 message RemoveChannelMessage {
-    uint64 channel_id = 1;
-    uint64 message_id = 2;
+  uint64 channel_id = 1;
+  uint64 message_id = 2;
 }
 
 message UpdateChannelMessage {
-    uint64 channel_id = 1;
-    uint64 message_id = 2;
-    Nonce nonce = 4;
-    string body = 5;
-    repeated ChatMention mentions = 6;
+  uint64 channel_id = 1;
+  uint64 message_id = 2;
+  Nonce nonce = 4;
+  string body = 5;
+  repeated ChatMention mentions = 6;
 }
 
 message AckChannelMessage {
-    uint64 channel_id = 1;
-    uint64 message_id = 2;
+  uint64 channel_id = 1;
+  uint64 message_id = 2;
 }
 
 message SendChannelMessageResponse {
-    ChannelMessage message = 1;
+  ChannelMessage message = 1;
 }
 
 message ChannelMessageSent {
-    uint64 channel_id = 1;
-    ChannelMessage message = 2;
+  uint64 channel_id = 1;
+  ChannelMessage message = 2;
 }
 
 message ChannelMessageUpdate {
-    uint64 channel_id = 1;
-    ChannelMessage message = 2;
+  uint64 channel_id = 1;
+  ChannelMessage message = 2;
 }
 
 message GetChannelMessages {
-    uint64 channel_id = 1;
-    uint64 before_message_id = 2;
+  uint64 channel_id = 1;
+  uint64 before_message_id = 2;
 }
 
 message GetChannelMessagesResponse {
-    repeated ChannelMessage messages = 1;
-    bool done = 2;
+  repeated ChannelMessage messages = 1;
+  bool done = 2;
 }
 
 message GetChannelMessagesById {
-    repeated uint64 message_ids = 1;
+  repeated uint64 message_ids = 1;
 }
 
 message MoveChannel {
-    uint64 channel_id = 1;
-    uint64 to = 2;
+  uint64 channel_id = 1;
+  uint64 to = 2;
 }
 
 message ReorderChannel {
-    uint64 channel_id = 1;
-    enum Direction {
-        Up = 0;
-        Down = 1;
-    }
-    Direction direction = 2;
+  uint64 channel_id = 1;
+  enum Direction {
+    Up = 0;
+    Down = 1;
+  }
+  Direction direction = 2;
 }
 
 message JoinChannelBuffer {
-    uint64 channel_id = 1;
+  uint64 channel_id = 1;
 }
 
 message ChannelBufferVersion {
-    uint64 channel_id = 1;
-    repeated VectorClockEntry version = 2;
-    uint64 epoch = 3;
+  uint64 channel_id = 1;
+  repeated VectorClockEntry version = 2;
+  uint64 epoch = 3;
 }
 
 message UpdateChannelBufferCollaborators {
-    uint64 channel_id = 1;
-    repeated Collaborator collaborators = 2;
+  uint64 channel_id = 1;
+  repeated Collaborator collaborators = 2;
 }
 
 message UpdateChannelBuffer {
-    uint64 channel_id = 1;
-    repeated Operation operations = 2;
+  uint64 channel_id = 1;
+  repeated Operation operations = 2;
 }
 
 message ChannelMessage {
-    uint64 id = 1;
-    string body = 2;
-    uint64 timestamp = 3;
-    uint64 sender_id = 4;
-    Nonce nonce = 5;
-    repeated ChatMention mentions = 6;
-    optional uint64 reply_to_message_id = 7;
-    optional uint64 edited_at = 8;
+  uint64 id = 1;
+  string body = 2;
+  uint64 timestamp = 3;
+  uint64 sender_id = 4;
+  Nonce nonce = 5;
+  repeated ChatMention mentions = 6;
+  optional uint64 reply_to_message_id = 7;
+  optional uint64 edited_at = 8;
 }
 
 message ChatMention {
-    Range range = 1;
-    uint64 user_id = 2;
+  Range range = 1;
+  uint64 user_id = 2;
 }
 
 message RejoinChannelBuffers {
-    repeated ChannelBufferVersion buffers = 1;
+  repeated ChannelBufferVersion buffers = 1;
 }
 
 message RejoinChannelBuffersResponse {
-    repeated RejoinedChannelBuffer buffers = 1;
+  repeated RejoinedChannelBuffer buffers = 1;
 }
 
 message AckBufferOperation {
-    uint64 buffer_id = 1;
-    uint64 epoch = 2;
-    repeated VectorClockEntry version = 3;
+  uint64 buffer_id = 1;
+  uint64 epoch = 2;
+  repeated VectorClockEntry version = 3;
 }
 
 message JoinChannelBufferResponse {
-    uint64 buffer_id = 1;
-    uint32 replica_id = 2;
-    string base_text = 3;
-    repeated Operation operations = 4;
-    repeated Collaborator collaborators = 5;
-    uint64 epoch = 6;
+  uint64 buffer_id = 1;
+  uint32 replica_id = 2;
+  string base_text = 3;
+  repeated Operation operations = 4;
+  repeated Collaborator collaborators = 5;
+  uint64 epoch = 6;
 }
 
 message RejoinedChannelBuffer {
-    uint64 channel_id = 1;
-    repeated VectorClockEntry version = 2;
-    repeated Operation operations = 3;
-    repeated Collaborator collaborators = 4;
+  uint64 channel_id = 1;
+  repeated VectorClockEntry version = 2;
+  repeated Operation operations = 3;
+  repeated Collaborator collaborators = 4;
 }
 
 message LeaveChannelBuffer {
-    uint64 channel_id = 1;
+  uint64 channel_id = 1;
 }
 
 message RespondToChannelInvite {
-    uint64 channel_id = 1;
-    bool accept = 2;
+  uint64 channel_id = 1;
+  bool accept = 2;
 }

crates/proto/proto/core.proto 🔗

@@ -2,28 +2,28 @@ syntax = "proto3";
 package zed.messages;
 
 message PeerId {
-    uint32 owner_id = 1;
-    uint32 id = 2;
+  uint32 owner_id = 1;
+  uint32 id = 2;
 }
 
 message User {
-    reserved 4;
-    uint64 id = 1;
-    string github_login = 2;
-    string avatar_url = 3;
-    optional string name = 5;
+  reserved 4;
+  uint64 id = 1;
+  string github_login = 2;
+  string avatar_url = 3;
+  optional string name = 5;
 }
 
 message Nonce {
-    uint64 upper_half = 1;
-    uint64 lower_half = 2;
+  uint64 upper_half = 1;
+  uint64 lower_half = 2;
 }
 
 message Collaborator {
-    PeerId peer_id = 1;
-    uint32 replica_id = 2;
-    uint64 user_id = 3;
-    bool is_host = 4;
-    optional string committer_name = 5;
-    optional string committer_email = 6;
+  PeerId peer_id = 1;
+  uint32 replica_id = 2;
+  uint64 user_id = 3;
+  bool is_host = 4;
+  optional string committer_name = 5;
+  optional string committer_email = 6;
 }

crates/proto/proto/debugger.proto 🔗

@@ -1,555 +1,553 @@
 syntax = "proto3";
 package zed.messages;
 
-import "core.proto";
 import "buffer.proto";
 import "task.proto";
 
 enum BreakpointState {
-    Enabled = 0;
-    Disabled = 1;
+  Enabled = 0;
+  Disabled = 1;
 }
 
 message Breakpoint {
-    Anchor position = 1;
-    BreakpointState state = 2;
-    reserved 3;
-    optional string message = 4;
-    optional string condition = 5;
-    optional string hit_condition = 6;
-    map<uint64, BreakpointSessionState> session_state = 7;
+  Anchor position = 1;
+  BreakpointState state = 2;
+  reserved 3;
+  optional string message = 4;
+  optional string condition = 5;
+  optional string hit_condition = 6;
+  map<uint64, BreakpointSessionState> session_state = 7;
 }
 
 message BreakpointSessionState {
-    uint64 id = 1;
-    bool verified = 2;
+  uint64 id = 1;
+  bool verified = 2;
 }
 
 message BreakpointsForFile {
-    uint64 project_id = 1;
-    string path = 2;
-    repeated Breakpoint breakpoints = 3;
+  uint64 project_id = 1;
+  string path = 2;
+  repeated Breakpoint breakpoints = 3;
 }
 
 message ToggleBreakpoint {
-    uint64 project_id = 1;
-    string path = 2;
-    Breakpoint breakpoint = 3;
+  uint64 project_id = 1;
+  string path = 2;
+  Breakpoint breakpoint = 3;
 }
 
 enum DapThreadStatus {
-    Running = 0;
-    Stopped = 1;
-    Exited = 2;
-    Ended = 3;
+  Running = 0;
+  Stopped = 1;
+  Exited = 2;
+  Ended = 3;
 }
 
 enum VariablesArgumentsFilter {
-    Indexed = 0;
-    Named = 1;
+  Indexed = 0;
+  Named = 1;
 }
 
 message ValueFormat {
-    optional bool hex = 1;
+  optional bool hex = 1;
 }
 
 message VariablesRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    uint64 variables_reference = 3;
-    optional VariablesArgumentsFilter filter = 4;
-    optional uint64 start = 5;
-    optional uint64 count = 6;
-    optional ValueFormat format = 7;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  uint64 variables_reference = 3;
+  optional VariablesArgumentsFilter filter = 4;
+  optional uint64 start = 5;
+  optional uint64 count = 6;
+  optional ValueFormat format = 7;
 }
 
 enum SteppingGranularity {
-    Statement = 0;
-    Line = 1;
-    Instruction = 2;
+  Statement = 0;
+  Line = 1;
+  Instruction = 2;
 }
 
 message DapLocationsRequest {
-    uint64 project_id = 1;
-    uint64 session_id = 2;
-    uint64 location_reference = 3;
+  uint64 project_id = 1;
+  uint64 session_id = 2;
+  uint64 location_reference = 3;
 }
 
 message DapLocationsResponse {
-    DapSource source = 1;
-    uint64 line = 2;
-    optional uint64 column = 3;
-    optional uint64 end_line = 4;
-    optional uint64 end_column = 5;
+  DapSource source = 1;
+  uint64 line = 2;
+  optional uint64 column = 3;
+  optional uint64 end_line = 4;
+  optional uint64 end_column = 5;
 }
 
 enum DapEvaluateContext {
-    Repl = 0;
-    Watch = 1;
-    Hover = 2;
-    Clipboard = 3;
-    EvaluateVariables = 4;
-    EvaluateUnknown = 5;
+  Repl = 0;
+  Watch = 1;
+  Hover = 2;
+  Clipboard = 3;
+  EvaluateVariables = 4;
+  EvaluateUnknown = 5;
 }
 
 message DapEvaluateRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    string expression = 3;
-    optional uint64 frame_id = 4;
-    optional DapEvaluateContext context = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  string expression = 3;
+  optional uint64 frame_id = 4;
+  optional DapEvaluateContext context = 5;
 }
 
 message DapEvaluateResponse {
-    string result = 1;
-    optional string evaluate_type = 2;
-    uint64 variable_reference = 3;
-    optional uint64 named_variables = 4;
-    optional uint64 indexed_variables = 5;
-    optional string memory_reference = 6;
+  string result = 1;
+  optional string evaluate_type = 2;
+  uint64 variable_reference = 3;
+  optional uint64 named_variables = 4;
+  optional uint64 indexed_variables = 5;
+  optional string memory_reference = 6;
 }
 
-
 message DapCompletionRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    string query = 3;
-    optional uint64 frame_id = 4;
-    optional uint64 line = 5;
-    uint64 column = 6;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  string query = 3;
+  optional uint64 frame_id = 4;
+  optional uint64 line = 5;
+  uint64 column = 6;
 }
 
 enum DapCompletionItemType {
-    Method = 0;
-    Function = 1;
-    Constructor = 2;
-    Field = 3;
-    Variable = 4;
-    Class = 5;
-    Interface = 6;
-    Module = 7;
-    Property = 8;
-    Unit = 9;
-    Value = 10;
-    Enum = 11;
-    Keyword = 12;
-    Snippet = 13;
-    Text = 14;
-    Color = 15;
-    CompletionItemFile = 16;
-    Reference = 17;
-    Customcolor = 19;
+  Method = 0;
+  Function = 1;
+  Constructor = 2;
+  Field = 3;
+  Variable = 4;
+  Class = 5;
+  Interface = 6;
+  Module = 7;
+  Property = 8;
+  Unit = 9;
+  Value = 10;
+  Enum = 11;
+  Keyword = 12;
+  Snippet = 13;
+  Text = 14;
+  Color = 15;
+  CompletionItemFile = 16;
+  Reference = 17;
+  Customcolor = 19;
 }
 
 message DapCompletionItem {
-    string label = 1;
-    optional string text = 2;
-    optional string sort_text = 3;
-    optional string detail = 4;
-    optional DapCompletionItemType typ = 5;
-    optional uint64 start = 6;
-    optional uint64 length = 7;
-    optional uint64 selection_start = 8;
-    optional uint64 selection_length = 9;
+  string label = 1;
+  optional string text = 2;
+  optional string sort_text = 3;
+  optional string detail = 4;
+  optional DapCompletionItemType typ = 5;
+  optional uint64 start = 6;
+  optional uint64 length = 7;
+  optional uint64 selection_start = 8;
+  optional uint64 selection_length = 9;
 }
 
 message DapCompletionResponse {
-    uint64 client_id = 1;
-    repeated DapCompletionItem completions = 2;
+  uint64 client_id = 1;
+  repeated DapCompletionItem completions = 2;
 }
 
 message DapScopesRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    uint64 stack_frame_id = 3;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  uint64 stack_frame_id = 3;
 }
 
 message DapScopesResponse {
-    repeated DapScope scopes = 1;
+  repeated DapScope scopes = 1;
 }
 
 message DapSetVariableValueRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    string name = 3;
-    string value = 4;
-    uint64 variables_reference = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  string name = 3;
+  string value = 4;
+  uint64 variables_reference = 5;
 }
 
 message DapSetVariableValueResponse {
-    uint64 client_id = 1;
-    string value = 2;
-    optional string variable_type = 3;
-    optional uint64 variables_reference = 4;
-    optional uint64 named_variables = 5;
-    optional uint64 indexed_variables = 6;
-    optional string memory_reference = 7;
+  uint64 client_id = 1;
+  string value = 2;
+  optional string variable_type = 3;
+  optional uint64 variables_reference = 4;
+  optional uint64 named_variables = 5;
+  optional uint64 indexed_variables = 6;
+  optional string memory_reference = 7;
 }
 
 message DapPauseRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
 }
 
 message DapDisconnectRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    optional bool restart = 3;
-    optional bool terminate_debuggee = 4;
-    optional bool suspend_debuggee = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  optional bool restart = 3;
+  optional bool terminate_debuggee = 4;
+  optional bool suspend_debuggee = 5;
 }
 
 message DapTerminateThreadsRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    repeated int64 thread_ids = 3;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  repeated int64 thread_ids = 3;
 }
 
 message DapThreadsRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
 }
 
 message DapThreadsResponse {
-    repeated DapThread threads = 1;
+  repeated DapThread threads = 1;
 }
 
 message DapTerminateRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    optional bool restart = 3;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  optional bool restart = 3;
 }
 
 message DapRestartRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    bytes raw_args = 3;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  bytes raw_args = 3;
 }
 
 message DapRestartStackFrameRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    uint64 stack_frame_id = 3;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  uint64 stack_frame_id = 3;
 }
 
 message ToggleIgnoreBreakpoints {
-    uint64 project_id = 1;
-    uint32 session_id = 2;
+  uint64 project_id = 1;
+  uint32 session_id = 2;
 }
 
 message IgnoreBreakpointState {
-    uint64 project_id = 1;
-    uint64 session_id = 2;
-    bool ignore = 3;
+  uint64 project_id = 1;
+  uint64 session_id = 2;
+  bool ignore = 3;
 }
 
 message DapNextRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
-    optional bool single_thread = 4;
-    optional SteppingGranularity granularity = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
+  optional bool single_thread = 4;
+  optional SteppingGranularity granularity = 5;
 }
 
 message DapStepInRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
-    optional uint64 target_id = 4;
-    optional bool single_thread = 5;
-    optional SteppingGranularity granularity = 6;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
+  optional uint64 target_id = 4;
+  optional bool single_thread = 5;
+  optional SteppingGranularity granularity = 6;
 }
 
 message DapStepOutRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
-    optional bool single_thread = 4;
-    optional SteppingGranularity granularity = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
+  optional bool single_thread = 4;
+  optional SteppingGranularity granularity = 5;
 }
 
 message DapStepBackRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
-    optional bool single_thread = 4;
-    optional SteppingGranularity granularity = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
+  optional bool single_thread = 4;
+  optional SteppingGranularity granularity = 5;
 }
 
 message DapContinueRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
-    optional bool single_thread = 4;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
+  optional bool single_thread = 4;
 }
 
 message DapContinueResponse {
-    uint64 client_id = 1;
-    optional bool all_threads_continued = 2;
+  uint64 client_id = 1;
+  optional bool all_threads_continued = 2;
 }
 
 message DapModulesRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
 }
 
 message DapModulesResponse {
-    uint64 client_id = 1;
-    repeated DapModule modules = 2;
+  uint64 client_id = 1;
+  repeated DapModule modules = 2;
 }
 
 message DapLoadedSourcesRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
 }
 
 message DapLoadedSourcesResponse {
-    uint64 client_id = 1;
-    repeated DapSource sources = 2;
+  uint64 client_id = 1;
+  repeated DapSource sources = 2;
 }
 
 message DapStackTraceRequest {
-    uint64 project_id = 1;
-    uint64 client_id = 2;
-    int64 thread_id = 3;
-    optional uint64 start_frame = 4;
-    optional uint64 stack_trace_levels = 5;
+  uint64 project_id = 1;
+  uint64 client_id = 2;
+  int64 thread_id = 3;
+  optional uint64 start_frame = 4;
+  optional uint64 stack_trace_levels = 5;
 }
 
 message DapStackTraceResponse {
-    repeated DapStackFrame frames = 1;
+  repeated DapStackFrame frames = 1;
 }
 
 message DapStackFrame {
-    uint64 id = 1;
-    string name = 2;
-    optional DapSource source = 3;
-    uint64 line = 4;
-    uint64 column = 5;
-    optional uint64 end_line = 6;
-    optional uint64 end_column = 7;
-    optional bool can_restart = 8;
-    optional string instruction_pointer_reference = 9;
-    optional DapModuleId module_id = 10;
-    optional DapStackPresentationHint presentation_hint = 11;
+  uint64 id = 1;
+  string name = 2;
+  optional DapSource source = 3;
+  uint64 line = 4;
+  uint64 column = 5;
+  optional uint64 end_line = 6;
+  optional uint64 end_column = 7;
+  optional bool can_restart = 8;
+  optional string instruction_pointer_reference = 9;
+  optional DapModuleId module_id = 10;
+  optional DapStackPresentationHint presentation_hint = 11;
 }
 
 message DebuggerLoadedSourceList {
-    uint64 client_id = 1;
-    repeated DapSource sources = 2;
+  uint64 client_id = 1;
+  repeated DapSource sources = 2;
 }
 
 message DapVariables {
-    uint64 client_id = 1;
-    repeated DapVariable variables = 2;
+  uint64 client_id = 1;
+  repeated DapVariable variables = 2;
 }
 
 // Remote Debugging: Dap Types
 message DapVariable {
-    string name = 1;
-    string value = 2;
-    optional string type = 3;
-    // optional DapVariablePresentationHint presentation_hint = 4;
-    optional string evaluate_name = 5;
-    uint64 variables_reference = 6;
-    optional uint64 named_variables = 7;
-    optional uint64 indexed_variables = 8;
-    optional string memory_reference = 9;
+  string name = 1;
+  string value = 2;
+  optional string type = 3;
+  // optional DapVariablePresentationHint presentation_hint = 4;
+  optional string evaluate_name = 5;
+  uint64 variables_reference = 6;
+  optional uint64 named_variables = 7;
+  optional uint64 indexed_variables = 8;
+  optional string memory_reference = 9;
 }
 
 message DapThread {
-    int64 id = 1;
-    string name = 2;
+  int64 id = 1;
+  string name = 2;
 }
 
 message DapScope {
-    string name = 1;
-    optional DapScopePresentationHint presentation_hint = 2;
-    uint64 variables_reference = 3;
-    optional uint64 named_variables = 4;
-    optional uint64 indexed_variables = 5;
-    bool expensive = 6;
-    optional DapSource source = 7;
-    optional uint64 line = 8;
-    optional uint64 column = 9;
-    optional uint64 end_line = 10;
-    optional uint64 end_column = 11;
+  string name = 1;
+  optional DapScopePresentationHint presentation_hint = 2;
+  uint64 variables_reference = 3;
+  optional uint64 named_variables = 4;
+  optional uint64 indexed_variables = 5;
+  bool expensive = 6;
+  optional DapSource source = 7;
+  optional uint64 line = 8;
+  optional uint64 column = 9;
+  optional uint64 end_line = 10;
+  optional uint64 end_column = 11;
 }
 
 message DapSource {
-    optional string name = 1;
-    optional string path = 2;
-    optional uint64 source_reference = 3;
-    optional DapSourcePresentationHint presentation_hint = 4;
-    optional string origin = 5;
-    repeated DapSource sources = 6;
-    optional bytes adapter_data = 7;
-    repeated DapChecksum checksums = 8;
+  optional string name = 1;
+  optional string path = 2;
+  optional uint64 source_reference = 3;
+  optional DapSourcePresentationHint presentation_hint = 4;
+  optional string origin = 5;
+  repeated DapSource sources = 6;
+  optional bytes adapter_data = 7;
+  repeated DapChecksum checksums = 8;
 }
 
 enum DapOutputCategory {
-    ConsoleOutput = 0;
-    Important = 1;
-    Stdout = 2;
-    Stderr = 3;
-    Unknown = 4;
+  ConsoleOutput = 0;
+  Important = 1;
+  Stdout = 2;
+  Stderr = 3;
+  Unknown = 4;
 }
 
 enum DapOutputEventGroup {
-    Start = 0;
-    StartCollapsed = 1;
-    End = 2;
+  Start = 0;
+  StartCollapsed = 1;
+  End = 2;
 }
 
 message DapOutputEvent {
-    string output = 1;
-    optional DapOutputCategory category = 2;
-    optional uint64 variables_reference = 3;
-    optional DapOutputEventGroup group = 4;
-    optional DapSource source = 5;
-    optional uint32 line = 6;
-    optional uint32 column = 7;
+  string output = 1;
+  optional DapOutputCategory category = 2;
+  optional uint64 variables_reference = 3;
+  optional DapOutputEventGroup group = 4;
+  optional DapSource source = 5;
+  optional uint32 line = 6;
+  optional uint32 column = 7;
 }
 
 enum DapChecksumAlgorithm {
-    CHECKSUM_ALGORITHM_UNSPECIFIED = 0;
-    MD5 = 1;
-    SHA1 = 2;
-    SHA256 = 3;
-    TIMESTAMP = 4;
+  CHECKSUM_ALGORITHM_UNSPECIFIED = 0;
+  MD5 = 1;
+  SHA1 = 2;
+  SHA256 = 3;
+  TIMESTAMP = 4;
 }
 
 message DapChecksum {
-    DapChecksumAlgorithm algorithm = 1;
-    string checksum = 2;
+  DapChecksumAlgorithm algorithm = 1;
+  string checksum = 2;
 }
 
 enum DapScopePresentationHint {
-    Arguments = 0;
-    Locals = 1;
-    Registers = 2;
-    ReturnValue = 3;
-    ScopeUnknown = 4;
+  Arguments = 0;
+  Locals = 1;
+  Registers = 2;
+  ReturnValue = 3;
+  ScopeUnknown = 4;
 }
 
 enum DapSourcePresentationHint {
-    SourceNormal = 0;
-    Emphasize = 1;
-    Deemphasize = 2;
-    SourceUnknown = 3;
+  SourceNormal = 0;
+  Emphasize = 1;
+  Deemphasize = 2;
+  SourceUnknown = 3;
 }
 
 enum DapStackPresentationHint {
-    StackNormal = 0;
-    Label = 1;
-    Subtle = 2;
-    StackUnknown = 3;
+  StackNormal = 0;
+  Label = 1;
+  Subtle = 2;
+  StackUnknown = 3;
 }
 message DapModule {
-    DapModuleId id = 1;
-    string name = 2;
-    optional string path = 3;
-    optional bool is_optimized = 4;
-    optional bool is_user_code = 5;
-    optional string version = 6;
-    optional string symbol_status = 7;
-    optional string symbol_file_path = 8;
-    optional string date_time_stamp = 9;
-    optional string address_range = 10;
+  DapModuleId id = 1;
+  string name = 2;
+  optional string path = 3;
+  optional bool is_optimized = 4;
+  optional bool is_user_code = 5;
+  optional string version = 6;
+  optional string symbol_status = 7;
+  optional string symbol_file_path = 8;
+  optional string date_time_stamp = 9;
+  optional string address_range = 10;
 }
 
 message DebugTaskDefinition {
-    string adapter = 1;
-    string label = 2;
-    string config = 3;
-    optional TcpHost tcp_connection = 4;
+  string adapter = 1;
+  string label = 2;
+  string config = 3;
+  optional TcpHost tcp_connection = 4;
 }
 
 message TcpHost {
-    optional uint32 port = 1;
-    optional string host = 2;
-    optional uint64 timeout = 3;
+  optional uint32 port = 1;
+  optional string host = 2;
+  optional uint64 timeout = 3;
 }
 
 message DebugLaunchRequest {
-    string program = 1;
-    optional string cwd = 2;
-    repeated string args = 3;
-    map<string, string> env = 4;
+  string program = 1;
+  optional string cwd = 2;
+  repeated string args = 3;
+  map<string, string> env = 4;
 }
 
 message DebugAttachRequest {
-    uint32 process_id = 1;
+  uint32 process_id = 1;
 }
 
 message DapModuleId {
-    oneof id {
-        uint32 number = 1;
-        string string = 2;
-    }
+  oneof id {
+    uint32 number = 1;
+    string string = 2;
+  }
 }
 
 message GetDebugAdapterBinary {
-    uint64 project_id = 1;
-    uint64 session_id = 3;
-    DebugTaskDefinition definition = 2;
-    uint64 worktree_id = 4;
+  uint64 project_id = 1;
+  uint64 session_id = 3;
+  DebugTaskDefinition definition = 2;
+  uint64 worktree_id = 4;
 }
 
 message DebugAdapterBinary {
-    optional string command = 1;
-    repeated string arguments = 2;
-    map<string, string> envs = 3;
-    optional string cwd = 4;
-    optional TcpHost connection = 5;
-    string configuration = 7;
-    LaunchType launch_type = 8;
-    enum LaunchType {
-        Attach = 0;
-        Launch = 1;
-    }
+  optional string command = 1;
+  repeated string arguments = 2;
+  map<string, string> envs = 3;
+  optional string cwd = 4;
+  optional TcpHost connection = 5;
+  string configuration = 7;
+  LaunchType launch_type = 8;
+  enum LaunchType {
+    Attach = 0;
+    Launch = 1;
+  }
 }
 
 message RunDebugLocators {
-    uint64 project_id = 1;
-    SpawnInTerminal build_command = 2;
-    string locator = 3;
+  uint64 project_id = 1;
+  SpawnInTerminal build_command = 2;
+  string locator = 3;
 }
 
 message DebugRequest {
-    oneof request {
-        DebugLaunchRequest debug_launch_request = 1;
-        DebugAttachRequest debug_attach_request = 2;
-    }
+  oneof request {
+    DebugLaunchRequest debug_launch_request = 1;
+    DebugAttachRequest debug_attach_request = 2;
+  }
 }
 
 message DebugScenario {
-    string label = 1;
-    string adapter = 2;
-    reserved 3;
-    DebugRequest request = 4;
-    optional TcpHost connection = 5;
-    optional bool stop_on_entry = 6;
-    optional string configuration = 7;
+  string label = 1;
+  string adapter = 2;
+  reserved 3;
+  DebugRequest request = 4;
+  optional TcpHost connection = 5;
+  optional bool stop_on_entry = 6;
+  optional string configuration = 7;
 }
 
 message LogToDebugConsole {
-    uint64 project_id = 1;
-    uint64 session_id = 2;
-    string message = 3;
+  uint64 project_id = 1;
+  uint64 session_id = 2;
+  string message = 3;
 }
 
 message GetProcesses {
-    uint64 project_id = 1;
+  uint64 project_id = 1;
 }
 
 message GetProcessesResponse {
-    repeated ProcessInfo processes = 1;
+  repeated ProcessInfo processes = 1;
 }
 
 message ProcessInfo {
-    uint32 pid = 1;
-    string name = 2;
-    repeated string command = 3;
+  uint32 pid = 1;
+  string name = 2;
+  repeated string command = 3;
 }

crates/proto/proto/download.proto 🔗

@@ -5,32 +5,32 @@ import "core.proto";
 import "worktree.proto";
 
 message DownloadFileByPath {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    string path = 3;
-    uint64 file_id = 4;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  string path = 3;
+  uint64 file_id = 4;
 }
 
 message DownloadFileResponse {
-    uint64 file_id = 1;
+  uint64 file_id = 1;
 }
 
 message CreateFileForPeer {
-    uint64 project_id = 1;
-    PeerId peer_id = 2;
-    oneof variant {
-        FileState state = 3;
-        FileChunk chunk = 4;
-    }
+  uint64 project_id = 1;
+  PeerId peer_id = 2;
+  oneof variant {
+    FileState state = 3;
+    FileChunk chunk = 4;
+  }
 }
 
 message FileState {
-    uint64 id = 1;
-    optional File file = 2;
-    uint64 content_size = 3;
+  uint64 id = 1;
+  optional File file = 2;
+  uint64 content_size = 3;
 }
 
 message FileChunk {
-    uint64 file_id = 1;
-    bytes data = 2;
+  uint64 file_id = 1;
+  bytes data = 2;
 }

crates/proto/proto/git.proto 🔗

@@ -1,254 +1,254 @@
 syntax = "proto3";
 package zed.messages;
 
-import "worktree.proto";
 import "buffer.proto";
+import "worktree.proto";
 
 message GitBranchesResponse {
-    repeated Branch branches = 1;
+  repeated Branch branches = 1;
 }
 
 message UpdateDiffBases {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-
-    enum Mode {
-        // No collaborator is using the unstaged diff.
-        HEAD_ONLY = 0;
-        // No collaborator is using the diff from HEAD.
-        INDEX_ONLY = 1;
-        // Both the unstaged and uncommitted diffs are demanded,
-        // and the contents of the index and HEAD are the same for this path.
-        INDEX_MATCHES_HEAD = 2;
-        // Both the unstaged and uncommitted diffs are demanded,
-        // and the contents of the index and HEAD differ for this path,
-        // where None means the path doesn't exist in that state of the repo.
-        INDEX_AND_HEAD = 3;
-    }
-
-    optional string staged_text = 3;
-    optional string committed_text = 4;
-    Mode mode = 5;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+
+  enum Mode {
+    // No collaborator is using the unstaged diff.
+    HEAD_ONLY = 0;
+    // No collaborator is using the diff from HEAD.
+    INDEX_ONLY = 1;
+    // Both the unstaged and uncommitted diffs are demanded,
+    // and the contents of the index and HEAD are the same for this path.
+    INDEX_MATCHES_HEAD = 2;
+    // Both the unstaged and uncommitted diffs are demanded,
+    // and the contents of the index and HEAD differ for this path,
+    // where None means the path doesn't exist in that state of the repo.
+    INDEX_AND_HEAD = 3;
+  }
+
+  optional string staged_text = 3;
+  optional string committed_text = 4;
+  Mode mode = 5;
 }
 
 message OpenUnstagedDiff {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
 }
 
 message OpenUnstagedDiffResponse {
-    optional string staged_text = 1;
+  optional string staged_text = 1;
 }
 
 message OpenUncommittedDiff {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
 }
 
 message OpenUncommittedDiffResponse {
-    enum Mode {
-        INDEX_MATCHES_HEAD = 0;
-        INDEX_AND_HEAD = 1;
-    }
-    optional string staged_text = 1;
-    optional string committed_text = 2;
-    Mode mode = 3;
+  enum Mode {
+    INDEX_MATCHES_HEAD = 0;
+    INDEX_AND_HEAD = 1;
+  }
+  optional string staged_text = 1;
+  optional string committed_text = 2;
+  Mode mode = 3;
 }
 
 message SetIndexText {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string path = 4;
-    optional string text = 5;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string path = 4;
+  optional string text = 5;
 }
 
 message GetPermalinkToLine {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    Range selection = 3;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  Range selection = 3;
 }
 
 message GetPermalinkToLineResponse {
-    string permalink = 1;
+  string permalink = 1;
 }
 
 message Branch {
-    bool is_head = 1;
-    string ref_name = 2;
-    optional uint64 unix_timestamp = 3;
-    optional GitUpstream upstream = 4;
-    optional CommitSummary most_recent_commit = 5;
+  bool is_head = 1;
+  string ref_name = 2;
+  optional uint64 unix_timestamp = 3;
+  optional GitUpstream upstream = 4;
+  optional CommitSummary most_recent_commit = 5;
 }
 
 message GitUpstream {
-    string ref_name = 1;
-    optional UpstreamTracking tracking = 2;
+  string ref_name = 1;
+  optional UpstreamTracking tracking = 2;
 }
 
 message UpstreamTracking {
-    uint64 ahead = 1;
-    uint64 behind = 2;
+  uint64 ahead = 1;
+  uint64 behind = 2;
 }
 
 message CommitSummary {
-    string sha = 1;
-    string subject = 2;
-    int64 commit_timestamp = 3;
-    string author_name = 4;
+  string sha = 1;
+  string subject = 2;
+  int64 commit_timestamp = 3;
+  string author_name = 4;
 }
 
 message GitBranches {
-    uint64 project_id = 1;
-    ProjectPath repository = 2;
+  uint64 project_id = 1;
+  ProjectPath repository = 2;
 }
 
-
 message UpdateGitBranch {
-    uint64 project_id = 1;
-    string branch_name = 2;
-    ProjectPath repository = 3;
+  uint64 project_id = 1;
+  string branch_name = 2;
+  ProjectPath repository = 3;
 }
 
 message UpdateRepository {
-    uint64 project_id = 1;
-    uint64 id = 2;
-    string abs_path = 3;
-    repeated uint64 entry_ids = 4;
-    optional Branch branch_summary = 5;
-    repeated StatusEntry updated_statuses = 6;
-    repeated string removed_statuses = 7;
-    repeated string current_merge_conflicts = 8;
-    uint64 scan_id = 9;
-    bool is_last_update = 10;
-    optional GitCommitDetails head_commit_details = 11;
-    optional string merge_message = 12;
-    repeated StashEntry stash_entries = 13;
-    optional string remote_upstream_url = 14;
-    optional string remote_origin_url = 15;
+  uint64 project_id = 1;
+  uint64 id = 2;
+  string abs_path = 3;
+  repeated uint64 entry_ids = 4;
+  optional Branch branch_summary = 5;
+  repeated StatusEntry updated_statuses = 6;
+  repeated string removed_statuses = 7;
+  repeated string current_merge_conflicts = 8;
+  uint64 scan_id = 9;
+  bool is_last_update = 10;
+  optional GitCommitDetails head_commit_details = 11;
+  optional string merge_message = 12;
+  repeated StashEntry stash_entries = 13;
+  optional string remote_upstream_url = 14;
+  optional string remote_origin_url = 15;
+  optional string original_repo_abs_path = 16;
 }
 
 message RemoveRepository {
-    uint64 project_id = 1;
-    uint64 id = 2;
+  uint64 project_id = 1;
+  uint64 id = 2;
 }
 
 enum GitStatus {
-    Added = 0;
-    Modified = 1;
-    Conflict = 2;
-    Deleted = 3;
-    Updated = 4;
-    TypeChanged = 5;
-    Renamed = 6;
-    Copied = 7;
-    Unmodified = 8;
+  Added = 0;
+  Modified = 1;
+  Conflict = 2;
+  Deleted = 3;
+  Updated = 4;
+  TypeChanged = 5;
+  Renamed = 6;
+  Copied = 7;
+  Unmodified = 8;
 }
 
 message GitFileStatus {
-    oneof variant {
-        Untracked untracked = 1;
-        Ignored ignored = 2;
-        Unmerged unmerged = 3;
-        Tracked tracked = 4;
-    }
-
-    message Untracked {}
-    message Ignored {}
-    message Unmerged {
-        GitStatus first_head = 1;
-        GitStatus second_head = 2;
-    }
-    message Tracked {
-        GitStatus index_status = 1;
-        GitStatus worktree_status = 2;
-    }
+  oneof variant {
+    Untracked untracked = 1;
+    Ignored ignored = 2;
+    Unmerged unmerged = 3;
+    Tracked tracked = 4;
+  }
+
+  message Untracked {}
+  message Ignored {}
+  message Unmerged {
+    GitStatus first_head = 1;
+    GitStatus second_head = 2;
+  }
+  message Tracked {
+    GitStatus index_status = 1;
+    GitStatus worktree_status = 2;
+  }
 }
 
 message GitGetBranches {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
 }
 
 message GitCreateBranch {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string branch_name = 4;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string branch_name = 4;
 }
 
 message GitChangeBranch {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string branch_name = 4;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string branch_name = 4;
 }
 
 message GitRenameBranch {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    string branch = 3;
-    string new_name = 4;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  string branch = 3;
+  string new_name = 4;
 }
 
 message GitCreateRemote {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    string remote_name = 3;
-    string remote_url = 4;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  string remote_name = 3;
+  string remote_url = 4;
 }
 
 message GitRemoveRemote {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    string remote_name = 3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  string remote_name = 3;
 }
 
 message GitDeleteBranch {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    string branch_name = 3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  string branch_name = 3;
 }
 
 message GitDiff {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    DiffType diff_type = 4;
-    optional string merge_base_ref = 5;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  DiffType diff_type = 4;
+  optional string merge_base_ref = 5;
 
-    enum DiffType {
-        HEAD_TO_WORKTREE = 0;
-        HEAD_TO_INDEX = 1;
-        MERGE_BASE = 2;
-    }
+  enum DiffType {
+    HEAD_TO_WORKTREE = 0;
+    HEAD_TO_INDEX = 1;
+    MERGE_BASE = 2;
+  }
 }
 
 message GitDiffResponse {
-    string diff = 1;
+  string diff = 1;
 }
 
 message GitInit {
-    uint64 project_id = 1;
-    string abs_path = 2;
-    string fallback_branch_name = 3;
+  uint64 project_id = 1;
+  string abs_path = 2;
+  string fallback_branch_name = 3;
 }
 
 message GitClone {
-    uint64 project_id = 1;
-    string abs_path = 2;
-    string remote_repo = 3;
+  uint64 project_id = 1;
+  string abs_path = 2;
+  string remote_repo = 3;
 }
 
 message GitCloneResponse {
-    bool success = 1;
+  bool success = 1;
 }
 
 message CheckForPushedCommits {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
 }
 
 message CheckForPushedCommitsResponse {
@@ -256,338 +256,340 @@ message CheckForPushedCommitsResponse {
 }
 
 message GitShow {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string commit = 4;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string commit = 4;
 }
 
 message GitCommitDetails {
-    string sha = 1;
-    string message = 2;
-    int64 commit_timestamp = 3;
-    string author_email = 4;
-    string author_name = 5;
+  string sha = 1;
+  string message = 2;
+  int64 commit_timestamp = 3;
+  string author_email = 4;
+  string author_name = 5;
 }
 
 message LoadCommitDiff {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string commit = 4;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string commit = 4;
 }
 
 message LoadCommitDiffResponse {
-    repeated CommitFile files = 1;
+  repeated CommitFile files = 1;
 }
 
 message CommitFile {
-    string path = 1;
-    optional string old_text = 2;
-    optional string new_text = 3;
-    bool is_binary = 4;
+  string path = 1;
+  optional string old_text = 2;
+  optional string new_text = 3;
+  bool is_binary = 4;
 }
 
 message GitReset {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string commit = 4;
-    ResetMode mode = 5;
-    enum ResetMode {
-        SOFT = 0;
-        MIXED = 1;
-    }
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string commit = 4;
+  ResetMode mode = 5;
+  enum ResetMode {
+    SOFT = 0;
+    MIXED = 1;
+  }
 }
 
 message GitCheckoutFiles {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string commit = 4;
-    repeated string paths = 5;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string commit = 4;
+  repeated string paths = 5;
 }
 
 message GitFileHistory {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string path = 4;
-    uint64 skip = 5;
-    optional uint64 limit = 6;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string path = 4;
+  uint64 skip = 5;
+  optional uint64 limit = 6;
 }
 
 message GitFileHistoryResponse {
-    repeated FileHistoryEntry entries = 1;
-    string path = 2;
+  repeated FileHistoryEntry entries = 1;
+  string path = 2;
 }
 
 message FileHistoryEntry {
-    string sha = 1;
-    string subject = 2;
-    string message = 3;
-    int64 commit_timestamp = 4;
-    string author_name = 5;
-    string author_email = 6;
+  string sha = 1;
+  string subject = 2;
+  string message = 3;
+  int64 commit_timestamp = 4;
+  string author_name = 5;
+  string author_email = 6;
 }
 
 // Move to `git.proto` once collab's min version is >=0.171.0.
 message StatusEntry {
-    string repo_path = 1;
-    // Can be removed once collab's min version is >=0.171.0.
-    GitStatus simple_status = 2;
-    GitFileStatus status = 3;
+  string repo_path = 1;
+  // Can be removed once collab's min version is >=0.171.0.
+  GitStatus simple_status = 2;
+  GitFileStatus status = 3;
+  optional uint32 diff_stat_added = 4;
+  optional uint32 diff_stat_deleted = 5;
 }
 
 message StashEntry {
-    bytes oid = 1;
-    string message = 2;
-    optional string branch = 3;
-    uint64 index = 4;
-    int64 timestamp = 5;
+  bytes oid = 1;
+  string message = 2;
+  optional string branch = 3;
+  uint64 index = 4;
+  int64 timestamp = 5;
 }
 
 message Stage {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    repeated string paths = 4;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  repeated string paths = 4;
 }
 
 message Unstage {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    repeated string paths = 4;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  repeated string paths = 4;
 }
 
 message Stash {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    repeated string paths = 3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  repeated string paths = 3;
 }
 
 message StashPop {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    optional uint64 stash_index = 3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  optional uint64 stash_index = 3;
 }
 
 message StashApply {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    optional uint64 stash_index = 3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  optional uint64 stash_index = 3;
 }
 
 message StashDrop {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    optional uint64 stash_index = 3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  optional uint64 stash_index = 3;
 }
 
 message Commit {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    optional string name = 4;
-    optional string email = 5;
-    string message = 6;
-    optional CommitOptions options = 7;
-    reserved 8;
-    uint64 askpass_id = 9;
-
-    message CommitOptions {
-        bool amend = 1;
-        bool signoff = 2;
-    }
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  optional string name = 4;
+  optional string email = 5;
+  string message = 6;
+  optional CommitOptions options = 7;
+  reserved 8;
+  uint64 askpass_id = 9;
+
+  message CommitOptions {
+    bool amend = 1;
+    bool signoff = 2;
+  }
 }
 
 message OpenCommitMessageBuffer {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
 }
 
 message Push {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string remote_name = 4;
-    string branch_name = 5;
-    optional PushOptions options = 6;
-    uint64 askpass_id = 7;
-    string remote_branch_name = 8;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string remote_name = 4;
+  string branch_name = 5;
+  optional PushOptions options = 6;
+  uint64 askpass_id = 7;
+  string remote_branch_name = 8;
 
-    enum PushOptions {
-        SET_UPSTREAM = 0;
-        FORCE = 1;
-    }
+  enum PushOptions {
+    SET_UPSTREAM = 0;
+    FORCE = 1;
+  }
 }
 
 message Fetch {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    uint64 askpass_id = 4;
-    optional string remote = 5;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  uint64 askpass_id = 4;
+  optional string remote = 5;
 }
 
 message GetRemotes {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    optional string branch_name = 4;
-    bool is_push = 5;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  optional string branch_name = 4;
+  bool is_push = 5;
 }
 
 message GetRemotesResponse {
-    repeated Remote remotes = 1;
+  repeated Remote remotes = 1;
 
-    message Remote {
-        string name = 1;
-    }
+  message Remote {
+    string name = 1;
+  }
 }
 
 message Pull {
-    uint64 project_id = 1;
-    reserved 2;
-    uint64 repository_id = 3;
-    string remote_name = 4;
-    optional string branch_name = 5;
-    uint64 askpass_id = 6;
-    bool rebase = 7;
+  uint64 project_id = 1;
+  reserved 2;
+  uint64 repository_id = 3;
+  string remote_name = 4;
+  optional string branch_name = 5;
+  uint64 askpass_id = 6;
+  bool rebase = 7;
 }
 
 message RemoteMessageResponse {
-    string stdout = 1;
-    string stderr = 2;
+  string stdout = 1;
+  string stderr = 2;
 }
 
 message BlameBuffer {
-    uint64 project_id = 1;
-    uint64 buffer_id = 2;
-    repeated VectorClockEntry version = 3;
+  uint64 project_id = 1;
+  uint64 buffer_id = 2;
+  repeated VectorClockEntry version = 3;
 }
 
 message BlameEntry {
-    bytes sha = 1;
+  bytes sha = 1;
 
-    uint32 start_line = 2;
-    uint32 end_line = 3;
-    uint32 original_line_number = 4;
+  uint32 start_line = 2;
+  uint32 end_line = 3;
+  uint32 original_line_number = 4;
 
-    optional string author = 5;
-    optional string author_mail = 6;
-    optional int64 author_time = 7;
-    optional string author_tz = 8;
+  optional string author = 5;
+  optional string author_mail = 6;
+  optional int64 author_time = 7;
+  optional string author_tz = 8;
 
-    optional string committer = 9;
-    optional string committer_mail = 10;
-    optional int64 committer_time = 11;
-    optional string committer_tz = 12;
+  optional string committer = 9;
+  optional string committer_mail = 10;
+  optional int64 committer_time = 11;
+  optional string committer_tz = 12;
 
-    optional string summary = 13;
-    optional string previous = 14;
+  optional string summary = 13;
+  optional string previous = 14;
 
-    string filename = 15;
+  string filename = 15;
 }
 
 message CommitMessage {
-    bytes oid = 1;
-    string message = 2;
+  bytes oid = 1;
+  string message = 2;
 }
 
 message CommitPermalink {
-    bytes oid = 1;
-    string permalink = 2;
+  bytes oid = 1;
+  string permalink = 2;
 }
 
 message BlameBufferResponse {
-    message BlameResponse {
-        repeated BlameEntry entries = 1;
-        repeated CommitMessage messages = 2;
-        reserved 3;
-        reserved 4;
-    }
+  message BlameResponse {
+    repeated BlameEntry entries = 1;
+    repeated CommitMessage messages = 2;
+    reserved 3;
+    reserved 4;
+  }
 
-    optional BlameResponse blame_response = 5;
+  optional BlameResponse blame_response = 5;
 
-    reserved 1 to 4;
+  reserved 1 to 4;
 }
 
 message GetDefaultBranch {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
 }
 
 message GetDefaultBranchResponse {
-    optional string branch = 1;
+  optional string branch = 1;
 }
 
 message GetTreeDiff {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    bool is_merge = 3;
-    string base = 4;
-    string head = 5;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  bool is_merge = 3;
+  string base = 4;
+  string head = 5;
 }
 
 message GetTreeDiffResponse {
-    repeated TreeDiffStatus entries = 1;
+  repeated TreeDiffStatus entries = 1;
 }
 
 message TreeDiffStatus {
-    enum Status {
-        ADDED = 0;
-        MODIFIED = 1;
-        DELETED = 2;
-    }
+  enum Status {
+    ADDED = 0;
+    MODIFIED = 1;
+    DELETED = 2;
+  }
 
-    Status status = 1;
-    string path = 2;
-    optional string oid = 3;
+  Status status = 1;
+  string path = 2;
+  optional string oid = 3;
 }
 
 message GetBlobContent {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    string oid =3;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  string oid = 3;
 }
 
 message GetBlobContentResponse {
-    string content = 1;
+  string content = 1;
 }
 
 message GitGetWorktrees {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
 }
 
 message GitWorktreesResponse {
-    repeated Worktree worktrees = 1;
+  repeated Worktree worktrees = 1;
 }
 
 message Worktree {
-    string path = 1;
-    string ref_name = 2;
-    string sha = 3;
+  string path = 1;
+  string ref_name = 2;
+  string sha = 3;
 }
 
 message GitCreateWorktree {
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    string name = 3;
-    string directory = 4;
-    optional string commit = 5;
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  string name = 3;
+  string directory = 4;
+  optional string commit = 5;
 }
 
 message RunGitHook {
-    enum GitHook {
-        PRE_COMMIT = 0;
-        reserved 1;
-    }
-
-    uint64 project_id = 1;
-    uint64 repository_id = 2;
-    GitHook hook = 3;
+  enum GitHook {
+    PRE_COMMIT = 0;
+    reserved 1;
+  }
+
+  uint64 project_id = 1;
+  uint64 repository_id = 2;
+  GitHook hook = 3;
 }

crates/proto/proto/image.proto 🔗

@@ -5,32 +5,32 @@ import "core.proto";
 import "worktree.proto";
 
 message OpenImageByPath {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    string path = 3;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  string path = 3;
 }
 
 message OpenImageResponse {
-    uint64 image_id = 1;
+  uint64 image_id = 1;
 }
 
 message CreateImageForPeer {
-    uint64 project_id = 1;
-    PeerId peer_id = 2;
-    oneof variant {
-        ImageState state = 3;
-        ImageChunk chunk = 4;
-    }
+  uint64 project_id = 1;
+  PeerId peer_id = 2;
+  oneof variant {
+    ImageState state = 3;
+    ImageChunk chunk = 4;
+  }
 }
 
 message ImageState {
-    uint64 id = 1;
-    optional File file = 2;
-    uint64 content_size = 3;
-    string format = 4; // e.g., "png", "jpeg", "webp", etc.
+  uint64 id = 1;
+  optional File file = 2;
+  uint64 content_size = 3;
+  string format = 4; // e.g., "png", "jpeg", "webp", etc.
 }
 
 message ImageChunk {
-    uint64 image_id = 1;
-    bytes data = 2;
+  uint64 image_id = 1;
+  bytes data = 2;
 }

crates/proto/proto/lsp.proto 🔗

@@ -2,8 +2,6 @@ syntax = "proto3";
 package zed.messages;
 
 import "buffer.proto";
-import "core.proto";
-import "worktree.proto";
 
 message GetDefinition {
   uint64 project_id = 1;

crates/proto/proto/notification.proto 🔗

@@ -2,36 +2,36 @@ syntax = "proto3";
 package zed.messages;
 
 message GetNotifications {
-    optional uint64 before_id = 1;
+  optional uint64 before_id = 1;
 }
 
 message AddNotification {
-    Notification notification = 1;
+  Notification notification = 1;
 }
 
 message GetNotificationsResponse {
-    repeated Notification notifications = 1;
-    bool done = 2;
+  repeated Notification notifications = 1;
+  bool done = 2;
 }
 
 message DeleteNotification {
-    uint64 notification_id = 1;
+  uint64 notification_id = 1;
 }
 
 message UpdateNotification {
-    Notification notification = 1;
+  Notification notification = 1;
 }
 
 message MarkNotificationRead {
-    uint64 notification_id = 1;
+  uint64 notification_id = 1;
 }
 
 message Notification {
-    uint64 id = 1;
-    uint64 timestamp = 2;
-    string kind = 3;
-    optional uint64 entity_id = 4;
-    string content = 5;
-    bool is_read = 6;
-    optional bool response = 7;
+  uint64 id = 1;
+  uint64 timestamp = 2;
+  string kind = 3;
+  optional uint64 entity_id = 4;
+  string content = 5;
+  bool is_read = 6;
+  optional bool response = 7;
 }

crates/proto/proto/task.proto 🔗

@@ -4,57 +4,57 @@ package zed.messages;
 import "buffer.proto";
 
 message TaskContextForLocation {
-    uint64 project_id = 1;
-    Location location = 2;
-    map<string, string> task_variables = 3;
+  uint64 project_id = 1;
+  Location location = 2;
+  map<string, string> task_variables = 3;
 }
 
 message TaskContext {
-    optional string cwd = 1;
-    map<string, string> task_variables = 2;
-    map<string, string> project_env = 3;
+  optional string cwd = 1;
+  map<string, string> task_variables = 2;
+  map<string, string> project_env = 3;
 }
 
 message Shell {
-    message WithArguments {
-        string program = 1;
-        repeated string args = 2;
-    }
+  message WithArguments {
+    string program = 1;
+    repeated string args = 2;
+  }
 
-    oneof shell_type {
-        System system = 1;
-        string program = 2;
-        WithArguments with_arguments = 3;
-    }
+  oneof shell_type {
+    System system = 1;
+    string program = 2;
+    WithArguments with_arguments = 3;
+  }
 }
 
 message System {}
 
 enum RevealStrategy {
-    RevealAlways = 0;
-    RevealNever = 1;
+  RevealAlways = 0;
+  RevealNever = 1;
 }
 
 enum HideStrategy {
-    HideAlways = 0;
-    HideNever = 1;
-    HideOnSuccess = 2;
+  HideAlways = 0;
+  HideNever = 1;
+  HideOnSuccess = 2;
 }
 
 message SpawnInTerminal {
-    string label = 1;
-    optional string command = 2;
-    repeated string args = 3;
-    map<string, string> env = 4;
-    optional string cwd = 5;
+  string label = 1;
+  optional string command = 2;
+  repeated string args = 3;
+  map<string, string> env = 4;
+  optional string cwd = 5;
 }
 
 message GetDirectoryEnvironment {
-    uint64 project_id = 1;
-    Shell shell = 2;
-    string directory = 3;
+  uint64 project_id = 1;
+  Shell shell = 2;
+  string directory = 3;
 }
 
 message DirectoryEnvironment {
-    map<string, string> environment = 1;
+  map<string, string> environment = 1;
 }

crates/proto/proto/toolchain.proto 🔗

@@ -2,58 +2,58 @@ syntax = "proto3";
 package zed.messages;
 
 message ListToolchains {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    string language_name = 3;
-    optional string path = 4;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  string language_name = 3;
+  optional string path = 4;
 }
 
 message Toolchain {
-    string name = 1;
-    string path = 2;
-    string raw_json = 3;
+  string name = 1;
+  string path = 2;
+  string raw_json = 3;
 }
 
 message ToolchainGroup {
-    uint64 start_index = 1;
-    string name = 2;
+  uint64 start_index = 1;
+  string name = 2;
 }
 
 message ListToolchainsResponse {
-    repeated Toolchain toolchains = 1;
-    bool has_values = 2;
-    repeated ToolchainGroup groups = 3;
-    optional string relative_worktree_path = 4;
+  repeated Toolchain toolchains = 1;
+  bool has_values = 2;
+  repeated ToolchainGroup groups = 3;
+  optional string relative_worktree_path = 4;
 }
 
 message ActivateToolchain {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    Toolchain toolchain = 3;
-    string language_name = 4;
-    optional string path = 5;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  Toolchain toolchain = 3;
+  string language_name = 4;
+  optional string path = 5;
 }
 
 message ActiveToolchain {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    string language_name = 3;
-    optional string path = 4;
+  uint64 project_id = 1;
+  uint64 worktree_id = 2;
+  string language_name = 3;
+  optional string path = 4;
 }
 
 message ActiveToolchainResponse {
-    optional Toolchain toolchain = 1;
+  optional Toolchain toolchain = 1;
 }
 
 message ResolveToolchain {
-    uint64 project_id = 1;
-    string abs_path = 2;
-    string language_name = 3;
+  uint64 project_id = 1;
+  string abs_path = 2;
+  string language_name = 3;
 }
 
 message ResolveToolchainResponse {
-    oneof response {
-        Toolchain toolchain = 1;
-        string error = 2;
-    }
+  oneof response {
+    Toolchain toolchain = 1;
+    string error = 2;
+  }
 }

crates/proto/proto/zed.proto 🔗

@@ -18,493 +18,492 @@ import "toolchain.proto";
 import "worktree.proto";
 
 // Looking for a number? Search "// current max"
-
 message Envelope {
-    uint32 id = 1;
-    optional uint32 responding_to = 2;
-    optional PeerId original_sender_id = 3;
-    optional uint32 ack_id = 266;
-
-    oneof payload {
-        Hello hello = 4;
-        Ack ack = 5;
-        Error error = 6;
-        Ping ping = 7;
-        Test test = 8;
-        EndStream end_stream = 165;
-
-        CreateRoom create_room = 9;
-        CreateRoomResponse create_room_response = 10;
-        JoinRoom join_room = 11;
-        JoinRoomResponse join_room_response = 12;
-        RejoinRoom rejoin_room = 13;
-        RejoinRoomResponse rejoin_room_response = 14;
-        LeaveRoom leave_room = 15;
-        Call call = 16;
-        IncomingCall incoming_call = 17;
-        CallCanceled call_canceled = 18;
-        CancelCall cancel_call = 19;
-        DeclineCall decline_call = 20;
-        UpdateParticipantLocation update_participant_location = 21;
-        RoomUpdated room_updated = 22;
-
-        ShareProject share_project = 23;
-        ShareProjectResponse share_project_response = 24;
-        UnshareProject unshare_project = 25;
-        JoinProject join_project = 26;
-        JoinProjectResponse join_project_response = 27;
-        LeaveProject leave_project = 28;
-        AddProjectCollaborator add_project_collaborator = 29;
-        UpdateProjectCollaborator update_project_collaborator = 30;
-        RemoveProjectCollaborator remove_project_collaborator = 31;
-
-        GetDefinition get_definition = 32;
-        GetDefinitionResponse get_definition_response = 33;
-        GetDeclaration get_declaration = 237;
-        GetDeclarationResponse get_declaration_response = 238;
-        GetTypeDefinition get_type_definition = 34;
-        GetTypeDefinitionResponse get_type_definition_response = 35;
-
-        GetReferences get_references = 36;
-        GetReferencesResponse get_references_response = 37;
-        GetDocumentHighlights get_document_highlights = 38;
-        GetDocumentHighlightsResponse get_document_highlights_response = 39;
-        GetProjectSymbols get_project_symbols = 40;
-        GetProjectSymbolsResponse get_project_symbols_response = 41;
-        OpenBufferForSymbol open_buffer_for_symbol = 42;
-        OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43;
-
-        UpdateProject update_project = 44;
-        UpdateWorktree update_worktree = 45;
-
-        CreateProjectEntry create_project_entry = 46;
-        RenameProjectEntry rename_project_entry = 47;
-        CopyProjectEntry copy_project_entry = 48;
-        DeleteProjectEntry delete_project_entry = 49;
-        ProjectEntryResponse project_entry_response = 50;
-        ExpandProjectEntry expand_project_entry = 51;
-        ExpandProjectEntryResponse expand_project_entry_response = 52;
-        ExpandAllForProjectEntry expand_all_for_project_entry = 291;
-        ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292;
-        UpdateDiagnosticSummary update_diagnostic_summary = 53;
-        StartLanguageServer start_language_server = 54;
-        UpdateLanguageServer update_language_server = 55;
-
-        OpenBufferById open_buffer_by_id = 56;
-        OpenBufferByPath open_buffer_by_path = 57;
-        OpenBufferResponse open_buffer_response = 58;
-        CreateBufferForPeer create_buffer_for_peer = 59;
-        UpdateBuffer update_buffer = 60;
-        UpdateBufferFile update_buffer_file = 61;
-        SaveBuffer save_buffer = 62;
-        BufferSaved buffer_saved = 63;
-        BufferReloaded buffer_reloaded = 64;
-        ReloadBuffers reload_buffers = 65;
-        ReloadBuffersResponse reload_buffers_response = 66;
-        SynchronizeBuffers synchronize_buffers = 67;
-        SynchronizeBuffersResponse synchronize_buffers_response = 68;
-        FormatBuffers format_buffers = 69;
-        FormatBuffersResponse format_buffers_response = 70;
-        GetCompletions get_completions = 71;
-        GetCompletionsResponse get_completions_response = 72;
-        ResolveCompletionDocumentation resolve_completion_documentation = 73;
-        ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74;
-        ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75;
-        ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76;
-        GetCodeActions get_code_actions = 77;
-        GetCodeActionsResponse get_code_actions_response = 78;
-        GetHover get_hover = 79;
-        GetHoverResponse get_hover_response = 80;
-        ApplyCodeAction apply_code_action = 81;
-        ApplyCodeActionResponse apply_code_action_response = 82;
-        PrepareRename prepare_rename = 83;
-        PrepareRenameResponse prepare_rename_response = 84;
-        PerformRename perform_rename = 85;
-        PerformRenameResponse perform_rename_response = 86;
-
-        UpdateContacts update_contacts = 89;
-        ShowContacts show_contacts = 91;
-
-        GetUsers get_users = 92;
-        FuzzySearchUsers fuzzy_search_users = 93;
-        UsersResponse users_response = 94;
-        RequestContact request_contact = 95;
-        RespondToContactRequest respond_to_contact_request = 96;
-        RemoveContact remove_contact = 97;
-
-        Follow follow = 98;
-        FollowResponse follow_response = 99;
-        UpdateFollowers update_followers = 100;
-        Unfollow unfollow = 101;
-        UpdateDiffBases update_diff_bases = 104;
-
-        OnTypeFormatting on_type_formatting = 105;
-        OnTypeFormattingResponse on_type_formatting_response = 106;
-
-        UpdateWorktreeSettings update_worktree_settings = 107;
-
-        InlayHints inlay_hints = 108;
-        InlayHintsResponse inlay_hints_response = 109;
-        ResolveInlayHint resolve_inlay_hint = 110;
-        ResolveInlayHintResponse resolve_inlay_hint_response = 111;
-        RefreshInlayHints refresh_inlay_hints = 112;
-
-        CreateChannel create_channel = 113;
-        CreateChannelResponse create_channel_response = 114;
-        InviteChannelMember invite_channel_member = 115;
-        RemoveChannelMember remove_channel_member = 116;
-        RespondToChannelInvite respond_to_channel_invite = 117;
-        UpdateChannels update_channels = 118;
-        JoinChannel join_channel = 119;
-        DeleteChannel delete_channel = 120;
-        GetChannelMembers get_channel_members = 121;
-        GetChannelMembersResponse get_channel_members_response = 122;
-        SetChannelMemberRole set_channel_member_role = 123;
-        RenameChannel rename_channel = 124;
-        RenameChannelResponse rename_channel_response = 125;
-        SubscribeToChannels subscribe_to_channels = 207;
-
-        JoinChannelBuffer join_channel_buffer = 126;
-        JoinChannelBufferResponse join_channel_buffer_response = 127;
-        UpdateChannelBuffer update_channel_buffer = 128;
-        LeaveChannelBuffer leave_channel_buffer = 129;
-        UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130;
-        RejoinChannelBuffers rejoin_channel_buffers = 131;
-        RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132;
-        AckBufferOperation ack_buffer_operation = 133;
-
-        JoinChannelChat join_channel_chat = 134;
-        JoinChannelChatResponse join_channel_chat_response = 135;
-        LeaveChannelChat leave_channel_chat = 136;
-        SendChannelMessage send_channel_message = 137;
-        SendChannelMessageResponse send_channel_message_response = 138;
-        ChannelMessageSent channel_message_sent = 139;
-        GetChannelMessages get_channel_messages = 140;
-        GetChannelMessagesResponse get_channel_messages_response = 141;
-        RemoveChannelMessage remove_channel_message = 142;
-        AckChannelMessage ack_channel_message = 143;
-        GetChannelMessagesById get_channel_messages_by_id = 144;
-
-        MoveChannel move_channel = 147;
-        ReorderChannel reorder_channel = 349;
-        SetChannelVisibility set_channel_visibility = 148;
-
-        AddNotification add_notification = 149;
-        GetNotifications get_notifications = 150;
-        GetNotificationsResponse get_notifications_response = 151;
-        DeleteNotification delete_notification = 152;
-        MarkNotificationRead mark_notification_read = 153;
-        LspExtExpandMacro lsp_ext_expand_macro = 154;
-        LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155;
-        SetRoomParticipantRole set_room_participant_role = 156;
-
-        UpdateUserChannels update_user_channels = 157;
-
-        GetImplementation get_implementation = 162;
-        GetImplementationResponse get_implementation_response = 163;
-
-        UpdateChannelMessage update_channel_message = 170;
-        ChannelMessageUpdate channel_message_update = 171;
-
-        BlameBuffer blame_buffer = 172;
-        BlameBufferResponse blame_buffer_response = 173;
-
-        UpdateNotification update_notification = 174;
-
-        RestartLanguageServers restart_language_servers = 208;
-
-        RejoinRemoteProjects rejoin_remote_projects = 186;
-        RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187;
+  uint32 id = 1;
+  optional uint32 responding_to = 2;
+  optional PeerId original_sender_id = 3;
+  optional uint32 ack_id = 266;
+
+  oneof payload {
+    Hello hello = 4;
+    Ack ack = 5;
+    Error error = 6;
+    Ping ping = 7;
+    Test test = 8;
+    EndStream end_stream = 165;
+
+    CreateRoom create_room = 9;
+    CreateRoomResponse create_room_response = 10;
+    JoinRoom join_room = 11;
+    JoinRoomResponse join_room_response = 12;
+    RejoinRoom rejoin_room = 13;
+    RejoinRoomResponse rejoin_room_response = 14;
+    LeaveRoom leave_room = 15;
+    Call call = 16;
+    IncomingCall incoming_call = 17;
+    CallCanceled call_canceled = 18;
+    CancelCall cancel_call = 19;
+    DeclineCall decline_call = 20;
+    UpdateParticipantLocation update_participant_location = 21;
+    RoomUpdated room_updated = 22;
+
+    ShareProject share_project = 23;
+    ShareProjectResponse share_project_response = 24;
+    UnshareProject unshare_project = 25;
+    JoinProject join_project = 26;
+    JoinProjectResponse join_project_response = 27;
+    LeaveProject leave_project = 28;
+    AddProjectCollaborator add_project_collaborator = 29;
+    UpdateProjectCollaborator update_project_collaborator = 30;
+    RemoveProjectCollaborator remove_project_collaborator = 31;
+
+    GetDefinition get_definition = 32;
+    GetDefinitionResponse get_definition_response = 33;
+    GetDeclaration get_declaration = 237;
+    GetDeclarationResponse get_declaration_response = 238;
+    GetTypeDefinition get_type_definition = 34;
+    GetTypeDefinitionResponse get_type_definition_response = 35;
+
+    GetReferences get_references = 36;
+    GetReferencesResponse get_references_response = 37;
+    GetDocumentHighlights get_document_highlights = 38;
+    GetDocumentHighlightsResponse get_document_highlights_response = 39;
+    GetProjectSymbols get_project_symbols = 40;
+    GetProjectSymbolsResponse get_project_symbols_response = 41;
+    OpenBufferForSymbol open_buffer_for_symbol = 42;
+    OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43;
+
+    UpdateProject update_project = 44;
+    UpdateWorktree update_worktree = 45;
+
+    CreateProjectEntry create_project_entry = 46;
+    RenameProjectEntry rename_project_entry = 47;
+    CopyProjectEntry copy_project_entry = 48;
+    DeleteProjectEntry delete_project_entry = 49;
+    ProjectEntryResponse project_entry_response = 50;
+    ExpandProjectEntry expand_project_entry = 51;
+    ExpandProjectEntryResponse expand_project_entry_response = 52;
+    ExpandAllForProjectEntry expand_all_for_project_entry = 291;
+    ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292;
+    UpdateDiagnosticSummary update_diagnostic_summary = 53;
+    StartLanguageServer start_language_server = 54;
+    UpdateLanguageServer update_language_server = 55;
+
+    OpenBufferById open_buffer_by_id = 56;
+    OpenBufferByPath open_buffer_by_path = 57;
+    OpenBufferResponse open_buffer_response = 58;
+    CreateBufferForPeer create_buffer_for_peer = 59;
+    UpdateBuffer update_buffer = 60;
+    UpdateBufferFile update_buffer_file = 61;
+    SaveBuffer save_buffer = 62;
+    BufferSaved buffer_saved = 63;
+    BufferReloaded buffer_reloaded = 64;
+    ReloadBuffers reload_buffers = 65;
+    ReloadBuffersResponse reload_buffers_response = 66;
+    SynchronizeBuffers synchronize_buffers = 67;
+    SynchronizeBuffersResponse synchronize_buffers_response = 68;
+    FormatBuffers format_buffers = 69;
+    FormatBuffersResponse format_buffers_response = 70;
+    GetCompletions get_completions = 71;
+    GetCompletionsResponse get_completions_response = 72;
+    ResolveCompletionDocumentation resolve_completion_documentation = 73;
+    ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74;
+    ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75;
+    ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76;
+    GetCodeActions get_code_actions = 77;
+    GetCodeActionsResponse get_code_actions_response = 78;
+    GetHover get_hover = 79;
+    GetHoverResponse get_hover_response = 80;
+    ApplyCodeAction apply_code_action = 81;
+    ApplyCodeActionResponse apply_code_action_response = 82;
+    PrepareRename prepare_rename = 83;
+    PrepareRenameResponse prepare_rename_response = 84;
+    PerformRename perform_rename = 85;
+    PerformRenameResponse perform_rename_response = 86;
+
+    UpdateContacts update_contacts = 89;
+    ShowContacts show_contacts = 91;
+
+    GetUsers get_users = 92;
+    FuzzySearchUsers fuzzy_search_users = 93;
+    UsersResponse users_response = 94;
+    RequestContact request_contact = 95;
+    RespondToContactRequest respond_to_contact_request = 96;
+    RemoveContact remove_contact = 97;
+
+    Follow follow = 98;
+    FollowResponse follow_response = 99;
+    UpdateFollowers update_followers = 100;
+    Unfollow unfollow = 101;
+    UpdateDiffBases update_diff_bases = 104;
+
+    OnTypeFormatting on_type_formatting = 105;
+    OnTypeFormattingResponse on_type_formatting_response = 106;
+
+    UpdateWorktreeSettings update_worktree_settings = 107;
+
+    InlayHints inlay_hints = 108;
+    InlayHintsResponse inlay_hints_response = 109;
+    ResolveInlayHint resolve_inlay_hint = 110;
+    ResolveInlayHintResponse resolve_inlay_hint_response = 111;
+    RefreshInlayHints refresh_inlay_hints = 112;
+
+    CreateChannel create_channel = 113;
+    CreateChannelResponse create_channel_response = 114;
+    InviteChannelMember invite_channel_member = 115;
+    RemoveChannelMember remove_channel_member = 116;
+    RespondToChannelInvite respond_to_channel_invite = 117;
+    UpdateChannels update_channels = 118;
+    JoinChannel join_channel = 119;
+    DeleteChannel delete_channel = 120;
+    GetChannelMembers get_channel_members = 121;
+    GetChannelMembersResponse get_channel_members_response = 122;
+    SetChannelMemberRole set_channel_member_role = 123;
+    RenameChannel rename_channel = 124;
+    RenameChannelResponse rename_channel_response = 125;
+    SubscribeToChannels subscribe_to_channels = 207;
+
+    JoinChannelBuffer join_channel_buffer = 126;
+    JoinChannelBufferResponse join_channel_buffer_response = 127;
+    UpdateChannelBuffer update_channel_buffer = 128;
+    LeaveChannelBuffer leave_channel_buffer = 129;
+    UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130;
+    RejoinChannelBuffers rejoin_channel_buffers = 131;
+    RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132;
+    AckBufferOperation ack_buffer_operation = 133;
+
+    JoinChannelChat join_channel_chat = 134;
+    JoinChannelChatResponse join_channel_chat_response = 135;
+    LeaveChannelChat leave_channel_chat = 136;
+    SendChannelMessage send_channel_message = 137;
+    SendChannelMessageResponse send_channel_message_response = 138;
+    ChannelMessageSent channel_message_sent = 139;
+    GetChannelMessages get_channel_messages = 140;
+    GetChannelMessagesResponse get_channel_messages_response = 141;
+    RemoveChannelMessage remove_channel_message = 142;
+    AckChannelMessage ack_channel_message = 143;
+    GetChannelMessagesById get_channel_messages_by_id = 144;
+
+    MoveChannel move_channel = 147;
+    ReorderChannel reorder_channel = 349;
+    SetChannelVisibility set_channel_visibility = 148;
+
+    AddNotification add_notification = 149;
+    GetNotifications get_notifications = 150;
+    GetNotificationsResponse get_notifications_response = 151;
+    DeleteNotification delete_notification = 152;
+    MarkNotificationRead mark_notification_read = 153;
+    LspExtExpandMacro lsp_ext_expand_macro = 154;
+    LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155;
+    SetRoomParticipantRole set_room_participant_role = 156;
+
+    UpdateUserChannels update_user_channels = 157;
+
+    GetImplementation get_implementation = 162;
+    GetImplementationResponse get_implementation_response = 163;
+
+    UpdateChannelMessage update_channel_message = 170;
+    ChannelMessageUpdate channel_message_update = 171;
+
+    BlameBuffer blame_buffer = 172;
+    BlameBufferResponse blame_buffer_response = 173;
+
+    UpdateNotification update_notification = 174;
+
+    RestartLanguageServers restart_language_servers = 208;
+
+    RejoinRemoteProjects rejoin_remote_projects = 186;
+    RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187;
 
-        OpenNewBuffer open_new_buffer = 196;
+    OpenNewBuffer open_new_buffer = 196;
 
-        TaskContextForLocation task_context_for_location = 203;
-        TaskContext task_context = 204;
+    TaskContextForLocation task_context_for_location = 203;
+    TaskContext task_context = 204;
 
-        LinkedEditingRange linked_editing_range = 209;
-        LinkedEditingRangeResponse linked_editing_range_response = 210;
+    LinkedEditingRange linked_editing_range = 209;
+    LinkedEditingRangeResponse linked_editing_range_response = 210;
 
-        AdvertiseContexts advertise_contexts = 211;
-        OpenContext open_context = 212;
-        OpenContextResponse open_context_response = 213;
-        CreateContext create_context = 232;
-        CreateContextResponse create_context_response = 233;
-        UpdateContext update_context = 214;
-        SynchronizeContexts synchronize_contexts = 215;
-        SynchronizeContextsResponse synchronize_contexts_response = 216;
+    AdvertiseContexts advertise_contexts = 211;
+    OpenContext open_context = 212;
+    OpenContextResponse open_context_response = 213;
+    CreateContext create_context = 232;
+    CreateContextResponse create_context_response = 233;
+    UpdateContext update_context = 214;
+    SynchronizeContexts synchronize_contexts = 215;
+    SynchronizeContextsResponse synchronize_contexts_response = 216;
 
-        GetSignatureHelp get_signature_help = 217;
-        GetSignatureHelpResponse get_signature_help_response = 218;
+    GetSignatureHelp get_signature_help = 217;
+    GetSignatureHelpResponse get_signature_help_response = 218;
 
-        ListRemoteDirectory list_remote_directory = 219;
-        ListRemoteDirectoryResponse list_remote_directory_response = 220;
-        AddWorktree add_worktree = 222;
-        AddWorktreeResponse add_worktree_response = 223;
+    ListRemoteDirectory list_remote_directory = 219;
+    ListRemoteDirectoryResponse list_remote_directory_response = 220;
+    AddWorktree add_worktree = 222;
+    AddWorktreeResponse add_worktree_response = 223;
 
-        LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241;
-        LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242;
+    LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241;
+    LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242;
 
-        FindSearchCandidates find_search_candidates = 243;
+    FindSearchCandidates find_search_candidates = 243;
 
-        CloseBuffer close_buffer = 245;
+    CloseBuffer close_buffer = 245;
 
-        ShutdownRemoteServer shutdown_remote_server = 257;
+    ShutdownRemoteServer shutdown_remote_server = 257;
 
-        RemoveWorktree remove_worktree = 258;
+    RemoveWorktree remove_worktree = 258;
 
-        LanguageServerLog language_server_log = 260;
+    LanguageServerLog language_server_log = 260;
 
-        Toast toast = 261;
-        HideToast hide_toast = 262;
+    Toast toast = 261;
+    HideToast hide_toast = 262;
 
-        OpenServerSettings open_server_settings = 263;
+    OpenServerSettings open_server_settings = 263;
 
-        GetPermalinkToLine get_permalink_to_line = 264;
-        GetPermalinkToLineResponse get_permalink_to_line_response = 265;
+    GetPermalinkToLine get_permalink_to_line = 264;
+    GetPermalinkToLineResponse get_permalink_to_line_response = 265;
 
-        FlushBufferedMessages flush_buffered_messages = 267;
+    FlushBufferedMessages flush_buffered_messages = 267;
 
-        LanguageServerPromptRequest language_server_prompt_request = 268;
-        LanguageServerPromptResponse language_server_prompt_response = 269;
+    LanguageServerPromptRequest language_server_prompt_request = 268;
+    LanguageServerPromptResponse language_server_prompt_response = 269;
 
-        GitBranchesResponse git_branches_response = 271;
+    GitBranchesResponse git_branches_response = 271;
 
-        UpdateGitBranch update_git_branch = 272;
+    UpdateGitBranch update_git_branch = 272;
 
-        ListToolchains list_toolchains = 273;
-        ListToolchainsResponse list_toolchains_response = 274;
-        ActivateToolchain activate_toolchain = 275;
-        ActiveToolchain active_toolchain = 276;
-        ActiveToolchainResponse active_toolchain_response = 277;
+    ListToolchains list_toolchains = 273;
+    ListToolchainsResponse list_toolchains_response = 274;
+    ActivateToolchain activate_toolchain = 275;
+    ActiveToolchain active_toolchain = 276;
+    ActiveToolchainResponse active_toolchain_response = 277;
 
-        GetPathMetadata get_path_metadata = 278;
-        GetPathMetadataResponse get_path_metadata_response = 279;
+    GetPathMetadata get_path_metadata = 278;
+    GetPathMetadataResponse get_path_metadata_response = 279;
 
-        CancelLanguageServerWork cancel_language_server_work = 282;
+    CancelLanguageServerWork cancel_language_server_work = 282;
 
-        LspExtOpenDocs lsp_ext_open_docs = 283;
-        LspExtOpenDocsResponse lsp_ext_open_docs_response = 284;
+    LspExtOpenDocs lsp_ext_open_docs = 283;
+    LspExtOpenDocsResponse lsp_ext_open_docs_response = 284;
 
-        SyncExtensions sync_extensions = 285;
-        SyncExtensionsResponse sync_extensions_response = 286;
-        InstallExtension install_extension = 287;
+    SyncExtensions sync_extensions = 285;
+    SyncExtensionsResponse sync_extensions_response = 286;
+    InstallExtension install_extension = 287;
 
-        OpenUnstagedDiff open_unstaged_diff = 288;
-        OpenUnstagedDiffResponse open_unstaged_diff_response = 289;
+    OpenUnstagedDiff open_unstaged_diff = 288;
+    OpenUnstagedDiffResponse open_unstaged_diff_response = 289;
 
-        RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290;
+    RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290;
 
-        Stage stage = 293;
-        Unstage unstage = 294;
-        Commit commit = 295;
-        OpenCommitMessageBuffer open_commit_message_buffer = 296;
+    Stage stage = 293;
+    Unstage unstage = 294;
+    Commit commit = 295;
+    OpenCommitMessageBuffer open_commit_message_buffer = 296;
 
-        OpenUncommittedDiff open_uncommitted_diff = 297;
-        OpenUncommittedDiffResponse open_uncommitted_diff_response = 298;
+    OpenUncommittedDiff open_uncommitted_diff = 297;
+    OpenUncommittedDiffResponse open_uncommitted_diff_response = 298;
 
-        SetIndexText set_index_text = 299;
+    SetIndexText set_index_text = 299;
 
-        GitShow git_show = 300;
-        GitReset git_reset = 301;
-        GitCommitDetails git_commit_details = 302;
-        GitCheckoutFiles git_checkout_files = 303;
+    GitShow git_show = 300;
+    GitReset git_reset = 301;
+    GitCommitDetails git_commit_details = 302;
+    GitCheckoutFiles git_checkout_files = 303;
 
-        Push push = 304;
-        Fetch fetch = 305;
-        GetRemotes get_remotes = 306;
-        GetRemotesResponse get_remotes_response = 307;
-        Pull pull = 308;
+    Push push = 304;
+    Fetch fetch = 305;
+    GetRemotes get_remotes = 306;
+    GetRemotesResponse get_remotes_response = 307;
+    Pull pull = 308;
 
-        ApplyCodeActionKind apply_code_action_kind = 309;
-        ApplyCodeActionKindResponse apply_code_action_kind_response = 310;
+    ApplyCodeActionKind apply_code_action_kind = 309;
+    ApplyCodeActionKindResponse apply_code_action_kind_response = 310;
 
-        RemoteMessageResponse remote_message_response = 311;
+    RemoteMessageResponse remote_message_response = 311;
 
-        GitGetBranches git_get_branches = 312;
-        GitCreateBranch git_create_branch = 313;
-        GitChangeBranch git_change_branch = 314;
+    GitGetBranches git_get_branches = 312;
+    GitCreateBranch git_create_branch = 313;
+    GitChangeBranch git_change_branch = 314;
 
-        CheckForPushedCommits check_for_pushed_commits = 315;
-        CheckForPushedCommitsResponse check_for_pushed_commits_response = 316;
+    CheckForPushedCommits check_for_pushed_commits = 315;
+    CheckForPushedCommitsResponse check_for_pushed_commits_response = 316;
 
-        AskPassRequest ask_pass_request = 317;
-        AskPassResponse ask_pass_response = 318;
+    AskPassRequest ask_pass_request = 317;
+    AskPassResponse ask_pass_response = 318;
 
-        GitDiff git_diff = 319;
-        GitDiffResponse git_diff_response = 320;
-        GitInit git_init = 321;
+    GitDiff git_diff = 319;
+    GitDiffResponse git_diff_response = 320;
+    GitInit git_init = 321;
 
-        CodeLens code_lens = 322;
-        GetCodeLens get_code_lens = 323;
-        GetCodeLensResponse get_code_lens_response = 324;
-        RefreshCodeLens refresh_code_lens = 325;
+    CodeLens code_lens = 322;
+    GetCodeLens get_code_lens = 323;
+    GetCodeLensResponse get_code_lens_response = 324;
+    RefreshCodeLens refresh_code_lens = 325;
 
-        ToggleBreakpoint toggle_breakpoint = 326;
-        BreakpointsForFile breakpoints_for_file = 327;
+    ToggleBreakpoint toggle_breakpoint = 326;
+    BreakpointsForFile breakpoints_for_file = 327;
 
-        UpdateRepository update_repository = 328;
-        RemoveRepository remove_repository = 329;
+    UpdateRepository update_repository = 328;
+    RemoveRepository remove_repository = 329;
 
-        GetDocumentSymbols get_document_symbols = 330;
-        GetDocumentSymbolsResponse get_document_symbols_response = 331;
+    GetDocumentSymbols get_document_symbols = 330;
+    GetDocumentSymbolsResponse get_document_symbols_response = 331;
 
-        LoadCommitDiff load_commit_diff = 334;
-        LoadCommitDiffResponse load_commit_diff_response = 335;
+    LoadCommitDiff load_commit_diff = 334;
+    LoadCommitDiffResponse load_commit_diff_response = 335;
 
-        StopLanguageServers stop_language_servers = 336;
+    StopLanguageServers stop_language_servers = 336;
 
-        LspExtRunnables lsp_ext_runnables = 337;
-        LspExtRunnablesResponse lsp_ext_runnables_response = 338;
+    LspExtRunnables lsp_ext_runnables = 337;
+    LspExtRunnablesResponse lsp_ext_runnables_response = 338;
 
-        GetDebugAdapterBinary get_debug_adapter_binary = 339;
-        DebugAdapterBinary debug_adapter_binary = 340;
-        RunDebugLocators run_debug_locators = 341;
-        DebugRequest debug_request = 342;
+    GetDebugAdapterBinary get_debug_adapter_binary = 339;
+    DebugAdapterBinary debug_adapter_binary = 340;
+    RunDebugLocators run_debug_locators = 341;
+    DebugRequest debug_request = 342;
 
-        LspExtGoToParentModule lsp_ext_go_to_parent_module = 343;
-        LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344;
-        LspExtCancelFlycheck lsp_ext_cancel_flycheck = 345;
-        LspExtRunFlycheck lsp_ext_run_flycheck = 346;
-        LspExtClearFlycheck lsp_ext_clear_flycheck = 347;
+    LspExtGoToParentModule lsp_ext_go_to_parent_module = 343;
+    LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344;
+    LspExtCancelFlycheck lsp_ext_cancel_flycheck = 345;
+    LspExtRunFlycheck lsp_ext_run_flycheck = 346;
+    LspExtClearFlycheck lsp_ext_clear_flycheck = 347;
 
-        LogToDebugConsole log_to_debug_console = 348;
+    LogToDebugConsole log_to_debug_console = 348;
 
-        GetDocumentDiagnostics get_document_diagnostics = 350;
-        GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351;
-        PullWorkspaceDiagnostics pull_workspace_diagnostics = 352;
+    GetDocumentDiagnostics get_document_diagnostics = 350;
+    GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351;
+    PullWorkspaceDiagnostics pull_workspace_diagnostics = 352;
 
-        GetDocumentColor get_document_color = 353;
-        GetDocumentColorResponse get_document_color_response = 354;
-        GetColorPresentation get_color_presentation = 355;
-        GetColorPresentationResponse get_color_presentation_response = 356;
+    GetDocumentColor get_document_color = 353;
+    GetDocumentColorResponse get_document_color_response = 354;
+    GetColorPresentation get_color_presentation = 355;
+    GetColorPresentationResponse get_color_presentation_response = 356;
 
-        Stash stash = 357;
-        StashPop stash_pop = 358;
+    Stash stash = 357;
+    StashPop stash_pop = 358;
 
-        GetDefaultBranch get_default_branch = 359;
-        GetDefaultBranchResponse get_default_branch_response = 360;
+    GetDefaultBranch get_default_branch = 359;
+    GetDefaultBranchResponse get_default_branch_response = 360;
 
-        GetCrashFiles get_crash_files = 361;
-        GetCrashFilesResponse get_crash_files_response = 362;
+    GetCrashFiles get_crash_files = 361;
+    GetCrashFilesResponse get_crash_files_response = 362;
 
-        GitClone git_clone = 363;
-        GitCloneResponse git_clone_response = 364;
+    GitClone git_clone = 363;
+    GitCloneResponse git_clone_response = 364;
 
-        LspQuery lsp_query = 365;
-        LspQueryResponse lsp_query_response = 366;
-        ToggleLspLogs toggle_lsp_logs = 367;
+    LspQuery lsp_query = 365;
+    LspQueryResponse lsp_query_response = 366;
+    ToggleLspLogs toggle_lsp_logs = 367;
 
-        UpdateUserSettings update_user_settings = 368;
+    UpdateUserSettings update_user_settings = 368;
 
-        GetProcesses get_processes = 369;
-        GetProcessesResponse get_processes_response = 370;
+    GetProcesses get_processes = 369;
+    GetProcessesResponse get_processes_response = 370;
 
-        ResolveToolchain resolve_toolchain = 371;
-        ResolveToolchainResponse resolve_toolchain_response = 372;
+    ResolveToolchain resolve_toolchain = 371;
+    ResolveToolchainResponse resolve_toolchain_response = 372;
 
-        GetAgentServerCommand get_agent_server_command = 373;
-        AgentServerCommand agent_server_command = 374;
+    GetAgentServerCommand get_agent_server_command = 373;
+    AgentServerCommand agent_server_command = 374;
 
-        ExternalAgentsUpdated external_agents_updated = 375;
-        ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376;
-        NewExternalAgentVersionAvailable new_external_agent_version_available = 377;
+    ExternalAgentsUpdated external_agents_updated = 375;
+    ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376;
+    NewExternalAgentVersionAvailable new_external_agent_version_available = 377;
 
-        StashDrop stash_drop = 378;
-        StashApply stash_apply = 379;
+    StashDrop stash_drop = 378;
+    StashApply stash_apply = 379;
 
-        GitRenameBranch git_rename_branch = 380;
+    GitRenameBranch git_rename_branch = 380;
 
-        RemoteStarted remote_started = 381;
+    RemoteStarted remote_started = 381;
 
-        GetDirectoryEnvironment get_directory_environment = 382;
-        DirectoryEnvironment directory_environment = 383;
+    GetDirectoryEnvironment get_directory_environment = 382;
+    DirectoryEnvironment directory_environment = 383;
 
-        GetTreeDiff get_tree_diff = 384;
-        GetTreeDiffResponse get_tree_diff_response = 385;
+    GetTreeDiff get_tree_diff = 384;
+    GetTreeDiffResponse get_tree_diff_response = 385;
 
-        GetBlobContent get_blob_content = 386;
-        GetBlobContentResponse get_blob_content_response = 387;
+    GetBlobContent get_blob_content = 386;
+    GetBlobContentResponse get_blob_content_response = 387;
 
-        GitWorktreesResponse git_worktrees_response = 388;
-        GitGetWorktrees git_get_worktrees = 389;
-        GitCreateWorktree git_create_worktree = 390;
+    GitWorktreesResponse git_worktrees_response = 388;
+    GitGetWorktrees git_get_worktrees = 389;
+    GitCreateWorktree git_create_worktree = 390;
 
-        OpenImageByPath open_image_by_path = 391;
-        OpenImageResponse open_image_response = 392;
-        CreateImageForPeer create_image_for_peer = 393;
+    OpenImageByPath open_image_by_path = 391;
+    OpenImageResponse open_image_response = 392;
+    CreateImageForPeer create_image_for_peer = 393;
 
+    GitFileHistory git_file_history = 397;
+    GitFileHistoryResponse git_file_history_response = 398;
 
-        GitFileHistory git_file_history = 397;
-        GitFileHistoryResponse git_file_history_response = 398;
+    RunGitHook run_git_hook = 399;
 
-        RunGitHook run_git_hook = 399;
+    GitDeleteBranch git_delete_branch = 400;
 
-        GitDeleteBranch git_delete_branch = 400;
+    ExternalExtensionAgentsUpdated external_extension_agents_updated = 401;
 
-        ExternalExtensionAgentsUpdated external_extension_agents_updated = 401;
+    GitCreateRemote git_create_remote = 402;
+    GitRemoveRemote git_remove_remote = 403;
 
-        GitCreateRemote git_create_remote = 402;
-        GitRemoveRemote git_remove_remote = 403;
+    TrustWorktrees trust_worktrees = 404;
+    RestrictWorktrees restrict_worktrees = 405;
 
-        TrustWorktrees trust_worktrees = 404;
-        RestrictWorktrees restrict_worktrees = 405;
+    ShareAgentThread share_agent_thread = 406;
+    GetSharedAgentThread get_shared_agent_thread = 407;
+    GetSharedAgentThreadResponse get_shared_agent_thread_response = 408;
 
-        ShareAgentThread share_agent_thread = 406;
-        GetSharedAgentThread get_shared_agent_thread = 407;
-        GetSharedAgentThreadResponse get_shared_agent_thread_response = 408;
+    FindSearchCandidatesChunk find_search_candidates_chunk = 409;
+    FindSearchCandidatesCancelled find_search_candidates_cancelled = 410;
+    GetContextServerCommand get_context_server_command = 411;
+    ContextServerCommand context_server_command = 412;
 
-        FindSearchCandidatesChunk find_search_candidates_chunk = 409;
-        FindSearchCandidatesCancelled find_search_candidates_cancelled = 410;
-        GetContextServerCommand get_context_server_command = 411;
-        ContextServerCommand context_server_command = 412;
+    AllocateWorktreeId allocate_worktree_id = 413;
+    AllocateWorktreeIdResponse allocate_worktree_id_response = 414;
 
-        AllocateWorktreeId allocate_worktree_id = 413;
-        AllocateWorktreeIdResponse allocate_worktree_id_response = 414;
+    DownloadFileByPath download_file_by_path = 415;
+    DownloadFileResponse download_file_response = 416;
+    CreateFileForPeer create_file_for_peer = 417;
 
-        DownloadFileByPath download_file_by_path = 415;
-        DownloadFileResponse download_file_response = 416;
-        CreateFileForPeer create_file_for_peer = 417;
+    SemanticTokens semantic_tokens = 418;
+    SemanticTokensResponse semantic_tokens_response = 419;
+    RefreshSemanticTokens refresh_semantic_tokens = 420;
+    GetFoldingRanges get_folding_ranges = 421;
+    GetFoldingRangesResponse get_folding_ranges_response = 422;
 
-        SemanticTokens semantic_tokens = 418;
-        SemanticTokensResponse semantic_tokens_response = 419;
-        RefreshSemanticTokens refresh_semantic_tokens = 420;
-        GetFoldingRanges get_folding_ranges = 421;
-        GetFoldingRangesResponse get_folding_ranges_response = 422;
+    GetRemoteProfilingData get_remote_profiling_data = 423;
+    GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424;
 
-        GetRemoteProfilingData get_remote_profiling_data = 423;
-        GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424;
-        
-        SpawnKernel spawn_kernel = 426;
-        SpawnKernelResponse spawn_kernel_response = 427;
-        KillKernel kill_kernel = 428; // current max
-    }
+    SpawnKernel spawn_kernel = 426;
+    SpawnKernelResponse spawn_kernel_response = 427;
+    KillKernel kill_kernel = 428; // current max
+  }
 
-    reserved 87 to 88;
-    reserved 90;
-    reserved 102 to 103;
-    reserved 158 to 161;
-    reserved 164;
-    reserved 166 to 169;
-    reserved 175 to 185;
-    reserved 188 to 195;
-    reserved 197;
-    reserved 198 to 202;
-    reserved 205 to 206;
-    reserved 221;
-    reserved 224 to 231;
-    reserved 234 to 236;
-    reserved 239 to 240;
-    reserved 244;
-    reserved 246 to 256;
-    reserved 259;
-    reserved 270;
-    reserved 280 to 281;
-    reserved 332 to 333;
-    reserved 394 to 396;
+  reserved 87 to 88;
+  reserved 90;
+  reserved 102 to 103;
+  reserved 158 to 161;
+  reserved 164;
+  reserved 166 to 169;
+  reserved 175 to 185;
+  reserved 188 to 195;
+  reserved 197;
+  reserved 198 to 202;
+  reserved 205 to 206;
+  reserved 221;
+  reserved 224 to 231;
+  reserved 234 to 236;
+  reserved 239 to 240;
+  reserved 244;
+  reserved 246 to 256;
+  reserved 259;
+  reserved 270;
+  reserved 280 to 281;
+  reserved 332 to 333;
+  reserved 394 to 396;
+  reserved 429 to 430;
 }
 
 message Hello {
-    PeerId peer_id = 1;
+  PeerId peer_id = 1;
 }
 
 message Ping {}
@@ -512,37 +511,37 @@ message Ping {}
 message Ack {}
 
 message Error {
-    string message = 1;
-    ErrorCode code = 2;
-    repeated string tags = 3;
+  string message = 1;
+  ErrorCode code = 2;
+  repeated string tags = 3;
 }
 
 enum ErrorCode {
-    Internal = 0;
-    NoSuchChannel = 1;
-    Disconnected = 2;
-    SignedOut = 3;
-    UpgradeRequired = 4;
-    Forbidden = 5;
-    NeedsCla = 7;
-    NotARootChannel = 8;
-    BadPublicNesting = 9;
-    CircularNesting = 10;
-    WrongMoveTarget = 11;
-    UnsharedItem = 12;
-    NoSuchProject = 13;
-    DevServerProjectPathDoesNotExist = 16;
-    RemoteUpgradeRequired = 17;
-    RateLimitExceeded = 18;
-    CommitFailed = 19;
-    reserved 6;
-    reserved 14 to 15;
+  Internal = 0;
+  NoSuchChannel = 1;
+  Disconnected = 2;
+  SignedOut = 3;
+  UpgradeRequired = 4;
+  Forbidden = 5;
+  NeedsCla = 7;
+  NotARootChannel = 8;
+  BadPublicNesting = 9;
+  CircularNesting = 10;
+  WrongMoveTarget = 11;
+  UnsharedItem = 12;
+  NoSuchProject = 13;
+  DevServerProjectPathDoesNotExist = 16;
+  RemoteUpgradeRequired = 17;
+  RateLimitExceeded = 18;
+  CommitFailed = 19;
+  reserved 6;
+  reserved 14 to 15;
 }
 
 message EndStream {}
 
 message Test {
-    uint64 id = 1;
+  uint64 id = 1;
 }
 
 message FlushBufferedMessages {}
@@ -552,19 +551,19 @@ message FlushBufferedMessagesResponse {}
 message RemoteStarted {}
 
 message SpawnKernel {
-    string kernel_name = 1;
-    string working_directory = 2;
-    uint64 project_id = 3;
-    string command = 4;
-    repeated string args = 5;
+  string kernel_name = 1;
+  string working_directory = 2;
+  uint64 project_id = 3;
+  string command = 4;
+  repeated string args = 5;
 }
 
 message SpawnKernelResponse {
-    string kernel_id = 1;
-    string connection_file = 2;
+  string kernel_id = 1;
+  string connection_file = 2;
 }
 
 message KillKernel {
-    string kernel_id = 1;
-    uint64 project_id = 2;
+  string kernel_id = 1;
+  uint64 project_id = 2;
 }

crates/proto/src/error.rs 🔗

@@ -159,6 +159,12 @@ pub struct RpcError {
 /// in the app; however it is useful for chaining .message() and .with_tag() on
 /// ErrorCode.
 impl RpcError {
+    /// Returns the raw server-provided error message without any RPC framing
+    /// (e.g. without the "RPC request X failed: " prefix that `Display` adds).
+    pub fn raw_message(&self) -> &str {
+        &self.msg
+    }
+
     /// from_proto converts a crate::Error into an anyhow::Error containing
     /// an RpcError.
     pub fn from_proto(error: &crate::Error, request: &str) -> anyhow::Error {

crates/recent_projects/src/recent_projects.rs 🔗

@@ -750,12 +750,7 @@ impl PickerDelegate for RecentProjectsDelegate {
         self.selected_index = ix;
     }
 
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
         matches!(
             self.filtered_entries.get(ix),
             Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::RecentProject(_))
@@ -1258,17 +1253,16 @@ impl PickerDelegate for RecentProjectsDelegate {
                     .gap_1()
                     .border_t_1()
                     .border_color(cx.theme().colors().border_variant)
-                    .child(
+                    .child({
+                        let open_action = workspace::Open {
+                            create_new_window: self.create_new_window,
+                        };
                         Button::new("open_local_folder", "Open Local Project")
-                            .key_binding(KeyBinding::for_action_in(
-                                &workspace::Open,
-                                &focus_handle,
-                                cx,
-                            ))
-                            .on_click(|_, window, cx| {
-                                window.dispatch_action(workspace::Open.boxed_clone(), cx)
-                            }),
-                    )
+                            .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx))
+                            .on_click(move |_, window, cx| {
+                                window.dispatch_action(open_action.boxed_clone(), cx)
+                            })
+                    })
                     .child(
                         Button::new("open_remote_folder", "Open Remote Project")
                             .key_binding(KeyBinding::for_action(
@@ -1359,6 +1353,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                         )
                         .menu({
                             let focus_handle = focus_handle.clone();
+                            let create_new_window = self.create_new_window;
 
                             move |window, cx| {
                                 Some(ContextMenu::build(window, cx, {
@@ -1367,7 +1362,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                                         menu.context(focus_handle)
                                             .action(
                                                 "Open Local Project",
-                                                workspace::Open.boxed_clone(),
+                                                workspace::Open { create_new_window }.boxed_clone(),
                                             )
                                             .action(
                                                 "Open Remote Project",

crates/recent_projects/src/remote_servers.rs 🔗

@@ -1161,12 +1161,11 @@ impl RemoteServerProjects {
                 workspace.toggle_modal(window, cx, |window, cx| {
                     RemoteConnectionModal::new(&connection_options, Vec::new(), window, cx)
                 });
-                let prompt = workspace
-                    .active_modal::<RemoteConnectionModal>(cx)
-                    .unwrap()
-                    .read(cx)
-                    .prompt
-                    .clone();
+                // can be None if another copy of this modal opened in the meantime
+                let Some(modal) = workspace.active_modal::<RemoteConnectionModal>(cx) else {
+                    return;
+                };
+                let prompt = modal.read(cx).prompt.clone();
 
                 let connect = connect(
                     ConnectionIdentifier::setup(),
@@ -1849,6 +1848,7 @@ impl RemoteServerProjects {
     ) {
         let replace_window = window.window_handle().downcast::<MultiWorkspace>();
 
+        let app_state = Arc::downgrade(&app_state);
         cx.spawn_in(window, async move |entity, cx| {
             let (connection, starting_dir) =
                 match start_dev_container_with_config(context, config).await {
@@ -1882,6 +1882,9 @@ impl RemoteServerProjects {
                 })
                 .log_err();
 
+            let Some(app_state) = app_state.upgrade() else {
+                return;
+            };
             let result = open_remote_project(
                 connection.into(),
                 vec![starting_dir].into_iter().map(PathBuf::from).collect(),

crates/remote/Cargo.toml 🔗

@@ -48,3 +48,4 @@ which.workspace = true
 [dev-dependencies]
 gpui = { workspace = true, features = ["test-support"] }
 fs = { workspace = true, features = ["test-support"] }
+util = { workspace = true, features = ["test-support"] }

crates/remote/src/transport.rs 🔗

@@ -1,3 +1,5 @@
+use std::io::Write;
+
 use crate::{
     RemoteArch, RemoteOs, RemotePlatform,
     json_log::LogRecord,
@@ -137,7 +139,12 @@ fn handle_rpc_messages_over_child_process_stdio(
                 if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
                     record.log(log::logger())
                 } else {
-                    eprintln!("(remote) {}", String::from_utf8_lossy(content));
+                    std::io::stderr()
+                        .write_fmt(format_args!(
+                            "(remote) {}\n",
+                            String::from_utf8_lossy(content)
+                        ))
+                        .ok();
                 }
             }
             stderr_buffer.drain(0..start_ix);

crates/remote/src/transport/docker.rs 🔗

@@ -635,7 +635,7 @@ impl RemoteConnection for DockerExecConnection {
         for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
             if let Some(value) = std::env::var(env_var).ok() {
                 docker_args.push("-e".to_string());
-                docker_args.push(format!("{}='{}'", env_var, value));
+                docker_args.push(format!("{env_var}={value}"));
             }
         }
 

crates/remote/src/transport/ssh.rs 🔗

@@ -94,6 +94,14 @@ impl Default for SshConnectionHost {
     }
 }
 
+fn bracket_ipv6(host: &str) -> String {
+    if host.contains(':') && !host.starts_with('[') {
+        format!("[{}]", host)
+    } else {
+        host.to_string()
+    }
+}
+
 #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
 pub struct SshConnectionOptions {
     pub host: SshConnectionHost,
@@ -344,7 +352,12 @@ impl RemoteConnection for SshRemoteConnection {
         args.push("-N".into());
         for (local_port, host, remote_port) in forwards {
             args.push("-L".into());
-            args.push(format!("{local_port}:{host}:{remote_port}"));
+            args.push(format!(
+                "{}:{}:{}",
+                local_port,
+                bracket_ipv6(&host),
+                remote_port
+            ));
         }
         args.push(socket.connection_options.ssh_destination());
         Ok(CommandTemplate {
@@ -450,7 +463,7 @@ impl RemoteConnection for SshRemoteConnection {
             let mut proxy_args = vec![];
             for env_var in VARS {
                 if let Some(value) = std::env::var(env_var).ok() {
-                    proxy_args.push(format!("{}='{}'", env_var, value));
+                    proxy_args.push(format!("{env_var}={value}"));
                 }
             }
             proxy_args.push(remote_binary_path.display(self.path_style()).into_owned());
@@ -1342,33 +1355,71 @@ fn parse_port_number(port_str: &str) -> Result<u16> {
         .with_context(|| format!("parsing port number: {port_str}"))
 }
 
+fn split_port_forward_tokens(spec: &str) -> Result<Vec<String>> {
+    let mut tokens = Vec::new();
+    let mut chars = spec.chars().peekable();
+
+    while chars.peek().is_some() {
+        if chars.peek() == Some(&'[') {
+            chars.next();
+            let mut bracket_content = String::new();
+            loop {
+                match chars.next() {
+                    Some(']') => break,
+                    Some(ch) => bracket_content.push(ch),
+                    None => anyhow::bail!("Unmatched '[' in port forward spec: {spec}"),
+                }
+            }
+            tokens.push(bracket_content);
+            if chars.peek() == Some(&':') {
+                chars.next();
+            }
+        } else {
+            let mut token = String::new();
+            for ch in chars.by_ref() {
+                if ch == ':' {
+                    break;
+                }
+                token.push(ch);
+            }
+            tokens.push(token);
+        }
+    }
+
+    Ok(tokens)
+}
+
 fn parse_port_forward_spec(spec: &str) -> Result<SshPortForwardOption> {
-    let parts: Vec<&str> = spec.split(':').collect();
+    let tokens = if spec.contains('[') {
+        split_port_forward_tokens(spec)?
+    } else {
+        spec.split(':').map(String::from).collect()
+    };
 
-    match *parts {
-        [a, b, c, d] => {
-            let local_port = parse_port_number(b)?;
-            let remote_port = parse_port_number(d)?;
+    match tokens.len() {
+        4 => {
+            let local_port = parse_port_number(&tokens[1])?;
+            let remote_port = parse_port_number(&tokens[3])?;
 
             Ok(SshPortForwardOption {
-                local_host: Some(a.to_string()),
+                local_host: Some(tokens[0].clone()),
                 local_port,
-                remote_host: Some(c.to_string()),
+                remote_host: Some(tokens[2].clone()),
                 remote_port,
             })
         }
-        [a, b, c] => {
-            let local_port = parse_port_number(a)?;
-            let remote_port = parse_port_number(c)?;
+        3 => {
+            let local_port = parse_port_number(&tokens[0])?;
+            let remote_port = parse_port_number(&tokens[2])?;
 
             Ok(SshPortForwardOption {
                 local_host: None,
                 local_port,
-                remote_host: Some(b.to_string()),
+                remote_host: Some(tokens[1].clone()),
                 remote_port,
             })
         }
-        _ => anyhow::bail!("Invalid port forward format"),
+        _ => anyhow::bail!("Invalid port forward format: {spec}"),
     }
 }
 
@@ -1534,7 +1585,10 @@ impl SshConnectionOptions {
 
                 format!(
                     "-L{}:{}:{}:{}",
-                    local_host, pf.local_port, remote_host, pf.remote_port
+                    bracket_ipv6(local_host),
+                    pf.local_port,
+                    bracket_ipv6(remote_host),
+                    pf.remote_port
                 )
             }));
         }
@@ -1612,12 +1666,11 @@ fn build_command_posix(
     write!(exec, "exec env ")?;
 
     for (k, v) in input_env.iter() {
-        write!(
-            exec,
-            "{}={} ",
-            k,
-            ssh_shell_kind.try_quote(v).context("shell quoting")?
-        )?;
+        let assignment = format!("{k}={v}");
+        let assignment = ssh_shell_kind
+            .try_quote(&assignment)
+            .context("shell quoting")?;
+        write!(exec, "{assignment} ")?;
     }
 
     if let Some(input_program) = input_program {
@@ -1641,7 +1694,12 @@ fn build_command_posix(
 
     if let Some((local_port, host, remote_port)) = port_forward {
         args.push("-L".into());
-        args.push(format!("{local_port}:{host}:{remote_port}"));
+        args.push(format!(
+            "{}:{}:{}",
+            local_port,
+            bracket_ipv6(&host),
+            remote_port
+        ));
     }
 
     // -q suppresses the "Connection to ... closed." message that SSH prints when
@@ -1731,7 +1789,12 @@ fn build_command_windows(
 
     if let Some((local_port, host, remote_port)) = port_forward {
         args.push("-L".into());
-        args.push(format!("{local_port}:{host}:{remote_port}"));
+        args.push(format!(
+            "{}:{}:{}",
+            local_port,
+            bracket_ipv6(&host),
+            remote_port
+        ));
     }
 
     // -q suppresses the "Connection to ... closed." message that SSH prints when
@@ -1818,7 +1881,7 @@ mod tests {
                 "-q",
                 "-t",
                 "user@host",
-                "cd \"$HOME/work\" && exec env INPUT_VA=val remote_program arg1 arg2"
+                "cd \"$HOME/work\" && exec env 'INPUT_VA=val' remote_program arg1 arg2"
             ]
         );
         assert_eq!(command.env, env);
@@ -1854,7 +1917,7 @@ mod tests {
                 "-q",
                 "-t",
                 "user@host",
-                "cd && exec env INPUT_VA=val /bin/fish -l"
+                "cd && exec env 'INPUT_VA=val' /bin/fish -l"
             ]
         );
         assert_eq!(command.env, env);
@@ -1862,6 +1925,38 @@ mod tests {
         Ok(())
     }
 
+    #[test]
+    fn test_build_command_quotes_env_assignment() -> Result<()> {
+        let mut input_env = HashMap::default();
+        input_env.insert("ZED$(echo foo)".to_string(), "value".to_string());
+
+        let command = build_command_posix(
+            Some("remote_program".to_string()),
+            &[],
+            &input_env,
+            None,
+            None,
+            HashMap::default(),
+            PathStyle::Posix,
+            "/bin/bash",
+            ShellKind::Posix,
+            vec![],
+            "user@host",
+            Interactive::No,
+        )?;
+
+        let remote_command = command
+            .args
+            .last()
+            .context("missing remote command argument")?;
+        assert!(
+            remote_command.contains("exec env 'ZED$(echo foo)=value' remote_program"),
+            "expected env assignment to be quoted, got: {remote_command}"
+        );
+
+        Ok(())
+    }
+
     #[test]
     fn scp_args_exclude_port_forward_flags() {
         let options = SshConnectionOptions {
@@ -1938,4 +2033,79 @@ mod tests {
 
         Ok(())
     }
+
+    #[test]
+    fn test_parse_port_forward_spec_ipv6() -> Result<()> {
+        let pf = parse_port_forward_spec("[::1]:8080:[::1]:80")?;
+        assert_eq!(pf.local_host, Some("::1".to_string()));
+        assert_eq!(pf.local_port, 8080);
+        assert_eq!(pf.remote_host, Some("::1".to_string()));
+        assert_eq!(pf.remote_port, 80);
+
+        let pf = parse_port_forward_spec("8080:[::1]:80")?;
+        assert_eq!(pf.local_host, None);
+        assert_eq!(pf.local_port, 8080);
+        assert_eq!(pf.remote_host, Some("::1".to_string()));
+        assert_eq!(pf.remote_port, 80);
+
+        let pf = parse_port_forward_spec("[2001:db8::1]:3000:[fe80::1]:4000")?;
+        assert_eq!(pf.local_host, Some("2001:db8::1".to_string()));
+        assert_eq!(pf.local_port, 3000);
+        assert_eq!(pf.remote_host, Some("fe80::1".to_string()));
+        assert_eq!(pf.remote_port, 4000);
+
+        let pf = parse_port_forward_spec("127.0.0.1:8080:localhost:80")?;
+        assert_eq!(pf.local_host, Some("127.0.0.1".to_string()));
+        assert_eq!(pf.local_port, 8080);
+        assert_eq!(pf.remote_host, Some("localhost".to_string()));
+        assert_eq!(pf.remote_port, 80);
+
+        Ok(())
+    }
+
+    #[test]
+    fn test_port_forward_ipv6_formatting() {
+        let options = SshConnectionOptions {
+            host: "example.com".into(),
+            port_forwards: Some(vec![SshPortForwardOption {
+                local_host: Some("::1".to_string()),
+                local_port: 8080,
+                remote_host: Some("::1".to_string()),
+                remote_port: 80,
+            }]),
+            ..Default::default()
+        };
+
+        let args = options.additional_args();
+        assert!(
+            args.iter().any(|arg| arg == "-L[::1]:8080:[::1]:80"),
+            "expected bracketed IPv6 in -L flag: {args:?}"
+        );
+    }
+
+    #[test]
+    fn test_build_command_with_ipv6_port_forward() -> Result<()> {
+        let command = build_command_posix(
+            None,
+            &[],
+            &HashMap::default(),
+            None,
+            Some((8080, "::1".to_owned(), 80)),
+            HashMap::default(),
+            PathStyle::Posix,
+            "/bin/bash",
+            ShellKind::Posix,
+            vec![],
+            "user@host",
+            Interactive::No,
+        )?;
+
+        assert!(
+            command.args.iter().any(|arg| arg == "8080:[::1]:80"),
+            "expected bracketed IPv6 in port forward arg: {:?}",
+            command.args
+        );
+
+        Ok(())
+    }
 }

crates/remote/src/transport/wsl.rs 🔗

@@ -450,13 +450,10 @@ impl RemoteConnection for WslRemoteConnection {
 
         let mut exec = String::from("exec env ");
 
-        for (k, v) in env.iter() {
-            write!(
-                exec,
-                "{}={} ",
-                k,
-                shell_kind.try_quote(v).context("shell quoting")?
-            )?;
+        for (key, value) in env.iter() {
+            let assignment = format!("{key}={value}");
+            let assignment = shell_kind.try_quote(&assignment).context("shell quoting")?;
+            write!(exec, "{assignment} ")?;
         }
 
         if let Some(program) = program {

crates/remote_server/Cargo.toml 🔗

@@ -82,6 +82,7 @@ minidumper.workspace = true
 
 [target.'cfg(windows)'.dependencies]
 windows.workspace = true
+gpui = { workspace = true, features = ["windows-manifest"] }
 
 [dev-dependencies]
 action_log.workspace = true

crates/remote_server/src/remote_editing_tests.rs 🔗

@@ -2,12 +2,11 @@
 /// The tests in this file assume that server_cx is running on Windows too.
 /// We need to find a way to test Windows-Non-Windows interactions.
 use crate::headless_project::HeadlessProject;
-use agent::{AgentTool, ReadFileTool, ReadFileToolInput, Templates, Thread, ToolCallEventStream};
+use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream, ToolInput};
 use client::{Client, UserStore};
 use clock::FakeSystemClock;
 use collections::{HashMap, HashSet};
-use language_model::{LanguageModelToolResultContent, fake_provider::FakeLanguageModel};
-use prompt_store::ProjectContext;
+use language_model::LanguageModelToolResultContent;
 
 use extension::ExtensionHostProxy;
 use fs::{FakeFs, Fs};
@@ -1939,30 +1938,19 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu
 
     let action_log = cx.new(|_| action_log::ActionLog::new(project.clone()));
 
-    // Create a minimal thread for the ReadFileTool
-    let context_server_registry =
-        cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-    let model = Arc::new(FakeLanguageModel::default());
-    let thread = cx.new(|cx| {
-        Thread::new(
-            project.clone(),
-            cx.new(|_cx| ProjectContext::default()),
-            context_server_registry,
-            Templates::new(),
-            Some(model),
-            cx,
-        )
-    });
-
     let input = ReadFileToolInput {
         path: "project/b.txt".into(),
         start_line: None,
         end_line: None,
     };
-    let read_tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+    let read_tool = Arc::new(ReadFileTool::new(project, action_log, true));
     let (event_stream, _) = ToolCallEventStream::test();
 
-    let exists_result = cx.update(|cx| read_tool.clone().run(input, event_stream.clone(), cx));
+    let exists_result = cx.update(|cx| {
+        read_tool
+            .clone()
+            .run(ToolInput::resolved(input), event_stream.clone(), cx)
+    });
     let output = exists_result.await.unwrap();
     assert_eq!(output, LanguageModelToolResultContent::Text("B".into()));
 
@@ -1971,7 +1959,8 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu
         start_line: None,
         end_line: None,
     };
-    let does_not_exist_result = cx.update(|cx| read_tool.run(input, event_stream, cx));
+    let does_not_exist_result =
+        cx.update(|cx| read_tool.run(ToolInput::resolved(input), event_stream, cx));
     does_not_exist_result.await.unwrap_err();
 }
 
@@ -1998,7 +1987,7 @@ async fn test_remote_external_agent_server(
             .map(|name| name.to_string())
             .collect::<Vec<_>>()
     });
-    pretty_assertions::assert_eq!(names, ["codex", "gemini", "claude"]);
+    pretty_assertions::assert_eq!(names, Vec::<String>::new());
     server_cx.update_global::<SettingsStore, _>(|settings_store, cx| {
         settings_store
             .set_server_settings(
@@ -2029,15 +2018,14 @@ async fn test_remote_external_agent_server(
             .map(|name| name.to_string())
             .collect::<Vec<_>>()
     });
-    pretty_assertions::assert_eq!(names, ["gemini", "codex", "claude", "foo"]);
-    let (command, root, login) = project
+    pretty_assertions::assert_eq!(names, ["foo"]);
+    let command = project
         .update(cx, |project, cx| {
             project.agent_server_store().update(cx, |store, cx| {
                 store
                     .get_external_agent(&"foo".into())
                     .unwrap()
                     .get_command(
-                        None,
                         HashMap::from_iter([("OTHER_VAR".into(), "other-val".into())]),
                         None,
                         None,
@@ -2053,13 +2041,12 @@ async fn test_remote_external_agent_server(
             path: "mock".into(),
             args: vec!["foo-cli".into(), "--flag".into()],
             env: Some(HashMap::from_iter([
+                ("NO_BROWSER".into(), "1".into()),
                 ("VAR".into(), "val".into()),
                 ("OTHER_VAR".into(), "other-val".into())
             ]))
         }
     );
-    assert_eq!(&PathBuf::from(root), paths::home_dir());
-    assert!(login.is_none());
 }
 
 pub async fn init_test(

crates/remote_server/src/server.rs 🔗

@@ -356,9 +356,18 @@ fn start_server(
 
             let (mut stdin_msg_tx, mut stdin_msg_rx) = mpsc::unbounded::<Envelope>();
             cx.background_spawn(async move {
-                while let Ok(msg) = read_message(&mut stdin_stream, &mut input_buffer).await {
-                    if (stdin_msg_tx.send(msg).await).is_err() {
-                        break;
+                loop {
+                    match read_message(&mut stdin_stream, &mut input_buffer).await {
+                        Ok(msg) => {
+                            if (stdin_msg_tx.send(msg).await).is_err() {
+                                log::info!("stdin message channel closed, stopping stdin reader");
+                                break;
+                            }
+                        }
+                        Err(error) => {
+                            log::warn!("stdin read failed: {error:?}");
+                            break;
+                        }
                     }
                 }
             }).detach();

crates/repl/Cargo.toml 🔗

@@ -47,6 +47,7 @@ runtimelib.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 settings.workspace = true
+shlex.workspace = true
 smol.workspace = true
 telemetry.workspace = true
 terminal.workspace = true

crates/repl/src/kernels/wsl_kernel.rs 🔗

@@ -21,6 +21,7 @@ use std::{
     path::PathBuf,
     sync::Arc,
 };
+
 use uuid::Uuid;
 
 // Find a set of open ports. This creates a listener with port set to 0. The listener will be closed at the end when it goes out of scope.
@@ -56,6 +57,15 @@ impl Debug for WslRunningKernel {
     }
 }
 
+fn quote_posix_shell_arguments(arguments: &[String]) -> Result<String> {
+    let mut quoted_arguments = Vec::with_capacity(arguments.len());
+    for argument in arguments {
+        let quoted = shlex::try_quote(argument).map(|quoted| quoted.into_owned())?;
+        quoted_arguments.push(quoted);
+    }
+    Ok(quoted_arguments.join(" "))
+}
+
 impl WslRunningKernel {
     pub fn new<S: KernelSession + 'static>(
         kernel_specification: WslKernelSpecification,
@@ -129,9 +139,8 @@ impl WslRunningKernel {
             // `wsl -d <distro> --exec <argv0> <argv1> ...`
             // But we need to replace {connection_file} with wsl_connection_path.
 
-            let argv = kernel_specification.kernelspec.argv;
             anyhow::ensure!(
-                !argv.is_empty(),
+                !kernel_specification.kernelspec.argv.is_empty(),
                 "Empty argv in kernelspec {}",
                 kernel_specification.name
             );
@@ -182,50 +191,57 @@ impl WslRunningKernel {
             // We use bash -lc to run in a login shell for proper environment setup
             let mut kernel_args: Vec<String> = Vec::new();
 
-            if let Some(env) = &kernel_specification.kernelspec.env {
-                if !env.is_empty() {
-                    kernel_args.push("env".to_string());
-                    for (k, v) in env {
-                        kernel_args.push(format!("{}={}", k, v));
+            let resolved_argv: Vec<String> = kernel_specification
+                .kernelspec
+                .argv
+                .iter()
+                .map(|arg| {
+                    if arg == "{connection_file}" {
+                        wsl_connection_path.clone()
+                    } else {
+                        arg.clone()
                     }
+                })
+                .collect();
+
+            let executable = resolved_argv.first().map(String::as_str);
+            let needs_python_resolution = executable.map_or(false, |executable| {
+                executable == "python" || executable == "python3" || !executable.starts_with('/')
+            });
+
+            let mut env_assignments: Vec<String> = Vec::new();
+            if let Some(env) = &kernel_specification.kernelspec.env {
+                env_assignments.reserve(env.len());
+                for (key, value) in env {
+                    let assignment = format!("{key}={value}");
+                    let assignment = shlex::try_quote(&assignment)
+                        .map(|quoted| quoted.into_owned())?;
+                    env_assignments.push(assignment);
                 }
-            }
 
-            for arg in argv {
-                if arg == "{connection_file}" {
-                    kernel_args.push(wsl_connection_path.clone());
-                } else {
-                    kernel_args.push(arg.clone());
+                if !env_assignments.is_empty() {
+                    kernel_args.push("env".to_string());
+                    kernel_args.extend(env_assignments.iter().cloned());
                 }
             }
 
-            // because first command is python/python3 we need make sure it's present in the env
-            let first_cmd = kernel_args.first().map(|arg| {
-                arg.split_whitespace().next().unwrap_or(arg)
-            });
-
-            let needs_python_resolution = first_cmd.map_or(false, |cmd| {
-                cmd == "python" || cmd == "python3" || !cmd.starts_with('/')
-            });
+            kernel_args.extend(resolved_argv.iter().cloned());
 
             let shell_command = if needs_python_resolution {
                 // 1. Check for .venv/bin/python or .venv/bin/python3 in working directory
                 // 2. Fall back to system python3 or python
-                let rest_args: Vec<String> = kernel_args.iter().skip(1).cloned().collect();
-                let rest_string = rest_args
-                    .iter()
-                    .map(|arg| {
-                        if arg.contains(' ') || arg.contains('\'') || arg.contains('"') {
-                            format!("'{}'", arg.replace('\'', "'\\''"))
-                        } else {
-                            arg.clone()
-                        }
-                    })
-                    .collect::<Vec<_>>()
-                    .join(" ");
+                let rest_args: Vec<String> = resolved_argv.iter().skip(1).cloned().collect();
+                let arg_string = quote_posix_shell_arguments(&rest_args)?;
+                let set_env_command = if env_assignments.is_empty() {
+                    String::new()
+                } else {
+                    format!("export {}; ", env_assignments.join(" "))
+                };
 
                 let cd_command = if let Some(wd) = wsl_working_directory.as_ref() {
-                    format!("cd '{}' && ", wd.replace('\'', "'\\''"))
+                    let quoted_wd = shlex::try_quote(wd)
+                        .map(|quoted| quoted.into_owned())?;
+                    format!("cd {quoted_wd} && ")
                 } else {
                     String::new()
                 };
@@ -233,6 +249,7 @@ impl WslRunningKernel {
 
                 format!(
                     "set -e; \
+                     {} \
                      {} \
                      echo \"Working directory: $(pwd)\" >&2; \
                      if [ -x .venv/bin/python ]; then \
@@ -254,20 +271,10 @@ impl WslRunningKernel {
                        echo 'PATH:' \"$PATH\" >&2; \
                        exit 127; \
                      fi",
-                    cd_command, rest_string, rest_string, rest_string, rest_string
+                    cd_command, set_env_command, arg_string, arg_string, arg_string, arg_string
                 )
             } else {
-                kernel_args
-                    .iter()
-                    .map(|arg| {
-                        if arg.contains(' ') || arg.contains('\'') || arg.contains('"') {
-                            format!("'{}'", arg.replace('\'', "'\\''"))
-                        } else {
-                            arg.clone()
-                        }
-                    })
-                    .collect::<Vec<_>>()
-                    .join(" ")
+                quote_posix_shell_arguments(&kernel_args)?
             };
 
             cmd.arg("bash")

crates/repl/src/notebook/cell.rs 🔗

@@ -1,12 +1,11 @@
-#![allow(unused, dead_code)]
 use std::sync::Arc;
 use std::time::{Duration, Instant};
 
-use editor::{Editor, EditorMode, MultiBuffer};
+use editor::{Editor, EditorMode, MultiBuffer, SizingBehavior};
 use futures::future::Shared;
 use gpui::{
     App, Entity, EventEmitter, Focusable, Hsla, InteractiveElement, RetainAllImageCache,
-    StatefulInteractiveElement, Task, TextStyleRefinement, image_cache, prelude::*,
+    StatefulInteractiveElement, Task, TextStyleRefinement, prelude::*,
 };
 use language::{Buffer, Language, LanguageRegistry};
 use markdown::{Markdown, MarkdownElement, MarkdownStyle};
@@ -235,7 +234,7 @@ pub trait RenderableCell: Render {
     fn source(&self) -> &String;
     fn selected(&self) -> bool;
     fn set_selected(&mut self, selected: bool) -> &mut Self;
-    fn selected_bg_color(&self, window: &mut Window, cx: &mut Context<Self>) -> Hsla {
+    fn selected_bg_color(&self, _window: &mut Window, cx: &mut Context<Self>) -> Hsla {
         if self.selected() {
             let mut color = cx.theme().colors().element_hover;
             color.fade_out(0.5);
@@ -252,7 +251,7 @@ pub trait RenderableCell: Render {
     fn cell_position_spacer(
         &self,
         is_first: bool,
-        window: &mut Window,
+        _window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Option<impl IntoElement> {
         let cell_position = self.cell_position();
@@ -327,7 +326,6 @@ pub struct MarkdownCell {
     editing: bool,
     selected: bool,
     cell_position: Option<CellPosition>,
-    languages: Arc<LanguageRegistry>,
     _editor_subscription: gpui::Subscription,
 }
 
@@ -357,9 +355,10 @@ impl MarkdownCell {
 
         let editor = cx.new(|cx| {
             let mut editor = Editor::new(
-                EditorMode::AutoHeight {
-                    min_lines: 1,
-                    max_lines: Some(1024),
+                EditorMode::Full {
+                    scale_ui_elements_with_buffer_font_size: false,
+                    show_active_line_background: false,
+                    sizing_behavior: SizingBehavior::SizeByContent,
                 },
                 multi_buffer,
                 None,
@@ -378,12 +377,12 @@ impl MarkdownCell {
 
             editor.set_show_gutter(false, cx);
             editor.set_text_style_refinement(refinement);
+            editor.set_use_modal_editing(true);
             editor
         });
 
         let markdown = cx.new(|cx| Markdown::new(source.clone().into(), None, None, cx));
 
-        let cell_id = id.clone();
         let editor_subscription =
             cx.subscribe(&editor, move |this, _editor, event, cx| match event {
                 editor::EditorEvent::Blurred => {
@@ -407,7 +406,6 @@ impl MarkdownCell {
             editing: start_editing,
             selected: false,
             cell_position: None,
-            languages,
             _editor_subscription: editor_subscription,
         }
     }
@@ -458,8 +456,6 @@ impl MarkdownCell {
             .unwrap_or_default();
 
         self.source = source.clone();
-        let languages = self.languages.clone();
-
         self.markdown.update(cx, |markdown, cx| {
             markdown.reset(source.into(), cx);
         });
@@ -603,7 +599,7 @@ pub struct CodeCell {
     outputs: Vec<Output>,
     selected: bool,
     cell_position: Option<CellPosition>,
-    language_task: Task<()>,
+    _language_task: Task<()>,
     execution_start_time: Option<Instant>,
     execution_duration: Option<Duration>,
     is_executing: bool,
@@ -625,9 +621,10 @@ impl CodeCell {
 
         let editor_view = cx.new(|cx| {
             let mut editor = Editor::new(
-                EditorMode::AutoHeight {
-                    min_lines: 1,
-                    max_lines: Some(1024),
+                EditorMode::Full {
+                    scale_ui_elements_with_buffer_font_size: false,
+                    show_active_line_background: false,
+                    sizing_behavior: SizingBehavior::SizeByContent,
                 },
                 multi_buffer,
                 None,
@@ -646,6 +643,7 @@ impl CodeCell {
 
             editor.set_show_gutter(false, cx);
             editor.set_text_style_refinement(refinement);
+            editor.set_use_modal_editing(true);
             editor
         });
 
@@ -665,10 +663,10 @@ impl CodeCell {
             outputs: Vec::new(),
             selected: false,
             cell_position: None,
-            language_task,
             execution_start_time: None,
             execution_duration: None,
             is_executing: false,
+            _language_task: language_task,
         }
     }
 
@@ -700,9 +698,10 @@ impl CodeCell {
 
         let editor_view = cx.new(|cx| {
             let mut editor = Editor::new(
-                EditorMode::AutoHeight {
-                    min_lines: 1,
-                    max_lines: Some(1024),
+                EditorMode::Full {
+                    scale_ui_elements_with_buffer_font_size: false,
+                    show_active_line_background: false,
+                    sizing_behavior: SizingBehavior::SizeByContent,
                 },
                 multi_buffer,
                 None,
@@ -722,6 +721,7 @@ impl CodeCell {
             editor.set_text(source.clone(), window, cx);
             editor.set_show_gutter(false, cx);
             editor.set_text_style_refinement(refinement);
+            editor.set_use_modal_editing(true);
             editor
         });
 
@@ -741,10 +741,10 @@ impl CodeCell {
             outputs,
             selected: false,
             cell_position: None,
-            language_task,
             execution_start_time: None,
             execution_duration: None,
             is_executing: false,
+            _language_task: language_task,
         }
     }
 
@@ -872,15 +872,7 @@ impl CodeCell {
         cx.notify();
     }
 
-    fn output_control(&self) -> Option<CellControlType> {
-        if self.has_outputs() {
-            Some(CellControlType::ClearCell)
-        } else {
-            None
-        }
-    }
-
-    pub fn gutter_output(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+    pub fn gutter_output(&self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let is_selected = self.selected();
 
         div()
@@ -941,7 +933,7 @@ impl RenderableCell for CodeCell {
         &self.source
     }
 
-    fn control(&self, window: &mut Window, cx: &mut Context<Self>) -> Option<CellControl> {
+    fn control(&self, _window: &mut Window, cx: &mut Context<Self>) -> Option<CellControl> {
         let control_type = if self.has_outputs() {
             CellControlType::RerunCell
         } else {
@@ -1031,8 +1023,7 @@ impl RenderableCell for CodeCell {
 }
 
 impl RunnableCell for CodeCell {
-    fn run(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        println!("Running code cell: {}", self.id);
+    fn run(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
         cx.emit(CellEvent::Run(self.id.clone()));
     }
 
@@ -1055,11 +1046,8 @@ impl Render for CodeCell {
         } else {
             None
         };
-        let output_max_width = plain::max_width_for_columns(
-            ReplSettings::get_global(cx).output_max_width_columns,
-            window,
-            cx,
-        );
+        let output_max_width =
+            plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx);
         // get the language from the editor's buffer
         let language_name = self
             .editor
@@ -1117,71 +1105,6 @@ impl Render for CodeCell {
                         ),
                     ),
             )
-            // Output portion
-            .child(
-                h_flex()
-                    .w_full()
-                    .pr_6()
-                    .rounded_xs()
-                    .items_start()
-                    .gap(DynamicSpacing::Base08.rems(cx))
-                    .bg(self.selected_bg_color(window, cx))
-                    .child(self.gutter_output(window, cx))
-                    .child(
-                        div().py_1p5().w_full().child(
-                            div()
-                                .flex()
-                                .size_full()
-                                .flex_1()
-                                .py_3()
-                                .px_5()
-                                .rounded_lg()
-                                .border_1()
-                                .child(
-                                    div()
-                                        .id((ElementId::from(self.id.to_string()), "output-scroll"))
-                                        .w_full()
-                                        .when_some(output_max_width, |div, max_w| {
-                                            div.max_w(max_w).overflow_x_scroll()
-                                        })
-                                        .when_some(output_max_height, |div, max_h| {
-                                            div.max_h(max_h).overflow_y_scroll()
-                                        })
-                                        .children(self.outputs.iter().map(|output| {
-                                            let content = match output {
-                                                Output::Plain { content, .. } => {
-                                                    Some(content.clone().into_any_element())
-                                                }
-                                                Output::Markdown { content, .. } => {
-                                                    Some(content.clone().into_any_element())
-                                                }
-                                                Output::Stream { content, .. } => {
-                                                    Some(content.clone().into_any_element())
-                                                }
-                                                Output::Image { content, .. } => {
-                                                    Some(content.clone().into_any_element())
-                                                }
-                                                Output::Message(message) => Some(
-                                                    div().child(message.clone()).into_any_element(),
-                                                ),
-                                                Output::Table { content, .. } => {
-                                                    Some(content.clone().into_any_element())
-                                                }
-                                                Output::Json { content, .. } => {
-                                                    Some(content.clone().into_any_element())
-                                                }
-                                                Output::ErrorOutput(error_view) => {
-                                                    error_view.render(window, cx)
-                                                }
-                                                Output::ClearOutputWaitMarker => None,
-                                            };
-
-                                            div().children(content)
-                                        })),
-                                ),
-                        ),
-                    ),
-            )
             .when(
                 self.has_outputs() || self.execution_duration.is_some() || self.is_executing,
                 |this| {
@@ -1256,41 +1179,23 @@ impl Render for CodeCell {
                                             },
                                         )
                                         // output at bottom
-                                        .child(div().w_full().children(self.outputs.iter().map(
-                                            |output| {
-                                                let content = match output {
-                                                    Output::Plain { content, .. } => {
-                                                        Some(content.clone().into_any_element())
-                                                    }
-                                                    Output::Markdown { content, .. } => {
-                                                        Some(content.clone().into_any_element())
-                                                    }
-                                                    Output::Stream { content, .. } => {
-                                                        Some(content.clone().into_any_element())
-                                                    }
-                                                    Output::Image { content, .. } => {
-                                                        Some(content.clone().into_any_element())
-                                                    }
-                                                    Output::Message(message) => Some(
-                                                        div()
-                                                            .child(message.clone())
-                                                            .into_any_element(),
-                                                    ),
-                                                    Output::Table { content, .. } => {
-                                                        Some(content.clone().into_any_element())
-                                                    }
-                                                    Output::Json { content, .. } => {
-                                                        Some(content.clone().into_any_element())
-                                                    }
-                                                    Output::ErrorOutput(error_view) => {
-                                                        error_view.render(window, cx)
-                                                    }
-                                                    Output::ClearOutputWaitMarker => None,
-                                                };
-
-                                                div().children(content)
-                                            },
-                                        ))),
+                                        .child(
+                                            div()
+                                                .id((
+                                                    ElementId::from(self.id.to_string()),
+                                                    "output-scroll",
+                                                ))
+                                                .w_full()
+                                                .when_some(output_max_width, |div, max_width| {
+                                                    div.max_w(max_width).overflow_x_scroll()
+                                                })
+                                                .when_some(output_max_height, |div, max_height| {
+                                                    div.max_h(max_height).overflow_y_scroll()
+                                                })
+                                                .children(self.outputs.iter().map(|output| {
+                                                    div().children(output.content(window, cx))
+                                                })),
+                                        ),
                                 ),
                             ),
                     )

crates/repl/src/notebook/notebook_ui.rs 🔗

@@ -5,6 +5,7 @@ use std::{path::PathBuf, sync::Arc};
 use anyhow::{Context as _, Result};
 use client::proto::ViewId;
 use collections::HashMap;
+use editor::DisplayPoint;
 use feature_flags::{FeatureFlagAppExt as _, NotebookFeatureFlag};
 use futures::FutureExt;
 use futures::future::Shared;
@@ -40,6 +41,7 @@ use picker::Picker;
 use runtimelib::{ExecuteRequest, JupyterMessage, JupyterMessageContent};
 use ui::PopoverMenuHandle;
 use zed_actions::editor::{MoveDown, MoveUp};
+use zed_actions::notebook::{NotebookMoveDown, NotebookMoveUp};
 
 actions!(
     notebook,
@@ -1295,6 +1297,127 @@ impl Render for NotebookEditor {
                     }
                 }
             }))
+            .on_action(cx.listener(|this, _: &NotebookMoveDown, window, cx| {
+                let Some(cell_id) = this.cell_order.get(this.selected_cell_index) else {
+                    return;
+                };
+                let Some(cell) = this.cell_map.get(cell_id) else {
+                    return;
+                };
+
+                let editor = match cell {
+                    Cell::Code(cell) => cell.read(cx).editor().clone(),
+                    Cell::Markdown(cell) => cell.read(cx).editor().clone(),
+                    _ => return,
+                };
+
+                let is_at_last_line = editor.update(cx, |editor, cx| {
+                    let display_snapshot = editor.display_snapshot(cx);
+                    let selections = editor.selections.all_display(&display_snapshot);
+                    if let Some(selection) = selections.last() {
+                        let head = selection.head();
+                        let cursor_row = head.row();
+                        let max_row = display_snapshot.max_point().row();
+
+                        cursor_row >= max_row
+                    } else {
+                        false
+                    }
+                });
+
+                if is_at_last_line {
+                    this.select_next(&menu::SelectNext, window, cx);
+                    if let Some(cell_id) = this.cell_order.get(this.selected_cell_index) {
+                        if let Some(cell) = this.cell_map.get(cell_id) {
+                            match cell {
+                                Cell::Code(cell) => {
+                                    let editor = cell.read(cx).editor().clone();
+                                    editor.update(cx, |editor, cx| {
+                                        editor.move_to_beginning(&Default::default(), window, cx);
+                                    });
+                                    editor.focus_handle(cx).focus(window, cx);
+                                }
+                                Cell::Markdown(cell) => {
+                                    cell.update(cx, |cell, cx| {
+                                        cell.set_editing(true);
+                                        cx.notify();
+                                    });
+                                    let editor = cell.read(cx).editor().clone();
+                                    editor.update(cx, |editor, cx| {
+                                        editor.move_to_beginning(&Default::default(), window, cx);
+                                    });
+                                    editor.focus_handle(cx).focus(window, cx);
+                                }
+                                _ => {}
+                            }
+                        }
+                    }
+                } else {
+                    editor.update(cx, |editor, cx| {
+                        editor.move_down(&Default::default(), window, cx);
+                    });
+                }
+            }))
+            .on_action(cx.listener(|this, _: &NotebookMoveUp, window, cx| {
+                let Some(cell_id) = this.cell_order.get(this.selected_cell_index) else {
+                    return;
+                };
+                let Some(cell) = this.cell_map.get(cell_id) else {
+                    return;
+                };
+
+                let editor = match cell {
+                    Cell::Code(cell) => cell.read(cx).editor().clone(),
+                    Cell::Markdown(cell) => cell.read(cx).editor().clone(),
+                    _ => return,
+                };
+
+                let is_at_first_line = editor.update(cx, |editor, cx| {
+                    let display_snapshot = editor.display_snapshot(cx);
+                    let selections = editor.selections.all_display(&display_snapshot);
+                    if let Some(selection) = selections.first() {
+                        let head = selection.head();
+                        let cursor_row = head.row();
+
+                        cursor_row.0 == 0
+                    } else {
+                        false
+                    }
+                });
+
+                if is_at_first_line {
+                    this.select_previous(&menu::SelectPrevious, window, cx);
+                    if let Some(cell_id) = this.cell_order.get(this.selected_cell_index) {
+                        if let Some(cell) = this.cell_map.get(cell_id) {
+                            match cell {
+                                Cell::Code(cell) => {
+                                    let editor = cell.read(cx).editor().clone();
+                                    editor.update(cx, |editor, cx| {
+                                        editor.move_to_end(&Default::default(), window, cx);
+                                    });
+                                    editor.focus_handle(cx).focus(window, cx);
+                                }
+                                Cell::Markdown(cell) => {
+                                    cell.update(cx, |cell, cx| {
+                                        cell.set_editing(true);
+                                        cx.notify();
+                                    });
+                                    let editor = cell.read(cx).editor().clone();
+                                    editor.update(cx, |editor, cx| {
+                                        editor.move_to_end(&Default::default(), window, cx);
+                                    });
+                                    editor.focus_handle(cx).focus(window, cx);
+                                }
+                                _ => {}
+                            }
+                        }
+                    }
+                } else {
+                    editor.update(cx, |editor, cx| {
+                        editor.move_up(&Default::default(), window, cx);
+                    });
+                }
+            }))
             .on_action(
                 cx.listener(|this, action, window, cx| this.restart_kernel(action, window, cx)),
             )
@@ -1391,6 +1514,9 @@ impl project::ProjectItem for NotebookItem {
 
                             nbformat::upgrade_legacy_notebook(legacy_notebook)?
                         }
+                        nbformat::Notebook::V3(v3_notebook) => {
+                            nbformat::upgrade_v3_notebook(v3_notebook)?
+                        }
                     }
                 };
 
@@ -1668,6 +1794,9 @@ impl Item for NotebookEditor {
                 Ok(nbformat::Notebook::Legacy(legacy_notebook)) => {
                     nbformat::upgrade_legacy_notebook(legacy_notebook)?
                 }
+                Ok(nbformat::Notebook::V3(v3_notebook)) => {
+                    nbformat::upgrade_v3_notebook(v3_notebook)?
+                }
                 Err(e) => {
                     anyhow::bail!("Failed to parse notebook: {:?}", e);
                 }

crates/repl/src/outputs.rs 🔗

@@ -253,18 +253,8 @@ impl Output {
         )
     }
 
-    pub fn render(
-        &self,
-        workspace: WeakEntity<Workspace>,
-        window: &mut Window,
-        cx: &mut Context<ExecutionView>,
-    ) -> impl IntoElement + use<> {
-        let max_width = plain::max_width_for_columns(
-            ReplSettings::get_global(cx).output_max_width_columns,
-            window,
-            cx,
-        );
-        let content = match self {
+    pub fn content(&self, window: &mut Window, cx: &mut App) -> Option<AnyElement> {
+        match self {
             Self::Plain { content, .. } => Some(content.clone().into_any_element()),
             Self::Markdown { content, .. } => Some(content.clone().into_any_element()),
             Self::Stream { content, .. } => Some(content.clone().into_any_element()),
@@ -274,21 +264,36 @@ impl Output {
             Self::Json { content, .. } => Some(content.clone().into_any_element()),
             Self::ErrorOutput(error_view) => error_view.render(window, cx),
             Self::ClearOutputWaitMarker => None,
-        };
+        }
+    }
 
-        let needs_horizontal_scroll = matches!(self, Self::Table { .. } | Self::Image { .. });
+    pub fn render(
+        &self,
+        workspace: WeakEntity<Workspace>,
+        window: &mut Window,
+        cx: &mut Context<ExecutionView>,
+    ) -> impl IntoElement + use<> {
+        let max_width =
+            plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx);
+        let content = self.content(window, cx);
+
+        let needs_horizontal_scroll = matches!(self, Self::Table { .. });
 
         h_flex()
             .id("output-content")
             .w_full()
-            .when_some(max_width, |this, max_w| this.max_w(max_w))
-            .overflow_x_scroll()
+            .when_else(
+                needs_horizontal_scroll,
+                |this| this.overflow_x_scroll(),
+                |this| this.overflow_x_hidden(),
+            )
             .items_start()
             .child(
                 div()
                     .when(!needs_horizontal_scroll, |el| {
                         el.flex_1().w_full().overflow_x_hidden()
                     })
+                    .when_some(max_width, |el, max_width| el.max_w(max_width))
                     .children(content),
             )
             .children(match self {

crates/repl/src/outputs/image.rs 🔗

@@ -3,10 +3,10 @@ use base64::{
     Engine as _, alphabet,
     engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig},
 };
-use gpui::{App, ClipboardItem, Image, ImageFormat, RenderImage, Window, img};
+use gpui::{App, ClipboardItem, Image, ImageFormat, Pixels, RenderImage, Window, img};
 use settings::Settings as _;
 use std::sync::Arc;
-use ui::{IntoElement, Styled, div, prelude::*};
+use ui::{IntoElement, Styled, prelude::*};
 
 use crate::outputs::{OutputContent, plain};
 use crate::repl_settings::ReplSettings;
@@ -113,7 +113,7 @@ impl Render for ImageView {
         let settings = ReplSettings::get_global(cx);
         let line_height = window.line_height();
 
-        let max_width = plain::max_width_for_columns(settings.output_max_width_columns, window, cx);
+        let max_width = plain::max_width_for_columns(settings.max_columns, window, cx);
 
         let max_height = if settings.output_max_height_lines > 0 {
             Some(line_height * settings.output_max_height_lines as f32)
@@ -125,7 +125,7 @@ impl Render for ImageView {
 
         let image = self.image.clone();
 
-        div().h(height).w(width).child(img(image))
+        img(image).w(width).h(height)
     }
 }
 

crates/repl/src/outputs/plain.rs 🔗

@@ -22,7 +22,7 @@ use alacritty_terminal::{
     term::Config,
     vte::ansi::Processor,
 };
-use gpui::{Bounds, ClipboardItem, Entity, FontStyle, TextStyle, WhiteSpace, canvas, size};
+use gpui::{Bounds, ClipboardItem, Entity, FontStyle, Pixels, TextStyle, WhiteSpace, canvas, size};
 use language::Buffer;
 use settings::Settings as _;
 use terminal::terminal_settings::TerminalSettings;

crates/repl/src/repl_editor.rs 🔗

@@ -636,12 +636,9 @@ fn language_supported(language: &Arc<Language>, cx: &mut App) -> bool {
     let store = ReplStore::global(cx);
     let store_read = store.read(cx);
 
-    // Since we're just checking for general language support, we only need to look at
-    // the pure Jupyter kernels - these are all the globally available ones
-    store_read.pure_jupyter_kernel_specifications().any(|spec| {
-        // Convert to lowercase for case-insensitive comparison since kernels might report "python" while our language is "Python"
-        spec.language().as_ref().to_lowercase() == language.name().as_ref().to_lowercase()
-    })
+    store_read
+        .pure_jupyter_kernel_specifications()
+        .any(|spec| language.matches_kernel_language(spec.language().as_ref()))
 }
 
 fn get_language(editor: WeakEntity<Editor>, cx: &mut App) -> Option<Arc<Language>> {

crates/repl/src/repl_settings.rs 🔗

@@ -27,11 +27,6 @@ pub struct ReplSettings {
     ///
     /// Default: 0
     pub output_max_height_lines: usize,
-    /// Maximum number of columns of output to display before scaling images.
-    /// Set to 0 to disable output width limits.
-    ///
-    /// Default: 0
-    pub output_max_width_columns: usize,
 }
 
 impl Settings for ReplSettings {
@@ -44,7 +39,6 @@ impl Settings for ReplSettings {
             inline_output: repl.inline_output.unwrap_or(true),
             inline_output_max_length: repl.inline_output_max_length.unwrap_or(50),
             output_max_height_lines: repl.output_max_height_lines.unwrap_or(0),
-            output_max_width_columns: repl.output_max_width_columns.unwrap_or(0),
         }
     }
 }

crates/repl/src/repl_store.rs 🔗

@@ -289,7 +289,6 @@ impl ReplStore {
         }
 
         let language_at_cursor = language_at_cursor?;
-        let language_name = language_at_cursor.code_fence_block_name().to_lowercase();
 
         // Prefer the recommended (active toolchain) kernel if it has ipykernel
         if let Some(active_path) = self.active_python_toolchain_path(worktree_id) {
@@ -297,7 +296,7 @@ impl ReplStore {
                 .kernel_specifications_for_worktree(worktree_id)
                 .find(|spec| {
                     spec.has_ipykernel()
-                        && spec.language().as_ref().to_lowercase() == language_name
+                        && language_at_cursor.matches_kernel_language(spec.language().as_ref())
                         && spec.path().as_ref() == active_path.as_ref()
                 })
                 .cloned();
@@ -312,7 +311,7 @@ impl ReplStore {
             .find(|spec| {
                 matches!(spec, KernelSpecification::PythonEnv(_))
                     && spec.has_ipykernel()
-                    && spec.language().as_ref().to_lowercase() == language_name
+                    && language_at_cursor.matches_kernel_language(spec.language().as_ref())
             })
             .cloned();
         if python_env.is_some() {
@@ -350,10 +349,10 @@ impl ReplStore {
             return Some(found_by_name);
         }
 
-        let language_name = language_at_cursor.code_fence_block_name().to_lowercase();
         self.kernel_specifications_for_worktree(worktree_id)
             .find(|spec| {
-                spec.has_ipykernel() && spec.language().as_ref().to_lowercase() == language_name
+                spec.has_ipykernel()
+                    && language_at_cursor.matches_kernel_language(spec.language().as_ref())
             })
             .cloned()
     }

crates/reqwest_client/Cargo.toml 🔗

@@ -20,13 +20,15 @@ anyhow.workspace = true
 bytes.workspace = true
 futures.workspace = true
 http_client.workspace = true
-http_client_tls.workspace = true
 serde.workspace = true
 log.workspace = true
 tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
 regex.workspace = true
 reqwest.workspace = true
-util.workspace = true
+gpui_util.workspace = true
+
+[target.'cfg(not(target_family = "wasm"))'.dependencies]
+http_client_tls.workspace = true
 
 [dev-dependencies]
 gpui.workspace = true

crates/reqwest_client/src/reqwest_client.rs 🔗

@@ -2,7 +2,7 @@ use std::error::Error;
 use std::sync::{LazyLock, OnceLock};
 use std::{borrow::Cow, mem, pin::Pin, task::Poll, time::Duration};
 
-use util::defer;
+use gpui_util::defer;
 
 use anyhow::anyhow;
 use bytes::{BufMut, Bytes, BytesMut};

crates/rope/src/chunk.rs 🔗

@@ -102,6 +102,11 @@ impl Chunk {
         self.append(Chunk::new(text).as_slice());
     }
 
+    #[inline(always)]
+    pub fn prepend_str(&mut self, text: &str) {
+        self.prepend(Chunk::new(text).as_slice());
+    }
+
     #[inline(always)]
     pub fn append(&mut self, slice: ChunkSlice) {
         if slice.is_empty() {
@@ -116,6 +121,28 @@ impl Chunk {
         self.text.push_str(slice.text);
     }
 
+    #[inline(always)]
+    pub fn prepend(&mut self, slice: ChunkSlice) {
+        if slice.is_empty() {
+            return;
+        }
+        if self.text.is_empty() {
+            *self = Chunk::new(slice.text);
+            return;
+        }
+
+        let shift = slice.text.len();
+        self.chars = slice.chars | (self.chars << shift);
+        self.chars_utf16 = slice.chars_utf16 | (self.chars_utf16 << shift);
+        self.newlines = slice.newlines | (self.newlines << shift);
+        self.tabs = slice.tabs | (self.tabs << shift);
+
+        let mut new_text = ArrayString::<MAX_BASE>::new();
+        new_text.push_str(slice.text);
+        new_text.push_str(&self.text);
+        self.text = new_text;
+    }
+
     #[inline(always)]
     pub fn as_slice(&self) -> ChunkSlice<'_> {
         ChunkSlice {
@@ -890,6 +917,24 @@ mod tests {
         verify_chunk(chunk1.as_slice(), &(str1 + &str2[start_offset..end_offset]));
     }
 
+    #[gpui::test(iterations = 1000)]
+    fn test_prepend_random_strings(mut rng: StdRng) {
+        let len1 = rng.random_range(0..=MAX_BASE);
+        let len2 = rng.random_range(0..=MAX_BASE).saturating_sub(len1);
+        let str1 = random_string_with_utf8_len(&mut rng, len1);
+        let str2 = random_string_with_utf8_len(&mut rng, len2);
+        let mut chunk1 = Chunk::new(&str1);
+        let chunk2 = Chunk::new(&str2);
+        let char_offsets = char_offsets_with_end(&str2);
+        let start_index = rng.random_range(0..char_offsets.len());
+        let start_offset = char_offsets[start_index];
+        let end_offset = char_offsets[rng.random_range(start_index..char_offsets.len())];
+        let slice = chunk2.slice(start_offset..end_offset);
+        let prefix_text = &str2[start_offset..end_offset];
+        chunk1.prepend(slice);
+        verify_chunk(chunk1.as_slice(), &(prefix_text.to_owned() + &str1));
+    }
+
     /// Return the byte offsets for each character in a string.
     ///
     /// These are valid offsets to split the string.

crates/rope/src/rope.rs 🔗

@@ -167,6 +167,11 @@ impl Rope {
             (),
         );
 
+        if text.is_empty() {
+            self.check_invariants();
+            return;
+        }
+
         #[cfg(all(test, not(rust_analyzer)))]
         const NUM_CHUNKS: usize = 16;
         #[cfg(not(all(test, not(rust_analyzer))))]
@@ -269,6 +274,23 @@ impl Rope {
     }
 
     pub fn push_front(&mut self, text: &str) {
+        if text.is_empty() {
+            return;
+        }
+        if self.is_empty() {
+            self.push(text);
+            return;
+        }
+        if self
+            .chunks
+            .first()
+            .is_some_and(|c| c.text.len() + text.len() <= chunk::MAX_BASE)
+        {
+            self.chunks
+                .update_first(|first_chunk| first_chunk.prepend_str(text), ());
+            self.check_invariants();
+            return;
+        }
         let suffix = mem::replace(self, Rope::from(text));
         self.append(suffix);
     }
@@ -548,6 +570,48 @@ impl Rope {
         }
     }
 
+    pub fn starts_with(&self, pattern: &str) -> bool {
+        if pattern.len() > self.len() {
+            return false;
+        }
+        let mut remaining = pattern;
+        for chunk in self.chunks_in_range(0..self.len()) {
+            let Some(chunk) = chunk.get(..remaining.len().min(chunk.len())) else {
+                return false;
+            };
+            if remaining.starts_with(chunk) {
+                remaining = &remaining[chunk.len()..];
+                if remaining.is_empty() {
+                    return true;
+                }
+            } else {
+                return false;
+            }
+        }
+        remaining.is_empty()
+    }
+
+    pub fn ends_with(&self, pattern: &str) -> bool {
+        if pattern.len() > self.len() {
+            return false;
+        }
+        let mut remaining = pattern;
+        for chunk in self.reversed_chunks_in_range(0..self.len()) {
+            let Some(chunk) = chunk.get(chunk.len() - remaining.len().min(chunk.len())..) else {
+                return false;
+            };
+            if remaining.ends_with(chunk) {
+                remaining = &remaining[..remaining.len() - chunk.len()];
+                if remaining.is_empty() {
+                    return true;
+                }
+            } else {
+                return false;
+            }
+        }
+        remaining.is_empty()
+    }
+
     pub fn line_len(&self, row: u32) -> u32 {
         self.clip_point(Point::new(row, u32::MAX), Bias::Left)
             .column
@@ -2168,6 +2232,74 @@ mod tests {
         assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo"));
     }
 
+    #[test]
+    fn test_starts_with() {
+        let text = "Hello, world! 🌍🌎🌏";
+        let rope = Rope::from(text);
+
+        assert!(rope.starts_with(""));
+        assert!(rope.starts_with("H"));
+        assert!(rope.starts_with("Hello"));
+        assert!(rope.starts_with("Hello, world! 🌍🌎🌏"));
+        assert!(!rope.starts_with("ello"));
+        assert!(!rope.starts_with("Hello, world! 🌍🌎🌏!"));
+
+        let empty_rope = Rope::from("");
+        assert!(empty_rope.starts_with(""));
+        assert!(!empty_rope.starts_with("a"));
+    }
+
+    #[test]
+    fn test_ends_with() {
+        let text = "Hello, world! 🌍🌎🌏";
+        let rope = Rope::from(text);
+
+        assert!(rope.ends_with(""));
+        assert!(rope.ends_with("🌏"));
+        assert!(rope.ends_with("🌍🌎🌏"));
+        assert!(rope.ends_with("Hello, world! 🌍🌎🌏"));
+        assert!(!rope.ends_with("🌎"));
+        assert!(!rope.ends_with("!Hello, world! 🌍🌎🌏"));
+
+        let empty_rope = Rope::from("");
+        assert!(empty_rope.ends_with(""));
+        assert!(!empty_rope.ends_with("a"));
+    }
+
+    #[test]
+    fn test_starts_with_ends_with_random() {
+        let mut rng = StdRng::seed_from_u64(0);
+        for _ in 0..100 {
+            let len = rng.random_range(0..100);
+            let text: String = RandomCharIter::new(&mut rng).take(len).collect();
+            let rope = Rope::from(text.as_str());
+
+            for _ in 0..10 {
+                let start = rng.random_range(0..=text.len());
+                let start = text.ceil_char_boundary(start);
+                let end = rng.random_range(start..=text.len());
+                let end = text.ceil_char_boundary(end);
+                let prefix = &text[..end];
+                let suffix = &text[start..];
+
+                assert_eq!(
+                    rope.starts_with(prefix),
+                    text.starts_with(prefix),
+                    "starts_with mismatch for {:?} in {:?}",
+                    prefix,
+                    text
+                );
+                assert_eq!(
+                    rope.ends_with(suffix),
+                    text.ends_with(suffix),
+                    "ends_with mismatch for {:?} in {:?}",
+                    suffix,
+                    text
+                );
+            }
+        }
+    }
+
     #[test]
     fn test_is_char_boundary() {
         let fixture = "地";
@@ -2229,6 +2361,119 @@ mod tests {
         }
     }
 
+    #[test]
+    fn test_push_front_empty_text_on_empty_rope() {
+        let mut rope = Rope::new();
+        rope.push_front("");
+        assert_eq!(rope.text(), "");
+        assert_eq!(rope.len(), 0);
+    }
+
+    #[test]
+    fn test_push_front_empty_text_on_nonempty_rope() {
+        let mut rope = Rope::from("hello");
+        rope.push_front("");
+        assert_eq!(rope.text(), "hello");
+    }
+
+    #[test]
+    fn test_push_front_on_empty_rope() {
+        let mut rope = Rope::new();
+        rope.push_front("hello");
+        assert_eq!(rope.text(), "hello");
+        assert_eq!(rope.len(), 5);
+        assert_eq!(rope.max_point(), Point::new(0, 5));
+    }
+
+    #[test]
+    fn test_push_front_single_space() {
+        let mut rope = Rope::from("hint");
+        rope.push_front(" ");
+        assert_eq!(rope.text(), " hint");
+        assert_eq!(rope.len(), 5);
+    }
+
+    #[gpui::test(iterations = 50)]
+    fn test_push_front_random(mut rng: StdRng) {
+        let initial_len = rng.random_range(0..=64);
+        let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect();
+        let mut rope = Rope::from(initial_text.as_str());
+
+        let mut expected = initial_text;
+
+        for _ in 0..rng.random_range(1..=10) {
+            let prefix_len = rng.random_range(0..=32);
+            let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect();
+
+            rope.push_front(&prefix);
+            expected.insert_str(0, &prefix);
+
+            assert_eq!(
+                rope.text(),
+                expected,
+                "text mismatch after push_front({:?})",
+                prefix
+            );
+            assert_eq!(rope.len(), expected.len());
+
+            let actual_summary = rope.summary();
+            let expected_summary = TextSummary::from(expected.as_str());
+            assert_eq!(
+                actual_summary.len, expected_summary.len,
+                "len mismatch for {:?}",
+                expected
+            );
+            assert_eq!(
+                actual_summary.lines, expected_summary.lines,
+                "lines mismatch for {:?}",
+                expected
+            );
+            assert_eq!(
+                actual_summary.chars, expected_summary.chars,
+                "chars mismatch for {:?}",
+                expected
+            );
+            assert_eq!(
+                actual_summary.longest_row, expected_summary.longest_row,
+                "longest_row mismatch for {:?}",
+                expected
+            );
+
+            // Verify offset-to-point and point-to-offset round-trip at boundaries.
+            for (ix, _) in expected.char_indices().chain(Some((expected.len(), '\0'))) {
+                assert_eq!(
+                    rope.point_to_offset(rope.offset_to_point(ix)),
+                    ix,
+                    "offset round-trip failed at {} for {:?}",
+                    ix,
+                    expected
+                );
+            }
+        }
+    }
+
+    #[gpui::test(iterations = 50)]
+    fn test_push_front_large_prefix(mut rng: StdRng) {
+        let initial_len = rng.random_range(0..=32);
+        let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect();
+        let mut rope = Rope::from(initial_text.as_str());
+
+        let prefix_len = rng.random_range(64..=256);
+        let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect();
+
+        rope.push_front(&prefix);
+        let expected = format!("{}{}", prefix, initial_text);
+
+        assert_eq!(rope.text(), expected);
+        assert_eq!(rope.len(), expected.len());
+
+        let actual_summary = rope.summary();
+        let expected_summary = TextSummary::from(expected.as_str());
+        assert_eq!(actual_summary.len, expected_summary.len);
+        assert_eq!(actual_summary.lines, expected_summary.lines);
+        assert_eq!(actual_summary.chars, expected_summary.chars);
+    }
+
     fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize {
         while !text.is_char_boundary(offset) {
             match bias {

crates/rules_library/src/rules_library.rs 🔗

@@ -222,7 +222,7 @@ impl PickerDelegate for RulePickerDelegate {
         cx.notify();
     }
 
-    fn can_select(&mut self, ix: usize, _: &mut Window, _: &mut Context<Picker<Self>>) -> bool {
+    fn can_select(&self, ix: usize, _: &mut Window, _: &mut Context<Picker<Self>>) -> bool {
         match self.filtered_entries.get(ix) {
             Some(RulePickerEntry::Rule(_)) => true,
             Some(RulePickerEntry::Header(_)) | Some(RulePickerEntry::Separator) | None => false,
@@ -1106,6 +1106,7 @@ impl RulesLibrary {
                                     temperature: None,
                                     thinking_allowed: true,
                                     thinking_effort: None,
+                                    speed: None,
                                 },
                                 cx,
                             )

crates/scheduler/Cargo.toml 🔗

@@ -23,3 +23,4 @@ flume = "0.11"
 futures.workspace = true
 parking_lot.workspace = true
 rand.workspace = true
+web-time.workspace = true

crates/scheduler/src/clock.rs 🔗

@@ -1,6 +1,8 @@
 use chrono::{DateTime, Utc};
 use parking_lot::Mutex;
-use std::time::{Duration, Instant};
+use std::time::Duration;
+
+pub use web_time::Instant;
 
 pub trait Clock {
     fn utc_now(&self) -> DateTime<Utc>;

crates/scheduler/src/executor.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{Priority, RunnableMeta, Scheduler, SessionId, Timer};
+use crate::{Instant, Priority, RunnableMeta, Scheduler, SessionId, Timer};
 use std::{
     future::Future,
     marker::PhantomData,
@@ -12,7 +12,7 @@ use std::{
     },
     task::{Context, Poll},
     thread::{self, ThreadId},
-    time::{Duration, Instant},
+    time::Duration,
 };
 
 #[derive(Clone)]
@@ -372,8 +372,9 @@ where
 
     impl<F> Drop for Checked<F> {
         fn drop(&mut self) {
-            assert!(
-                self.id == thread_id(),
+            assert_eq!(
+                self.id,
+                thread_id(),
                 "local task dropped by a thread that didn't spawn it. Task spawned at {}",
                 self.location
             );

crates/scheduler/src/test_scheduler.rs 🔗

@@ -1,6 +1,6 @@
 use crate::{
-    BackgroundExecutor, Clock, ForegroundExecutor, Priority, RunnableMeta, Scheduler, SessionId,
-    TestClock, Timer,
+    BackgroundExecutor, Clock, ForegroundExecutor, Instant, Priority, RunnableMeta, Scheduler,
+    SessionId, TestClock, Timer,
 };
 use async_task::Runnable;
 use backtrace::{Backtrace, BacktraceFrame};
@@ -26,7 +26,7 @@ use std::{
     },
     task::{Context, Poll, RawWaker, RawWakerVTable, Waker},
     thread::{self, Thread},
-    time::{Duration, Instant},
+    time::Duration,
 };
 
 const PENDING_TRACES_VAR_NAME: &str = "PENDING_TRACES";
@@ -57,7 +57,7 @@ impl TestScheduler {
             .map(|seed| seed.parse().unwrap())
             .unwrap_or(0);
 
-        (seed..num_iterations as u64)
+        (seed..seed + num_iterations as u64)
             .map(|seed| {
                 let mut unwind_safe_f = AssertUnwindSafe(&mut f);
                 eprintln!("Running seed: {seed}");
@@ -335,6 +335,28 @@ impl TestScheduler {
         false
     }
 
+    /// Drops all runnable tasks from the scheduler.
+    ///
+    /// This is used by the leak detector to ensure that all tasks have been dropped, since lingering tasks may keep entities alive.
+    /// Why are tasks still scheduled when a test finishes? The scheduler itself is the executor, so it retains the scheduled runnables.
+    /// Many tasks, including every foreground task, contain an executor handle that keeps the test scheduler alive, creating a reference cycle — hence the need for this function.
+    pub fn drain_tasks(&self) {
+        // Dropping runnables may reschedule tasks,
+        // because Drop impls can hold executors and spawn new work,
+        // so keep dropping until we reach a fixed point.
+        loop {
+            let mut state = self.state.lock();
+            if state.runnables.is_empty() && state.timers.is_empty() {
+                break;
+            }
+            let runnables = std::mem::take(&mut state.runnables);
+            let timers = std::mem::take(&mut state.timers);
+            drop(state);
+            drop(timers);
+            drop(runnables);
+        }
+    }
+
     pub fn advance_clock_to_next_timer(&self) -> bool {
         if let Some(timer) = self.state.lock().timers.first() {
             self.clock.advance(timer.expiration - self.clock.now());

crates/scheduler/src/tests.rs 🔗

@@ -290,6 +290,31 @@ fn test_helper_methods() {
     assert_eq!(results, vec![10, 10, 10]);
 }
 
+#[test]
+fn test_many_with_arbitrary_seed() {
+    for seed in [0u64, 1, 5, 42] {
+        let mut seeds_seen = Vec::new();
+        let iterations = 3usize;
+
+        for current_seed in seed..seed + iterations as u64 {
+            let scheduler = Arc::new(TestScheduler::new(TestSchedulerConfig::with_seed(
+                current_seed,
+            )));
+            let captured_seed = current_seed;
+            scheduler
+                .foreground()
+                .block_on(async { seeds_seen.push(captured_seed) });
+            scheduler.run();
+        }
+
+        assert_eq!(
+            seeds_seen,
+            (seed..seed + iterations as u64).collect::<Vec<_>>(),
+            "Expected {iterations} iterations starting at seed {seed}"
+        );
+    }
+}
+
 #[test]
 fn test_block_with_timeout() {
     // Test case: future completes within timeout

crates/search/src/buffer_search.rs 🔗

@@ -18,7 +18,7 @@ use editor::{
 };
 use futures::channel::oneshot;
 use gpui::{
-    Action, App, ClickEvent, Context, Entity, EventEmitter, Focusable, InteractiveElement as _,
+    App, ClickEvent, Context, Entity, EventEmitter, Focusable, InteractiveElement as _,
     IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, Styled, Subscription, Task,
     WeakEntity, Window, div,
 };
@@ -117,18 +117,17 @@ impl Render for BufferSearchBar {
                                 .toggle_state(!is_split)
                                 .tooltip(Tooltip::element(move |_, cx| {
                                     v_flex()
-                                        .gap_1()
-                                        .child(Label::new("Unified"))
+                                        .child("Unified")
                                         .child(
                                             h_flex()
                                                 .gap_0p5()
-                                                .text_sm()
+                                                .text_ui_sm(cx)
                                                 .text_color(Color::Muted.color(cx))
                                                 .children(render_modifiers(
                                                     &gpui::Modifiers::secondary_key(),
                                                     PlatformStyle::platform(),
                                                     None,
-                                                    Some(TextSize::Default.rems(cx).into()),
+                                                    Some(TextSize::Small.rems(cx).into()),
                                                     false,
                                                 ))
                                                 .child("click to set as default"),
@@ -168,18 +167,17 @@ impl Render for BufferSearchBar {
                                 .toggle_state(is_split)
                                 .tooltip(Tooltip::element(move |_, cx| {
                                     v_flex()
-                                        .gap_1()
-                                        .child(Label::new("Split"))
+                                        .child("Split")
                                         .child(
                                             h_flex()
                                                 .gap_0p5()
-                                                .text_sm()
+                                                .text_ui_sm(cx)
                                                 .text_color(Color::Muted.color(cx))
                                                 .children(render_modifiers(
                                                     &gpui::Modifiers::secondary_key(),
                                                     PlatformStyle::platform(),
                                                     None,
-                                                    Some(TextSize::Default.rems(cx).into()),
+                                                    Some(TextSize::Small.rems(cx).into()),
                                                     false,
                                                 ))
                                                 .child("click to set as default"),
@@ -245,9 +243,9 @@ impl Render for BufferSearchBar {
                             cx,
                         )
                     })
-                    .on_click(|_event, window, cx| {
-                        window.dispatch_action(ToggleFoldAll.boxed_clone(), cx)
-                    })
+                    .on_click(cx.listener(|this, _: &ClickEvent, window, cx| {
+                        this.toggle_fold_all(&ToggleFoldAll, window, cx);
+                    }))
             };
 
             if self.dismissed {
@@ -1876,7 +1874,7 @@ mod tests {
 
     use super::*;
     use editor::{
-        DisplayPoint, Editor, ExcerptRange, MultiBuffer, SearchSettings, SelectionEffects,
+        DisplayPoint, Editor, MultiBuffer, PathKey, SearchSettings, SelectionEffects,
         display_map::DisplayRow, test::editor_test_context::EditorTestContext,
     };
     use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext};
@@ -1934,14 +1932,18 @@ mod tests {
             let mut buffer = MultiBuffer::new(language::Capability::ReadWrite);
 
             //[ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))]
-            buffer.push_excerpts(
+            buffer.set_excerpts_for_path(
+                PathKey::sorted(0),
                 buffer1,
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0))],
+                [Point::new(0, 0)..Point::new(3, 0)],
+                0,
                 cx,
             );
-            buffer.push_excerpts(
+            buffer.set_excerpts_for_path(
+                PathKey::sorted(1),
                 buffer2,
-                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+                [Point::new(0, 0)..Point::new(1, 0)],
+                0,
                 cx,
             );
 

crates/settings/src/settings.rs 🔗

@@ -44,9 +44,9 @@ pub use keymap_file::{
 pub use settings_file::*;
 pub use settings_json::*;
 pub use settings_store::{
-    InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, LocalSettingsKind, LocalSettingsPath,
-    MigrationStatus, Settings, SettingsFile, SettingsJsonSchemaParams, SettingsKey,
-    SettingsLocation, SettingsParseResult, SettingsStore,
+    DefaultSemanticTokenRules, InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX,
+    LocalSettingsKind, LocalSettingsPath, MigrationStatus, Settings, SettingsFile,
+    SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsParseResult, SettingsStore,
 };
 
 pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource};

crates/settings/src/settings_store.rs 🔗

@@ -241,6 +241,11 @@ impl LocalSettingsPath {
 
 impl Global for SettingsStore {}
 
+#[derive(Default)]
+pub struct DefaultSemanticTokenRules(pub SemanticTokenRules);
+
+impl gpui::Global for DefaultSemanticTokenRules {}
+
 #[doc(hidden)]
 #[derive(Debug)]
 pub struct SettingValue<T> {
@@ -275,29 +280,22 @@ pub struct SettingsJsonSchemaParams<'a> {
 
 impl SettingsStore {
     pub fn new(cx: &mut App, default_settings: &str) -> Self {
-        Self::new_with_semantic_tokens(cx, default_settings, &crate::default_semantic_token_rules())
+        Self::new_with_semantic_tokens(cx, default_settings)
     }
 
-    pub fn new_with_semantic_tokens(
-        cx: &mut App,
-        default_settings: &str,
-        default_semantic_tokens: &str,
-    ) -> Self {
+    pub fn new_with_semantic_tokens(cx: &mut App, default_settings: &str) -> Self {
         let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded();
-        let mut default_settings: SettingsContent =
+        let default_settings: SettingsContent =
             SettingsContent::parse_json_with_comments(default_settings).unwrap();
-        if let Ok(semantic_token_rules) =
-            crate::parse_json_with_comments::<SemanticTokenRules>(default_semantic_tokens)
-        {
-            let global_lsp = default_settings
-                .global_lsp_settings
-                .get_or_insert_with(Default::default);
-            let existing_rules = global_lsp
-                .semantic_token_rules
-                .get_or_insert_with(Default::default);
-            existing_rules.rules.extend(semantic_token_rules.rules);
+        if !cx.has_global::<DefaultSemanticTokenRules>() {
+            cx.set_global::<DefaultSemanticTokenRules>(
+                crate::parse_json_with_comments::<SemanticTokenRules>(
+                    &crate::default_semantic_token_rules(),
+                )
+                .map(DefaultSemanticTokenRules)
+                .unwrap_or_default(),
+            );
         }
-
         let default_settings: Rc<SettingsContent> = default_settings.into();
         let mut this = Self {
             setting_values: Default::default(),
@@ -868,18 +866,30 @@ impl SettingsStore {
     /// Sets language-specific semantic token rules.
     ///
     /// These rules are registered by language modules (e.g. the Rust language module)
-    /// and are stored separately from the global rules. They are only applied to
-    /// buffers of the matching language by the `SemanticTokenStylizer`.
+    /// or by third-party extensions (via `semantic_token_rules.json` in their language
+    /// directories). They are stored separately from the global rules and are only
+    /// applied to buffers of the matching language by the `SemanticTokenStylizer`.
     ///
-    /// These should be registered before any `SemanticTokenStylizer` instances are
-    /// created (typically during `languages::init`), as existing cached stylizers
-    /// are not automatically invalidated.
+    /// This triggers a settings recomputation so that observers (e.g. `LspStore`)
+    /// are notified and can invalidate cached stylizers.
     pub fn set_language_semantic_token_rules(
         &mut self,
         language: SharedString,
         rules: SemanticTokenRules,
+        cx: &mut App,
     ) {
         self.language_semantic_token_rules.insert(language, rules);
+        self.recompute_values(None, cx);
+    }
+
+    /// Removes language-specific semantic token rules for the given language.
+    ///
+    /// This should be called when an extension that registered rules for a language
+    /// is unloaded. Triggers a settings recomputation so that observers (e.g.
+    /// `LspStore`) are notified and can invalidate cached stylizers.
+    pub fn remove_language_semantic_token_rules(&mut self, language: &str, cx: &mut App) {
+        self.language_semantic_token_rules.remove(language);
+        self.recompute_values(None, cx);
     }
 
     /// Returns the language-specific semantic token rules for the given language,
@@ -1696,7 +1706,7 @@ mod tests {
             r#"{
                 "languages": {
                     "JSON": {
-                        "auto_indent": true
+                        "auto_indent": "syntax_aware"
                     }
                 }
             }"#
@@ -1706,12 +1716,12 @@ mod tests {
                     .languages_mut()
                     .get_mut("JSON")
                     .unwrap()
-                    .auto_indent = Some(false);
+                    .auto_indent = Some(crate::AutoIndentMode::None);
 
                 settings.languages_mut().insert(
                     "Rust".into(),
                     LanguageSettingsContent {
-                        auto_indent: Some(true),
+                        auto_indent: Some(crate::AutoIndentMode::SyntaxAware),
                         ..Default::default()
                     },
                 );
@@ -1719,10 +1729,10 @@ mod tests {
             r#"{
                 "languages": {
                     "Rust": {
-                        "auto_indent": true
+                        "auto_indent": "syntax_aware"
                     },
                     "JSON": {
-                        "auto_indent": false
+                        "auto_indent": "none"
                     }
                 }
             }"#

crates/settings/src/vscode_import.rs 🔗

@@ -801,6 +801,7 @@ impl VsCodeSettings {
             starts_open: None,
             sticky_scroll: None,
             auto_open: None,
+            diagnostic_badges: None,
         };
 
         if let (Some(false), Some(false)) = (

crates/settings_content/src/agent.rs 🔗

@@ -290,6 +290,7 @@ impl JsonSchema for LanguageModelProviderSetting {
                         "openai",
                         "openrouter",
                         "vercel",
+                        "vercel_ai_gateway",
                         "x_ai",
                         "zed.dev"
                     ]
@@ -316,73 +317,21 @@ impl From<&str> for LanguageModelProviderSetting {
 
 #[with_fallible_options]
 #[derive(Default, PartialEq, Deserialize, Serialize, Clone, JsonSchema, MergeFrom, Debug)]
-pub struct AllAgentServersSettings {
-    pub gemini: Option<BuiltinAgentServerSettings>,
-    pub claude: Option<BuiltinAgentServerSettings>,
-    pub codex: Option<BuiltinAgentServerSettings>,
-
-    /// Custom agent servers configured by the user
-    #[serde(flatten)]
-    pub custom: HashMap<String, CustomAgentServerSettings>,
+#[serde(transparent)]
+pub struct AllAgentServersSettings(pub HashMap<String, CustomAgentServerSettings>);
+
+impl std::ops::Deref for AllAgentServersSettings {
+    type Target = HashMap<String, CustomAgentServerSettings>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
 }
 
-#[with_fallible_options]
-#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, MergeFrom, Debug, PartialEq)]
-pub struct BuiltinAgentServerSettings {
-    /// Absolute path to a binary to be used when launching this agent.
-    ///
-    /// This can be used to run a specific binary without automatic downloads or searching `$PATH`.
-    #[serde(rename = "command")]
-    pub path: Option<PathBuf>,
-    /// If a binary is specified in `command`, it will be passed these arguments.
-    pub args: Option<Vec<String>>,
-    /// If a binary is specified in `command`, it will be passed these environment variables.
-    pub env: Option<HashMap<String, String>>,
-    /// Whether to skip searching `$PATH` for an agent server binary when
-    /// launching this agent.
-    ///
-    /// This has no effect if a `command` is specified. Otherwise, when this is
-    /// `false`, Zed will search `$PATH` for an agent server binary and, if one
-    /// is found, use it for threads with this agent. If no agent binary is
-    /// found on `$PATH`, Zed will automatically install and use its own binary.
-    /// When this is `true`, Zed will not search `$PATH`, and will always use
-    /// its own binary.
-    ///
-    /// Default: true
-    pub ignore_system_version: Option<bool>,
-    /// The default mode to use for this agent.
-    ///
-    /// Note: Not only all agents support modes.
-    ///
-    /// Default: None
-    pub default_mode: Option<String>,
-    /// The default model to use for this agent.
-    ///
-    /// This should be the model ID as reported by the agent.
-    ///
-    /// Default: None
-    pub default_model: Option<String>,
-    /// The favorite models for this agent.
-    ///
-    /// These are the model IDs as reported by the agent.
-    ///
-    /// Default: []
-    #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub favorite_models: Vec<String>,
-    /// Default values for session config options.
-    ///
-    /// This is a map from config option ID to value ID.
-    ///
-    /// Default: {}
-    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
-    pub default_config_options: HashMap<String, String>,
-    /// Favorited values for session config options.
-    ///
-    /// This is a map from config option ID to a list of favorited value IDs.
-    ///
-    /// Default: {}
-    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
-    pub favorite_config_option_values: HashMap<String, Vec<String>>,
+impl std::ops::DerefMut for AllAgentServersSettings {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
 }
 
 #[with_fallible_options]

crates/settings_content/src/language.rs 🔗

@@ -81,7 +81,6 @@ pub enum EditPredictionProvider {
     None,
     #[default]
     Copilot,
-    Supermaven,
     Zed,
     Codestral,
     Ollama,
@@ -91,7 +90,7 @@ pub enum EditPredictionProvider {
     Experimental(&'static str),
 }
 
-pub const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2";
+const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2";
 
 impl<'de> Deserialize<'de> for EditPredictionProvider {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
@@ -103,7 +102,6 @@ impl<'de> Deserialize<'de> for EditPredictionProvider {
         pub enum Content {
             None,
             Copilot,
-            Supermaven,
             Zed,
             Codestral,
             Ollama,
@@ -116,7 +114,6 @@ impl<'de> Deserialize<'de> for EditPredictionProvider {
         Ok(match Content::deserialize(deserializer)? {
             Content::None => EditPredictionProvider::None,
             Content::Copilot => EditPredictionProvider::Copilot,
-            Content::Supermaven => EditPredictionProvider::Supermaven,
             Content::Zed => EditPredictionProvider::Zed,
             Content::Codestral => EditPredictionProvider::Codestral,
             Content::Ollama => EditPredictionProvider::Ollama,
@@ -126,9 +123,7 @@ impl<'de> Deserialize<'de> for EditPredictionProvider {
             Content::Experimental(name)
                 if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME =>
             {
-                EditPredictionProvider::Experimental(
-                    EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME,
-                )
+                EditPredictionProvider::Zed
             }
             Content::Experimental(name) => {
                 return Err(D::Error::custom(format!(
@@ -146,7 +141,6 @@ impl EditPredictionProvider {
             EditPredictionProvider::Zed => true,
             EditPredictionProvider::None
             | EditPredictionProvider::Copilot
-            | EditPredictionProvider::Supermaven
             | EditPredictionProvider::Codestral
             | EditPredictionProvider::Ollama
             | EditPredictionProvider::OpenAiCompatibleApi
@@ -160,14 +154,10 @@ impl EditPredictionProvider {
         match self {
             EditPredictionProvider::Zed => Some("Zed AI"),
             EditPredictionProvider::Copilot => Some("GitHub Copilot"),
-            EditPredictionProvider::Supermaven => Some("Supermaven"),
             EditPredictionProvider::Codestral => Some("Codestral"),
             EditPredictionProvider::Sweep => Some("Sweep"),
             EditPredictionProvider::Mercury => Some("Mercury"),
-            EditPredictionProvider::Experimental(
-                EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME,
-            ) => Some("Zeta2"),
-            EditPredictionProvider::None | EditPredictionProvider::Experimental(_) => None,
+            EditPredictionProvider::Experimental(_) | EditPredictionProvider::None => None,
             EditPredictionProvider::Ollama => Some("Ollama"),
             EditPredictionProvider::OpenAiCompatibleApi => Some("OpenAI-Compatible API"),
         }
@@ -245,6 +235,7 @@ pub enum EditPredictionPromptFormat {
     #[default]
     Infer,
     Zeta,
+    Zeta2,
     CodeLlama,
     StarCoder,
     DeepseekCoder,
@@ -378,6 +369,32 @@ pub enum EditPredictionsMode {
     Eager,
 }
 
+/// Controls the soft-wrapping behavior in the editor.
+#[derive(
+    Copy,
+    Clone,
+    Debug,
+    Serialize,
+    Deserialize,
+    PartialEq,
+    Eq,
+    JsonSchema,
+    MergeFrom,
+    strum::VariantArray,
+    strum::VariantNames,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum AutoIndentMode {
+    /// Adjusts indentation based on syntax context when typing.
+    /// Uses tree-sitter to analyze code structure and indent accordingly.
+    SyntaxAware,
+    /// Preserve the indentation of the current line when creating new lines,
+    /// but don't adjust based on syntax context.
+    PreserveIndent,
+    /// No automatic indentation. New lines start at column 0.
+    None,
+}
+
 /// Controls the soft-wrapping behavior in the editor.
 #[derive(
     Copy,
@@ -580,10 +597,14 @@ pub struct LanguageSettingsContent {
     ///
     /// Default: true
     pub linked_edits: Option<bool>,
-    /// Whether indentation should be adjusted based on the context whilst typing.
+    /// Controls automatic indentation behavior when typing.
     ///
-    /// Default: true
-    pub auto_indent: Option<bool>,
+    /// - "syntax_aware": Adjusts indentation based on syntax context (default)
+    /// - "preserve_indent": Preserves current line's indentation on new lines
+    /// - "none": No automatic indentation
+    ///
+    /// Default: syntax_aware
+    pub auto_indent: Option<AutoIndentMode>,
     /// Whether indentation of pasted content should be adjusted based on the context.
     ///
     /// Default: true

crates/settings_content/src/language_model.rs 🔗

@@ -20,6 +20,7 @@ pub struct AllLanguageModelSettingsContent {
     pub openai: Option<OpenAiSettingsContent>,
     pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
     pub vercel: Option<VercelSettingsContent>,
+    pub vercel_ai_gateway: Option<VercelAiGatewaySettingsContent>,
     pub x_ai: Option<XAiSettingsContent>,
     #[serde(rename = "zed.dev")]
     pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
@@ -301,6 +302,25 @@ pub struct VercelAvailableModel {
     pub max_completion_tokens: Option<u64>,
 }
 
+#[with_fallible_options]
+#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
+pub struct VercelAiGatewaySettingsContent {
+    pub api_url: Option<String>,
+    pub available_models: Option<Vec<VercelAiGatewayAvailableModel>>,
+}
+
+#[with_fallible_options]
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
+pub struct VercelAiGatewayAvailableModel {
+    pub name: String,
+    pub display_name: Option<String>,
+    pub max_tokens: u64,
+    pub max_output_tokens: Option<u64>,
+    pub max_completion_tokens: Option<u64>,
+    #[serde(default)]
+    pub capabilities: OpenAiCompatibleModelCapabilities,
+}
+
 #[with_fallible_options]
 #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
 pub struct GoogleSettingsContent {

crates/settings_content/src/settings_content.rs 🔗

@@ -619,6 +619,11 @@ pub struct GitPanelSettingsContent {
     ///
     /// Default: false
     pub tree_view: Option<bool>,
+
+    /// Whether to show the addition/deletion change count next to each file in the Git panel.
+    ///
+    /// Default: false
+    pub diff_stats: Option<bool>,
 }
 
 #[derive(
@@ -711,10 +716,6 @@ pub struct FileFinderSettingsContent {
     ///
     /// Default: true
     pub skip_focus_for_active_in_search: Option<bool>,
-    /// Determines whether to show the git status in the file finder
-    ///
-    /// Default: true
-    pub git_status: Option<bool>,
     /// Whether to use gitignored files when searching.
     /// Only the file Zed had indexed will be used, not necessary all the gitignored files.
     ///
@@ -1147,11 +1148,6 @@ pub struct ReplSettingsContent {
     ///
     /// Default: 0
     pub output_max_height_lines: Option<usize>,
-    /// Maximum number of columns of output to display before scaling images.
-    /// Set to 0 to disable output width limits.
-    ///
-    /// Default: 0
-    pub output_max_width_columns: Option<usize>,
 }
 
 /// Settings for configuring the which-key popup behaviour.

crates/settings_content/src/theme.rs 🔗

@@ -1033,6 +1033,9 @@ pub struct ThemeColorsContent {
     /// Background color for Vim Visual Block mode indicator.
     #[serde(rename = "vim.visual_block.background")]
     pub vim_visual_block_background: Option<String>,
+    /// Background color for Vim yank highlight.
+    #[serde(rename = "vim.yank.background")]
+    pub vim_yank_background: Option<String>,
     /// Background color for Vim Helix Normal mode indicator.
     #[serde(rename = "vim.helix_normal.background")]
     pub vim_helix_normal_background: Option<String>,

crates/settings_content/src/workspace.rs 🔗

@@ -739,6 +739,10 @@ pub struct ProjectPanelSettingsContent {
     ///
     /// Default: directories_first
     pub sort_mode: Option<ProjectPanelSortMode>,
+    /// Whether to show error and warning count badges next to file names in the project panel.
+    ///
+    /// Default: true
+    pub diagnostic_badges: Option<bool>,
 }
 
 #[derive(

crates/settings_ui/src/page_data.rs 🔗

@@ -3154,7 +3154,7 @@ fn search_and_files_page() -> SettingsPage {
         ]
     }
 
-    fn file_finder_section() -> [SettingsPageItem; 6] {
+    fn file_finder_section() -> [SettingsPageItem; 5] {
         [
             SettingsPageItem::SectionHeader("File Finder"),
             // todo: null by default
@@ -3242,24 +3242,6 @@ fn search_and_files_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
-            SettingsPageItem::SettingItem(SettingItem {
-                title: "Git Status",
-                description: "Show the Git status in the file finder.",
-                field: Box::new(SettingField {
-                    json_path: Some("file_finder.git_status"),
-                    pick: |settings_content| {
-                        settings_content.file_finder.as_ref()?.git_status.as_ref()
-                    },
-                    write: |settings_content, value| {
-                        settings_content
-                            .file_finder
-                            .get_or_insert_default()
-                            .git_status = value;
-                    },
-                }),
-                metadata: None,
-                files: USER,
-            }),
         ]
     }
 
@@ -4256,7 +4238,7 @@ fn window_and_layout_page() -> SettingsPage {
 }
 
 fn panels_page() -> SettingsPage {
-    fn project_panel_section() -> [SettingsPageItem; 21] {
+    fn project_panel_section() -> [SettingsPageItem; 22] {
         [
             SettingsPageItem::SectionHeader("Project Panel"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -4556,6 +4538,28 @@ fn panels_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Diagnostic Badges",
+                description: "Show error and warning count badges next to file names in the project panel.",
+                field: Box::new(SettingField {
+                    json_path: Some("project_panel.diagnostic_badges"),
+                    pick: |settings_content| {
+                        settings_content
+                            .project_panel
+                            .as_ref()?
+                            .diagnostic_badges
+                            .as_ref()
+                    },
+                    write: |settings_content, value| {
+                        settings_content
+                            .project_panel
+                            .get_or_insert_default()
+                            .diagnostic_badges = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Sticky Scroll",
                 description: "Whether to stick parent directories at top of the project panel.",
@@ -5017,7 +5021,7 @@ fn panels_page() -> SettingsPage {
         ]
     }
 
-    fn git_panel_section() -> [SettingsPageItem; 10] {
+    fn git_panel_section() -> [SettingsPageItem; 11] {
         [
             SettingsPageItem::SectionHeader("Git Panel"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -5159,6 +5163,24 @@ fn panels_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Diff Stats",
+                description: "Whether to show the addition/deletion change count next to each file in the Git panel.",
+                field: Box::new(SettingField {
+                    json_path: Some("git_panel.diff_stats"),
+                    pick: |settings_content| {
+                        settings_content.git_panel.as_ref()?.diff_stats.as_ref()
+                    },
+                    write: |settings_content, value| {
+                        settings_content
+                            .git_panel
+                            .get_or_insert_default()
+                            .diff_stats = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Scroll Bar",
                 description: "How and when the scrollbar should be displayed.",
@@ -7383,7 +7405,7 @@ fn language_settings_data() -> Box<[SettingsPageItem]> {
             }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Auto Indent",
-                description: "Whether indentation should be adjusted based on the context whilst typing.",
+                description: "Controls automatic indentation behavior when typing.",
                 field: Box::new(SettingField {
                     json_path: Some("languages.$(language).auto_indent"),
                     pick: |settings_content| {

crates/settings_ui/src/pages/edit_prediction_provider_setup.rs 🔗

@@ -2,6 +2,7 @@ use codestral::{CODESTRAL_API_URL, codestral_api_key_state, codestral_api_url};
 use edit_prediction::{
     ApiKeyState,
     mercury::{MERCURY_CREDENTIALS_URL, mercury_api_token},
+    open_ai_compatible::{open_ai_compatible_api_token, open_ai_compatible_api_url},
     sweep_ai::{SWEEP_CREDENTIALS_URL, sweep_api_token},
 };
 use edit_prediction_ui::{get_available_providers, set_completion_provider};
@@ -33,7 +34,9 @@ pub(crate) fn render_edit_prediction_setup_page(
             render_api_key_provider(
                 IconName::Inception,
                 "Mercury",
-                "https://platform.inceptionlabs.ai/dashboard/api-keys".into(),
+                ApiKeyDocs::Link {
+                    dashboard_url: "https://platform.inceptionlabs.ai/dashboard/api-keys".into(),
+                },
                 mercury_api_token(cx),
                 |_cx| MERCURY_CREDENTIALS_URL,
                 None,
@@ -46,7 +49,9 @@ pub(crate) fn render_edit_prediction_setup_page(
             render_api_key_provider(
                 IconName::SweepAi,
                 "Sweep",
-                "https://app.sweep.dev/".into(),
+                ApiKeyDocs::Link {
+                    dashboard_url: "https://app.sweep.dev/".into(),
+                },
                 sweep_api_token(cx),
                 |_cx| SWEEP_CREDENTIALS_URL,
                 Some(
@@ -68,7 +73,9 @@ pub(crate) fn render_edit_prediction_setup_page(
             render_api_key_provider(
                 IconName::AiMistral,
                 "Codestral",
-                "https://console.mistral.ai/codestral".into(),
+                ApiKeyDocs::Link {
+                    dashboard_url: "https://console.mistral.ai/codestral".into(),
+                },
                 codestral_api_key_state(cx),
                 |cx| codestral_api_url(cx),
                 Some(
@@ -87,7 +94,31 @@ pub(crate) fn render_edit_prediction_setup_page(
             .into_any_element(),
         ),
         Some(render_ollama_provider(settings_window, window, cx).into_any_element()),
-        Some(render_open_ai_compatible_provider(settings_window, window, cx).into_any_element()),
+        Some(
+            render_api_key_provider(
+                IconName::AiOpenAiCompat,
+                "OpenAI Compatible API",
+                ApiKeyDocs::Custom {
+                    message: "Set an API key here. It will be sent as Authorization: Bearer {key}."
+                        .into(),
+                },
+                open_ai_compatible_api_token(cx),
+                |cx| open_ai_compatible_api_url(cx),
+                Some(
+                    settings_window
+                        .render_sub_page_items_section(
+                            open_ai_compatible_settings().iter().enumerate(),
+                            true,
+                            window,
+                            cx,
+                        )
+                        .into_any_element(),
+                ),
+                window,
+                cx,
+            )
+            .into_any_element(),
+        ),
     ];
 
     div()
@@ -162,10 +193,15 @@ fn render_provider_dropdown(window: &mut Window, cx: &mut App) -> AnyElement {
         .into_any_element()
 }
 
+enum ApiKeyDocs {
+    Link { dashboard_url: SharedString },
+    Custom { message: SharedString },
+}
+
 fn render_api_key_provider(
     icon: IconName,
     title: &'static str,
-    link: SharedString,
+    docs: ApiKeyDocs,
     api_key_state: Entity<ApiKeyState>,
     current_url: fn(&mut App) -> SharedString,
     additional_fields: Option<AnyElement>,
@@ -209,25 +245,32 @@ fn render_api_key_provider(
         .icon(icon)
         .no_padding(true);
     let button_link_label = format!("{} dashboard", title);
-    let description = h_flex()
-        .min_w_0()
-        .gap_0p5()
-        .child(
-            Label::new("Visit the")
+    let description = match docs {
+        ApiKeyDocs::Custom { message } => h_flex().min_w_0().gap_0p5().child(
+            Label::new(message)
                 .size(LabelSize::Small)
                 .color(Color::Muted),
-        )
-        .child(
-            ButtonLink::new(button_link_label, link)
-                .no_icon(true)
-                .label_size(LabelSize::Small)
-                .label_color(Color::Muted),
-        )
-        .child(
-            Label::new("to generate an API key.")
-                .size(LabelSize::Small)
-                .color(Color::Muted),
-        );
+        ),
+        ApiKeyDocs::Link { dashboard_url } => h_flex()
+            .min_w_0()
+            .gap_0p5()
+            .child(
+                Label::new("Visit the")
+                    .size(LabelSize::Small)
+                    .color(Color::Muted),
+            )
+            .child(
+                ButtonLink::new(button_link_label, dashboard_url)
+                    .no_icon(true)
+                    .label_size(LabelSize::Small)
+                    .label_color(Color::Muted),
+            )
+            .child(
+                Label::new("to generate an API key.")
+                    .size(LabelSize::Small)
+                    .color(Color::Muted),
+            ),
+    };
     let configured_card_label = if is_from_env_var {
         "API Key Set in Environment Variable"
     } else {
@@ -484,34 +527,6 @@ fn ollama_settings() -> Box<[SettingsPageItem]> {
     ])
 }
 
-fn render_open_ai_compatible_provider(
-    settings_window: &SettingsWindow,
-    window: &mut Window,
-    cx: &mut Context<SettingsWindow>,
-) -> impl IntoElement {
-    let open_ai_compatible_settings = open_ai_compatible_settings();
-    let additional_fields = settings_window
-        .render_sub_page_items_section(
-            open_ai_compatible_settings.iter().enumerate(),
-            true,
-            window,
-            cx,
-        )
-        .into_any_element();
-
-    v_flex()
-        .id("open-ai-compatible")
-        .min_w_0()
-        .pt_8()
-        .gap_1p5()
-        .child(
-            SettingsSectionHeader::new("OpenAI Compatible API")
-                .icon(IconName::AiOpenAiCompat)
-                .no_padding(true),
-        )
-        .child(div().px_neg_8().child(additional_fields))
-}
-
 fn open_ai_compatible_settings() -> Box<[SettingsPageItem]> {
     Box::new([
         SettingsPageItem::SettingItem(SettingItem {

crates/settings_ui/src/settings_ui.rs 🔗

@@ -474,6 +474,7 @@ fn init_renderers(cx: &mut App) {
         .add_basic_renderer::<settings::CurrentLineHighlight>(render_dropdown)
         .add_basic_renderer::<settings::ShowWhitespaceSetting>(render_dropdown)
         .add_basic_renderer::<settings::SoftWrap>(render_dropdown)
+        .add_basic_renderer::<settings::AutoIndentMode>(render_dropdown)
         .add_basic_renderer::<settings::ScrollBeyondLastLine>(render_dropdown)
         .add_basic_renderer::<settings::SnippetSortOrder>(render_dropdown)
         .add_basic_renderer::<settings::ClosePosition>(render_dropdown)
@@ -1574,8 +1575,10 @@ impl SettingsWindow {
                 };
 
                 this_weak
-                    .update(cx, |this, cx| {
-                        this.fetch_files(window, cx);
+                    .update(cx, |_, cx| {
+                        cx.defer_in(window, |settings_window, window, cx| {
+                            settings_window.fetch_files(window, cx)
+                        });
                         cx.observe_release_in(&project, window, |_, _, window, cx| {
                             cx.defer_in(window, |this, window, cx| this.fetch_files(window, cx));
                         })

crates/sidebar/Cargo.toml 🔗

@@ -13,30 +13,38 @@ path = "src/sidebar.rs"
 
 [features]
 default = []
-test-support = []
 
 [dependencies]
 acp_thread.workspace = true
+agent.workspace = true
+agent-client-protocol.workspace = true
 agent_ui.workspace = true
 chrono.workspace = true
+editor.workspace = true
 fs.workspace = true
-fuzzy.workspace = true
 gpui.workspace = true
-picker.workspace = true
+menu.workspace = true
 project.workspace = true
 recent_projects.workspace = true
+settings.workspace = true
 theme.workspace = true
 ui.workspace = true
-ui_input.workspace = true
 util.workspace = true
 workspace.workspace = true
+zed_actions.workspace = true
 
 [dev-dependencies]
+acp_thread = { workspace = true, features = ["test-support"] }
+agent = { workspace = true, features = ["test-support"] }
+agent_ui = { workspace = true, features = ["test-support"] }
+assistant_text_thread = { workspace = true, features = ["test-support"] }
 editor.workspace = true
+language_model = { workspace = true, features = ["test-support"] }
+recent_projects = { workspace = true, features = ["test-support"] }
+serde_json.workspace = true
 feature_flags.workspace = true
 fs = { workspace = true, features = ["test-support"] }
 gpui = { workspace = true, features = ["test-support"] }
 project = { workspace = true, features = ["test-support"] }
-recent_projects = { workspace = true, features = ["test-support"] }
 settings = { workspace = true, features = ["test-support"] }
-workspace = { workspace = true, features = ["test-support"] }
+workspace = { workspace = true, features = ["test-support"] }

crates/sidebar/src/sidebar.rs 🔗

@@ -1,711 +1,189 @@
 use acp_thread::ThreadStatus;
-use agent_ui::{AgentPanel, AgentPanelEvent};
-use chrono::{Datelike, Local, NaiveDate, TimeDelta};
-
-use fs::Fs;
-use fuzzy::StringMatchCandidate;
+use agent::ThreadStore;
+use agent_client_protocol as acp;
+use agent_ui::{AgentPanel, AgentPanelEvent, NewThread};
+use chrono::Utc;
+use editor::{Editor, EditorElement, EditorStyle};
 use gpui::{
-    App, Context, Entity, EventEmitter, FocusHandle, Focusable, Pixels, Render, SharedString,
-    Subscription, Task, Window, px,
+    AnyElement, App, Context, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, ListState,
+    Pixels, Render, SharedString, Subscription, TextStyle, WeakEntity, Window, actions, list,
+    prelude::*, px, relative, rems,
 };
-use picker::{Picker, PickerDelegate};
+use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious};
 use project::Event as ProjectEvent;
-use recent_projects::{RecentProjectEntry, get_recent_projects};
-use std::fmt::Display;
-
+use settings::Settings;
 use std::collections::{HashMap, HashSet};
-
-use std::path::{Path, PathBuf};
-use std::sync::Arc;
-use theme::ActiveTheme;
+use std::mem;
+use theme::{ActiveTheme, ThemeSettings};
 use ui::utils::TRAFFIC_LIGHT_PADDING;
 use ui::{
-    AgentThreadStatus, Divider, DividerColor, KeyBinding, ListSubHeader, Tab, ThreadItem, Tooltip,
-    prelude::*,
+    AgentThreadStatus, HighlightedLabel, IconButtonShape, KeyBinding, ListItem, PopoverMenu, Tab,
+    ThreadItem, Tooltip, WithScrollbar, prelude::*,
 };
-use ui_input::ErasedEditor;
-use util::ResultExt as _;
+use util::path_list::PathList;
 use workspace::{
-    FocusWorkspaceSidebar, MultiWorkspace, NewWorkspaceInWindow, Sidebar as WorkspaceSidebar,
-    SidebarEvent, ToggleWorkspaceSidebar, Workspace,
+    FocusWorkspaceSidebar, MultiWorkspace, Sidebar as WorkspaceSidebar, SidebarEvent,
+    ToggleWorkspaceSidebar, Workspace,
 };
+use zed_actions::editor::{MoveDown, MoveUp};
+
+actions!(
+    agents_sidebar,
+    [
+        /// Collapses the selected entry in the workspace sidebar.
+        CollapseSelectedEntry,
+        /// Expands the selected entry in the workspace sidebar.
+        ExpandSelectedEntry,
+    ]
+);
+
+const DEFAULT_WIDTH: Pixels = px(320.0);
+const MIN_WIDTH: Pixels = px(200.0);
+const MAX_WIDTH: Pixels = px(800.0);
+const DEFAULT_THREADS_SHOWN: usize = 5;
 
 #[derive(Clone, Debug)]
-struct AgentThreadInfo {
+struct ActiveThreadInfo {
+    session_id: acp::SessionId,
     title: SharedString,
     status: AgentThreadStatus,
     icon: IconName,
+    icon_from_external_svg: Option<SharedString>,
+    is_background: bool,
 }
 
-const DEFAULT_WIDTH: Pixels = px(320.0);
-const MIN_WIDTH: Pixels = px(200.0);
-const MAX_WIDTH: Pixels = px(800.0);
-const MAX_MATCHES: usize = 100;
-
-#[derive(Clone)]
-struct WorkspaceThreadEntry {
-    index: usize,
-    worktree_label: SharedString,
-    full_path: SharedString,
-    thread_info: Option<AgentThreadInfo>,
-}
-
-impl WorkspaceThreadEntry {
-    fn new(index: usize, workspace: &Entity<Workspace>, cx: &App) -> Self {
-        let workspace_ref = workspace.read(cx);
-
-        let worktrees: Vec<_> = workspace_ref
-            .worktrees(cx)
-            .filter(|worktree| worktree.read(cx).is_visible())
-            .map(|worktree| worktree.read(cx).abs_path())
-            .collect();
-
-        let worktree_names: Vec<String> = worktrees
-            .iter()
-            .filter_map(|path| {
-                path.file_name()
-                    .map(|name| name.to_string_lossy().to_string())
-            })
-            .collect();
-
-        let worktree_label: SharedString = if worktree_names.is_empty() {
-            format!("Workspace {}", index + 1).into()
-        } else {
-            worktree_names.join(", ").into()
-        };
-
-        let full_path: SharedString = worktrees
-            .iter()
-            .map(|path| path.to_string_lossy().to_string())
-            .collect::<Vec<_>>()
-            .join("\n")
-            .into();
-
-        let thread_info = Self::thread_info(workspace, cx);
-
+impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo {
+    fn from(info: &ActiveThreadInfo) -> Self {
         Self {
-            index,
-            worktree_label,
-            full_path,
-            thread_info,
-        }
-    }
-
-    fn thread_info(workspace: &Entity<Workspace>, cx: &App) -> Option<AgentThreadInfo> {
-        let agent_panel = workspace.read(cx).panel::<AgentPanel>(cx)?;
-        let agent_panel_ref = agent_panel.read(cx);
-
-        let thread_view = agent_panel_ref.as_active_thread_view(cx)?.read(cx);
-        let thread = thread_view.thread.read(cx);
-
-        let icon = thread_view.agent_icon;
-        let title = thread.title();
-
-        let status = if thread.is_waiting_for_confirmation() {
-            AgentThreadStatus::WaitingForConfirmation
-        } else if thread.had_error() {
-            AgentThreadStatus::Error
-        } else {
-            match thread.status() {
-                ThreadStatus::Generating => AgentThreadStatus::Running,
-                ThreadStatus::Idle => AgentThreadStatus::Completed,
-            }
-        };
-        Some(AgentThreadInfo {
-            title,
-            status,
-            icon,
-        })
-    }
-}
-
-#[derive(Clone)]
-enum SidebarEntry {
-    Separator(SharedString),
-    WorkspaceThread(WorkspaceThreadEntry),
-    RecentProject(RecentProjectEntry),
-}
-
-impl SidebarEntry {
-    fn searchable_text(&self) -> &str {
-        match self {
-            SidebarEntry::Separator(_) => "",
-            SidebarEntry::WorkspaceThread(entry) => entry.worktree_label.as_ref(),
-            SidebarEntry::RecentProject(entry) => entry.name.as_ref(),
+            session_id: info.session_id.clone(),
+            cwd: None,
+            title: Some(info.title.clone()),
+            updated_at: Some(Utc::now()),
+            meta: None,
         }
     }
 }
 
-#[derive(Clone)]
-struct SidebarMatch {
-    entry: SidebarEntry,
-    positions: Vec<usize>,
+#[derive(Clone, Debug)]
+#[allow(dead_code)]
+enum ListEntry {
+    ProjectHeader {
+        path_list: PathList,
+        label: SharedString,
+        highlight_positions: Vec<usize>,
+    },
+    Thread {
+        session_info: acp_thread::AgentSessionInfo,
+        icon: IconName,
+        icon_from_external_svg: Option<SharedString>,
+        status: AgentThreadStatus,
+        diff_stats: Option<(usize, usize)>,
+        workspace_index: usize,
+        is_live: bool,
+        is_background: bool,
+        highlight_positions: Vec<usize>,
+    },
+    ViewMore {
+        path_list: PathList,
+        remaining_count: usize,
+    },
+    NewThread {
+        path_list: PathList,
+    },
 }
 
-struct WorkspacePickerDelegate {
-    multi_workspace: Entity<MultiWorkspace>,
-    entries: Vec<SidebarEntry>,
-    active_workspace_index: usize,
-    workspace_thread_count: usize,
-    /// All recent projects including what's filtered out of entries
-    /// used to add unopened projects to entries on rebuild
-    recent_projects: Vec<RecentProjectEntry>,
-    recent_project_thread_titles: HashMap<SharedString, SharedString>,
-    matches: Vec<SidebarMatch>,
-    selected_index: usize,
-    query: String,
-    hovered_thread_item: Option<usize>,
-    notified_workspaces: HashSet<usize>,
+#[derive(Default)]
+struct SidebarContents {
+    entries: Vec<ListEntry>,
+    notified_threads: HashSet<acp::SessionId>,
 }
 
-impl WorkspacePickerDelegate {
-    fn new(multi_workspace: Entity<MultiWorkspace>) -> Self {
-        Self {
-            multi_workspace,
-            entries: Vec::new(),
-            active_workspace_index: 0,
-            workspace_thread_count: 0,
-            recent_projects: Vec::new(),
-            recent_project_thread_titles: HashMap::new(),
-            matches: Vec::new(),
-            selected_index: 0,
-            query: String::new(),
-            hovered_thread_item: None,
-            notified_workspaces: HashSet::new(),
-        }
+impl SidebarContents {
+    fn is_thread_notified(&self, session_id: &acp::SessionId) -> bool {
+        self.notified_threads.contains(session_id)
     }
+}
 
-    fn set_entries(
-        &mut self,
-        workspace_threads: Vec<WorkspaceThreadEntry>,
-        active_workspace_index: usize,
-        cx: &App,
-    ) {
-        if let Some(hovered_index) = self.hovered_thread_item {
-            let still_exists = workspace_threads
-                .iter()
-                .any(|thread| thread.index == hovered_index);
-            if !still_exists {
-                self.hovered_thread_item = None;
-            }
-        }
-
-        let old_statuses: HashMap<usize, AgentThreadStatus> = self
-            .entries
-            .iter()
-            .filter_map(|entry| match entry {
-                SidebarEntry::WorkspaceThread(thread) => thread
-                    .thread_info
-                    .as_ref()
-                    .map(|info| (thread.index, info.status)),
-                _ => None,
-            })
-            .collect();
+fn fuzzy_match_positions(query: &str, candidate: &str) -> Option<Vec<usize>> {
+    let mut positions = Vec::new();
+    let mut query_chars = query.chars().peekable();
 
-        for thread in &workspace_threads {
-            if let Some(info) = &thread.thread_info {
-                if info.status == AgentThreadStatus::Completed
-                    && thread.index != active_workspace_index
-                {
-                    if old_statuses.get(&thread.index) == Some(&AgentThreadStatus::Running) {
-                        self.notified_workspaces.insert(thread.index);
-                    }
-                }
+    for (byte_idx, candidate_char) in candidate.char_indices() {
+        if let Some(&query_char) = query_chars.peek() {
+            if candidate_char.eq_ignore_ascii_case(&query_char) {
+                positions.push(byte_idx);
+                query_chars.next();
             }
+        } else {
+            break;
         }
-
-        if self.active_workspace_index != active_workspace_index {
-            self.notified_workspaces.remove(&active_workspace_index);
-        }
-        self.active_workspace_index = active_workspace_index;
-        self.workspace_thread_count = workspace_threads.len();
-        self.rebuild_entries(workspace_threads, cx);
     }
 
-    fn set_recent_projects(&mut self, recent_projects: Vec<RecentProjectEntry>, cx: &App) {
-        self.recent_project_thread_titles.clear();
-
-        self.recent_projects = recent_projects;
-
-        let workspace_threads: Vec<WorkspaceThreadEntry> = self
-            .entries
-            .iter()
-            .filter_map(|entry| match entry {
-                SidebarEntry::WorkspaceThread(thread) => Some(thread.clone()),
-                _ => None,
-            })
-            .collect();
-        self.rebuild_entries(workspace_threads, cx);
-    }
-
-    fn open_workspace_path_sets(&self, cx: &App) -> Vec<Vec<Arc<Path>>> {
-        self.multi_workspace
-            .read(cx)
-            .workspaces()
-            .iter()
-            .map(|workspace| {
-                let mut paths = workspace.read(cx).root_paths(cx);
-                paths.sort();
-                paths
-            })
-            .collect()
-    }
-
-    fn rebuild_entries(&mut self, workspace_threads: Vec<WorkspaceThreadEntry>, cx: &App) {
-        let open_path_sets = self.open_workspace_path_sets(cx);
-
-        self.entries.clear();
-
-        if !workspace_threads.is_empty() {
-            self.entries
-                .push(SidebarEntry::Separator("Active Workspaces".into()));
-            for thread in workspace_threads {
-                self.entries.push(SidebarEntry::WorkspaceThread(thread));
-            }
-        }
-
-        let recent: Vec<_> = self
-            .recent_projects
-            .iter()
-            .filter(|project| {
-                let mut project_paths: Vec<&Path> =
-                    project.paths.iter().map(|p| p.as_path()).collect();
-                project_paths.sort();
-                !open_path_sets.iter().any(|open_paths| {
-                    open_paths.len() == project_paths.len()
-                        && open_paths
-                            .iter()
-                            .zip(&project_paths)
-                            .all(|(a, b)| a.as_ref() == *b)
-                })
-            })
-            .cloned()
-            .collect();
-
-        if !recent.is_empty() {
-            let today = Local::now().naive_local().date();
-            let mut current_bucket: Option<TimeBucket> = None;
-
-            for project in recent {
-                let entry_date = project.timestamp.with_timezone(&Local).naive_local().date();
-                let bucket = TimeBucket::from_dates(today, entry_date);
-
-                if current_bucket != Some(bucket) {
-                    current_bucket = Some(bucket);
-                    self.entries
-                        .push(SidebarEntry::Separator(bucket.to_string().into()));
-                }
-
-                self.entries.push(SidebarEntry::RecentProject(project));
-            }
-        }
+    if query_chars.peek().is_none() {
+        Some(positions)
+    } else {
+        None
     }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-enum TimeBucket {
-    Today,
-    Yesterday,
-    ThisWeek,
-    PastWeek,
-    All,
-}
-
-impl TimeBucket {
-    fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self {
-        if date == reference {
-            return TimeBucket::Today;
-        }
-
-        if date == reference - TimeDelta::days(1) {
-            return TimeBucket::Yesterday;
-        }
-
-        let week = date.iso_week();
-
-        if reference.iso_week() == week {
-            return TimeBucket::ThisWeek;
-        }
-
-        let last_week = (reference - TimeDelta::days(7)).iso_week();
-
-        if week == last_week {
-            return TimeBucket::PastWeek;
+fn workspace_path_list_and_label(
+    workspace: &Entity<Workspace>,
+    cx: &App,
+) -> (PathList, SharedString) {
+    let workspace_ref = workspace.read(cx);
+    let mut paths = Vec::new();
+    let mut names = Vec::new();
+
+    for worktree in workspace_ref.worktrees(cx) {
+        let worktree_ref = worktree.read(cx);
+        if !worktree_ref.is_visible() {
+            continue;
         }
-
-        TimeBucket::All
-    }
-}
-
-impl Display for TimeBucket {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            TimeBucket::Today => write!(f, "Today"),
-            TimeBucket::Yesterday => write!(f, "Yesterday"),
-            TimeBucket::ThisWeek => write!(f, "This Week"),
-            TimeBucket::PastWeek => write!(f, "Past Week"),
-            TimeBucket::All => write!(f, "All"),
+        let abs_path = worktree_ref.abs_path();
+        paths.push(abs_path.to_path_buf());
+        if let Some(name) = abs_path.file_name() {
+            names.push(name.to_string_lossy().to_string());
         }
     }
-}
 
-fn open_recent_project(paths: Vec<PathBuf>, window: &mut Window, cx: &mut App) {
-    let Some(handle) = window.window_handle().downcast::<MultiWorkspace>() else {
-        return;
+    let label: SharedString = if names.is_empty() {
+        // TODO: Can we do something better in this case?
+        "Empty Workspace".into()
+    } else {
+        names.join(", ").into()
     };
 
-    cx.defer(move |cx| {
-        if let Some(task) = handle
-            .update(cx, |multi_workspace, window, cx| {
-                multi_workspace.open_project(paths, window, cx)
-            })
-            .log_err()
-        {
-            task.detach_and_log_err(cx);
-        }
-    });
+    (PathList::new(&paths), label)
 }
 
-impl PickerDelegate for WorkspacePickerDelegate {
-    type ListItem = AnyElement;
-
-    fn match_count(&self) -> usize {
-        self.matches.len()
-    }
-
-    fn selected_index(&self) -> usize {
-        self.selected_index
-    }
-
-    fn set_selected_index(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) {
-        self.selected_index = ix;
-    }
-
-    fn can_select(
-        &mut self,
-        ix: usize,
-        _window: &mut Window,
-        _cx: &mut Context<Picker<Self>>,
-    ) -> bool {
-        match self.matches.get(ix) {
-            Some(SidebarMatch {
-                entry: SidebarEntry::Separator(_),
-                ..
-            }) => false,
-            _ => true,
-        }
-    }
-
-    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
-        "Search…".into()
-    }
-
-    fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
-        if self.query.is_empty() {
-            None
-        } else {
-            Some("No threads match your search.".into())
-        }
-    }
-
-    fn update_matches(
-        &mut self,
-        query: String,
-        window: &mut Window,
-        cx: &mut Context<Picker<Self>>,
-    ) -> Task<()> {
-        let query_changed = self.query != query;
-        self.query = query.clone();
-        if query_changed {
-            self.hovered_thread_item = None;
-        }
-        let entries = self.entries.clone();
-
-        if query.is_empty() {
-            self.matches = entries
-                .into_iter()
-                .map(|entry| SidebarMatch {
-                    entry,
-                    positions: Vec::new(),
-                })
-                .collect();
-
-            let separator_offset = if self.workspace_thread_count > 0 {
-                1
-            } else {
-                0
-            };
-            self.selected_index = (self.active_workspace_index + separator_offset)
-                .min(self.matches.len().saturating_sub(1));
-            return Task::ready(());
-        }
-
-        let executor = cx.background_executor().clone();
-        cx.spawn_in(window, async move |picker, cx| {
-            let matches = cx
-                .background_spawn(async move {
-                    let data_entries: Vec<(usize, &SidebarEntry)> = entries
-                        .iter()
-                        .enumerate()
-                        .filter(|(_, entry)| !matches!(entry, SidebarEntry::Separator(_)))
-                        .collect();
-
-                    let candidates: Vec<StringMatchCandidate> = data_entries
-                        .iter()
-                        .enumerate()
-                        .map(|(candidate_index, (_, entry))| {
-                            StringMatchCandidate::new(candidate_index, entry.searchable_text())
-                        })
-                        .collect();
-
-                    let search_matches = fuzzy::match_strings(
-                        &candidates,
-                        &query,
-                        false,
-                        true,
-                        MAX_MATCHES,
-                        &Default::default(),
-                        executor,
-                    )
-                    .await;
-
-                    let mut workspace_matches = Vec::new();
-                    let mut project_matches = Vec::new();
-
-                    for search_match in search_matches {
-                        let (original_index, _) = data_entries[search_match.candidate_id];
-                        let entry = entries[original_index].clone();
-                        let sidebar_match = SidebarMatch {
-                            positions: search_match.positions,
-                            entry: entry.clone(),
-                        };
-                        match entry {
-                            SidebarEntry::WorkspaceThread(_) => {
-                                workspace_matches.push(sidebar_match)
-                            }
-                            SidebarEntry::RecentProject(_) => project_matches.push(sidebar_match),
-                            SidebarEntry::Separator(_) => {}
-                        }
-                    }
-
-                    let mut result = Vec::new();
-                    if !workspace_matches.is_empty() {
-                        result.push(SidebarMatch {
-                            entry: SidebarEntry::Separator("Active Workspaces".into()),
-                            positions: Vec::new(),
-                        });
-                        result.extend(workspace_matches);
-                    }
-                    if !project_matches.is_empty() {
-                        result.push(SidebarMatch {
-                            entry: SidebarEntry::Separator("Recent Projects".into()),
-                            positions: Vec::new(),
-                        });
-                        result.extend(project_matches);
-                    }
-                    result
-                })
-                .await;
-
-            picker
-                .update_in(cx, |picker, _window, _cx| {
-                    picker.delegate.matches = matches;
-                    if picker.delegate.matches.is_empty() {
-                        picker.delegate.selected_index = 0;
-                    } else {
-                        let first_selectable = picker
-                            .delegate
-                            .matches
-                            .iter()
-                            .position(|m| !matches!(m.entry, SidebarEntry::Separator(_)))
-                            .unwrap_or(0);
-                        picker.delegate.selected_index = first_selectable;
-                    }
-                })
-                .log_err();
+fn workspace_index_for_path_list(
+    workspaces: &[Entity<Workspace>],
+    path_list: &PathList,
+    cx: &App,
+) -> Option<usize> {
+    workspaces
+        .iter()
+        .enumerate()
+        .find_map(|(index, workspace)| {
+            let (candidate, _) = workspace_path_list_and_label(workspace, cx);
+            (candidate == *path_list).then_some(index)
         })
-    }
-
-    fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
-        let Some(selected_match) = self.matches.get(self.selected_index) else {
-            return;
-        };
-
-        match &selected_match.entry {
-            SidebarEntry::Separator(_) => {}
-            SidebarEntry::WorkspaceThread(thread_entry) => {
-                let target_index = thread_entry.index;
-                self.multi_workspace.update(cx, |multi_workspace, cx| {
-                    multi_workspace.activate_index(target_index, window, cx);
-                });
-            }
-            SidebarEntry::RecentProject(project_entry) => {
-                let paths = project_entry.paths.clone();
-                open_recent_project(paths, window, cx);
-            }
-        }
-    }
-
-    fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {}
-
-    fn render_match(
-        &self,
-        index: usize,
-        selected: bool,
-        _window: &mut Window,
-        cx: &mut Context<Picker<Self>>,
-    ) -> Option<Self::ListItem> {
-        let match_entry = self.matches.get(index)?;
-        let SidebarMatch { entry, positions } = match_entry;
-
-        match entry {
-            SidebarEntry::Separator(title) => Some(
-                v_flex()
-                    .when(index > 0, |this| {
-                        this.mt_1()
-                            .gap_2()
-                            .child(Divider::horizontal().color(DividerColor::BorderFaded))
-                    })
-                    .child(ListSubHeader::new(title.clone()).inset(true))
-                    .into_any_element(),
-            ),
-            SidebarEntry::WorkspaceThread(thread_entry) => {
-                let worktree_label = thread_entry.worktree_label.clone();
-                let full_path = thread_entry.full_path.clone();
-                let thread_info = thread_entry.thread_info.clone();
-                let workspace_index = thread_entry.index;
-                let multi_workspace = self.multi_workspace.clone();
-                let workspace_count = self.multi_workspace.read(cx).workspaces().len();
-                let is_hovered = self.hovered_thread_item == Some(workspace_index);
-
-                let remove_btn = IconButton::new(
-                    format!("remove-workspace-{}", workspace_index),
-                    IconName::Close,
-                )
-                .icon_size(IconSize::Small)
-                .icon_color(Color::Muted)
-                .tooltip(Tooltip::text("Remove Workspace"))
-                .on_click({
-                    let multi_workspace = multi_workspace;
-                    move |_, window, cx| {
-                        multi_workspace.update(cx, |mw, cx| {
-                            mw.remove_workspace(workspace_index, window, cx);
-                        });
-                    }
-                });
-
-                let has_notification = self.notified_workspaces.contains(&workspace_index);
-                let thread_subtitle = thread_info.as_ref().map(|info| info.title.clone());
-                let status = thread_info
-                    .as_ref()
-                    .map_or(AgentThreadStatus::default(), |info| info.status);
-                let running = matches!(
-                    status,
-                    AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation
-                );
-
-                Some(
-                    ThreadItem::new(
-                        ("workspace-item", thread_entry.index),
-                        thread_subtitle.unwrap_or("New Thread".into()),
-                    )
-                    .icon(
-                        thread_info
-                            .as_ref()
-                            .map_or(IconName::ZedAgent, |info| info.icon),
-                    )
-                    .running(running)
-                    .generation_done(has_notification)
-                    .status(status)
-                    .selected(selected)
-                    .worktree(worktree_label.clone())
-                    .worktree_highlight_positions(positions.clone())
-                    .when(workspace_count > 1, |item| item.action_slot(remove_btn))
-                    .hovered(is_hovered)
-                    .on_hover(cx.listener(move |picker, is_hovered, _window, cx| {
-                        let mut changed = false;
-                        if *is_hovered {
-                            if picker.delegate.hovered_thread_item != Some(workspace_index) {
-                                picker.delegate.hovered_thread_item = Some(workspace_index);
-                                changed = true;
-                            }
-                        } else if picker.delegate.hovered_thread_item == Some(workspace_index) {
-                            picker.delegate.hovered_thread_item = None;
-                            changed = true;
-                        }
-                        if changed {
-                            cx.notify();
-                        }
-                    }))
-                    .when(!full_path.is_empty(), |this| {
-                        this.tooltip(move |_, cx| {
-                            Tooltip::with_meta(worktree_label.clone(), None, full_path.clone(), cx)
-                        })
-                    })
-                    .into_any_element(),
-                )
-            }
-            SidebarEntry::RecentProject(project_entry) => {
-                let name = project_entry.name.clone();
-                let full_path = project_entry.full_path.clone();
-                let item_id: SharedString =
-                    format!("recent-project-{:?}", project_entry.workspace_id).into();
-
-                Some(
-                    ThreadItem::new(item_id, name.clone())
-                        .icon(IconName::Folder)
-                        .selected(selected)
-                        .highlight_positions(positions.clone())
-                        .tooltip(move |_, cx| {
-                            Tooltip::with_meta(name.clone(), None, full_path.clone(), cx)
-                        })
-                        .into_any_element(),
-                )
-            }
-        }
-    }
-
-    fn render_editor(
-        &self,
-        editor: &Arc<dyn ErasedEditor>,
-        window: &mut Window,
-        cx: &mut Context<Picker<Self>>,
-    ) -> Div {
-        h_flex()
-            .h(Tab::container_height(cx))
-            .w_full()
-            .px_2()
-            .gap_2()
-            .justify_between()
-            .border_b_1()
-            .border_color(cx.theme().colors().border)
-            .child(
-                Icon::new(IconName::MagnifyingGlass)
-                    .color(Color::Muted)
-                    .size(IconSize::Small),
-            )
-            .child(editor.render(window, cx))
-    }
 }
 
 pub struct Sidebar {
-    multi_workspace: Entity<MultiWorkspace>,
+    multi_workspace: WeakEntity<MultiWorkspace>,
     width: Pixels,
-    picker: Entity<Picker<WorkspacePickerDelegate>>,
-    _subscription: Subscription,
+    focus_handle: FocusHandle,
+    filter_editor: Entity<Editor>,
+    list_state: ListState,
+    contents: SidebarContents,
+    selection: Option<usize>,
+    collapsed_groups: HashSet<PathList>,
+    expanded_groups: HashSet<PathList>,
+    _subscriptions: Vec<Subscription>,
     _project_subscriptions: Vec<Subscription>,
     _agent_panel_subscriptions: Vec<Subscription>,
-    _thread_subscriptions: Vec<Subscription>,
-    #[cfg(any(test, feature = "test-support"))]
-    test_thread_infos: HashMap<usize, AgentThreadInfo>,
-    #[cfg(any(test, feature = "test-support"))]
-    test_recent_project_thread_titles: HashMap<SharedString, SharedString>,
-    _fetch_recent_projects: Task<()>,
+    _thread_store_subscription: Option<Subscription>,
 }
 
 impl EventEmitter<SidebarEvent> for Sidebar {}
@@ -716,15 +194,17 @@ impl Sidebar {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
-        let delegate = WorkspacePickerDelegate::new(multi_workspace.clone());
-        let picker = cx.new(|cx| {
-            Picker::list(delegate, window, cx)
-                .max_height(None)
-                .show_scrollbar(true)
-                .modal(false)
+        let focus_handle = cx.focus_handle();
+        cx.on_focus_in(&focus_handle, window, Self::focus_in)
+            .detach();
+
+        let filter_editor = cx.new(|cx| {
+            let mut editor = Editor::single_line(window, cx);
+            editor.set_placeholder_text("Search threads…", window, cx);
+            editor
         });
 
-        let subscription = cx.observe_in(
+        let observe_subscription = cx.observe_in(
             &multi_workspace,
             window,
             |this, _multi_workspace, window, cx| {
@@ -732,38 +212,46 @@ impl Sidebar {
             },
         );
 
-        let fetch_recent_projects = {
-            let picker = picker.downgrade();
-            let fs = <dyn Fs>::global(cx);
-            cx.spawn_in(window, async move |_this, cx| {
-                let projects = get_recent_projects(None, None, fs).await;
-
-                cx.update(|window, cx| {
-                    if let Some(picker) = picker.upgrade() {
-                        picker.update(cx, |picker, cx| {
-                            picker.delegate.set_recent_projects(projects, cx);
-                            let query = picker.query(cx);
-                            picker.update_matches(query, window, cx);
+        let filter_subscription = cx.subscribe(&filter_editor, |this: &mut Self, _, event, cx| {
+            if let editor::EditorEvent::BufferEdited = event {
+                let query = this.filter_editor.read(cx).text(cx);
+                if !query.is_empty() {
+                    this.selection.take();
+                }
+                this.rebuild_contents(cx);
+                this.list_state.reset(this.contents.entries.len());
+                if !query.is_empty() {
+                    this.selection = this
+                        .contents
+                        .entries
+                        .iter()
+                        .position(|entry| matches!(entry, ListEntry::Thread { .. }))
+                        .or_else(|| {
+                            if this.contents.entries.is_empty() {
+                                None
+                            } else {
+                                Some(0)
+                            }
                         });
-                    }
-                })
-                .log_err();
-            })
-        };
+                }
+                cx.notify();
+            }
+        });
 
         let mut this = Self {
-            multi_workspace,
+            multi_workspace: multi_workspace.downgrade(),
             width: DEFAULT_WIDTH,
-            picker,
-            _subscription: subscription,
+            focus_handle,
+            filter_editor,
+            list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)),
+            contents: SidebarContents::default(),
+            selection: None,
+            collapsed_groups: HashSet::new(),
+            expanded_groups: HashSet::new(),
+            _subscriptions: vec![observe_subscription, filter_subscription],
             _project_subscriptions: Vec::new(),
             _agent_panel_subscriptions: Vec::new(),
-            _thread_subscriptions: Vec::new(),
-            #[cfg(any(test, feature = "test-support"))]
-            test_thread_infos: HashMap::new(),
-            #[cfg(any(test, feature = "test-support"))]
-            test_recent_project_thread_titles: HashMap::new(),
-            _fetch_recent_projects: fetch_recent_projects,
+            _thread_store_subscription: None,
         };
         this.update_entries(window, cx);
         this
@@ -774,8 +262,10 @@ impl Sidebar {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Vec<Subscription> {
-        let projects: Vec<_> = self
-            .multi_workspace
+        let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+            return Vec::new();
+        };
+        let projects: Vec<_> = multi_workspace
             .read(cx)
             .workspaces()
             .iter()
@@ -801,80 +291,15 @@ impl Sidebar {
             .collect()
     }
 
-    fn build_workspace_thread_entries(
-        &self,
-        multi_workspace: &MultiWorkspace,
-        cx: &App,
-    ) -> (Vec<WorkspaceThreadEntry>, usize) {
-        #[allow(unused_mut)]
-        let mut entries: Vec<WorkspaceThreadEntry> = multi_workspace
-            .workspaces()
-            .iter()
-            .enumerate()
-            .map(|(index, workspace)| WorkspaceThreadEntry::new(index, workspace, cx))
-            .collect();
-
-        #[cfg(any(test, feature = "test-support"))]
-        for (index, info) in &self.test_thread_infos {
-            if let Some(entry) = entries.get_mut(*index) {
-                entry.thread_info = Some(info.clone());
-            }
-        }
-
-        (entries, multi_workspace.active_workspace_index())
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn set_test_recent_projects(
-        &self,
-        projects: Vec<RecentProjectEntry>,
-        cx: &mut Context<Self>,
-    ) {
-        self.picker.update(cx, |picker, _cx| {
-            picker.delegate.recent_projects = projects;
-        });
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn set_test_thread_info(
-        &mut self,
-        index: usize,
-        title: SharedString,
-        status: AgentThreadStatus,
-    ) {
-        self.test_thread_infos.insert(
-            index,
-            AgentThreadInfo {
-                title,
-                status,
-                icon: IconName::ZedAgent,
-            },
-        );
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn set_test_recent_project_thread_title(
+    fn subscribe_to_agent_panels(
         &mut self,
-        full_path: SharedString,
-        title: SharedString,
-        cx: &mut Context<Self>,
-    ) {
-        self.test_recent_project_thread_titles
-            .insert(full_path.clone(), title.clone());
-        self.picker.update(cx, |picker, _cx| {
-            picker
-                .delegate
-                .recent_project_thread_titles
-                .insert(full_path, title);
-        });
-    }
-
-    fn subscribe_to_agent_panels(
-        &mut self,
-        window: &mut Window,
+        window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Vec<Subscription> {
-        let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec();
+        let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+            return Vec::new();
+        };
+        let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().to_vec();
 
         workspaces
             .iter()

crates/sum_tree/src/sum_tree.rs 🔗

@@ -661,6 +661,51 @@ impl<T: Item> SumTree<T> {
         }
     }
 
+    pub fn update_first(
+        &mut self,
+        f: impl FnOnce(&mut T),
+        cx: <T::Summary as Summary>::Context<'_>,
+    ) {
+        self.update_first_recursive(f, cx);
+    }
+
+    fn update_first_recursive(
+        &mut self,
+        f: impl FnOnce(&mut T),
+        cx: <T::Summary as Summary>::Context<'_>,
+    ) -> Option<T::Summary> {
+        match Arc::make_mut(&mut self.0) {
+            Node::Internal {
+                summary,
+                child_summaries,
+                child_trees,
+                ..
+            } => {
+                let first_summary = child_summaries.first_mut().unwrap();
+                let first_child = child_trees.first_mut().unwrap();
+                *first_summary = first_child.update_first_recursive(f, cx).unwrap();
+                *summary = sum(child_summaries.iter(), cx);
+                Some(summary.clone())
+            }
+            Node::Leaf {
+                summary,
+                items,
+                item_summaries,
+            } => {
+                if let Some((item, item_summary)) =
+                    items.first_mut().zip(item_summaries.first_mut())
+                {
+                    (f)(item);
+                    *item_summary = item.summary(cx);
+                    *summary = sum(item_summaries.iter(), cx);
+                    Some(summary.clone())
+                } else {
+                    None
+                }
+            }
+        }
+    }
+
     pub fn extent<'a, D: Dimension<'a, T::Summary>>(
         &'a self,
         cx: <T::Summary as Summary>::Context<'_>,

crates/sum_tree/src/tree_map.rs 🔗

@@ -53,6 +53,10 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
         self.0.is_empty()
     }
 
+    pub fn contains_key(&self, key: &K) -> bool {
+        self.get(key).is_some()
+    }
+
     pub fn get(&self, key: &K) -> Option<&V> {
         let (.., item) = self
             .0

crates/supermaven/Cargo.toml 🔗

@@ -1,44 +0,0 @@
-[package]
-name = "supermaven"
-version = "0.1.0"
-edition.workspace = true
-publish.workspace = true
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/supermaven.rs"
-doctest = false
-
-[dependencies]
-anyhow.workspace = true
-client.workspace = true
-collections.workspace = true
-edit_prediction_types.workspace = true
-futures.workspace = true
-gpui.workspace = true
-language.workspace = true
-log.workspace = true
-postage.workspace = true
-serde.workspace = true
-serde_json.workspace = true
-settings.workspace = true
-smol.workspace = true
-supermaven_api.workspace = true
-text.workspace = true
-ui.workspace = true
-unicode-segmentation.workspace = true
-util.workspace = true
-
-[dev-dependencies]
-editor = { workspace = true, features = ["test-support"] }
-env_logger.workspace = true
-gpui = { workspace = true, features = ["test-support"] }
-language = { workspace = true, features = ["test-support"] }
-project = { workspace = true, features = ["test-support"] }
-settings = { workspace = true, features = ["test-support"] }
-theme = { workspace = true, features = ["test-support"] }
-util = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }

crates/supermaven/src/messages.rs 🔗

@@ -1,146 +0,0 @@
-use serde::{Deserialize, Serialize};
-
-// Outbound messages
-#[derive(Debug, Serialize)]
-#[serde(tag = "kind", rename_all = "snake_case")]
-pub enum OutboundMessage {
-    StateUpdate(StateUpdateMessage),
-    #[allow(dead_code)]
-    UseFreeVersion,
-    Logout,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct StateUpdateMessage {
-    pub new_id: String,
-    pub updates: Vec<StateUpdate>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(tag = "kind", rename_all = "snake_case")]
-pub enum StateUpdate {
-    FileUpdate(FileUpdateMessage),
-    CursorUpdate(CursorPositionUpdateMessage),
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub struct FileUpdateMessage {
-    pub path: String,
-    pub content: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub struct CursorPositionUpdateMessage {
-    pub path: String,
-    pub offset: usize,
-}
-
-// Inbound messages coming in on stdout
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(tag = "kind", rename_all = "snake_case")]
-pub enum ResponseItem {
-    // A completion
-    Text { text: String },
-    // Vestigial message type from old versions -- safe to ignore
-    Del { text: String },
-    // Be able to delete whitespace prior to the cursor, likely for the rest of the completion
-    Dedent { text: String },
-    // When the completion is over
-    End,
-    // Got the closing parentheses and shouldn't show any more after
-    Barrier,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SupermavenResponse {
-    pub state_id: String,
-    pub items: Vec<ResponseItem>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct SupermavenMetadataMessage {
-    pub dust_strings: Option<Vec<String>>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct SupermavenTaskUpdateMessage {
-    pub task: String,
-    pub status: TaskStatus,
-    pub percent_complete: Option<f32>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum TaskStatus {
-    InProgress,
-    Complete,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct SupermavenActiveRepoMessage {
-    pub repo_simple_name: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(tag = "kind", rename_all = "snake_case")]
-pub enum SupermavenPopupAction {
-    OpenUrl { label: String, url: String },
-    NoOp { label: String },
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub struct SupermavenPopupMessage {
-    pub message: String,
-    pub actions: Vec<SupermavenPopupAction>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(tag = "kind", rename_all = "camelCase")]
-pub struct ActivationRequest {
-    pub activate_url: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SupermavenSetMessage {
-    pub key: String,
-    pub value: serde_json::Value,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub enum ServiceTier {
-    FreeNoLicense,
-    #[serde(other)]
-    Unknown,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(tag = "kind", rename_all = "snake_case")]
-pub enum SupermavenMessage {
-    Response(SupermavenResponse),
-    Metadata(SupermavenMetadataMessage),
-    Apology {
-        message: Option<String>,
-    },
-    ActivationRequest(ActivationRequest),
-    ActivationSuccess,
-    Passthrough {
-        passthrough: Box<SupermavenMessage>,
-    },
-    Popup(SupermavenPopupMessage),
-    TaskStatus(SupermavenTaskUpdateMessage),
-    ActiveRepo(SupermavenActiveRepoMessage),
-    ServiceTier {
-        service_tier: ServiceTier,
-    },
-
-    Set(SupermavenSetMessage),
-    #[serde(other)]
-    Unknown,
-}

crates/supermaven/src/supermaven.rs 🔗

@@ -1,485 +0,0 @@
-mod messages;
-mod supermaven_edit_prediction_delegate;
-
-pub use supermaven_edit_prediction_delegate::*;
-
-use anyhow::{Context as _, Result};
-#[allow(unused_imports)]
-use client::{Client, proto};
-use collections::BTreeMap;
-
-use futures::{AsyncBufReadExt, StreamExt, channel::mpsc, io::BufReader};
-use gpui::{App, AsyncApp, Context, Entity, EntityId, Global, Task, WeakEntity, actions};
-use language::{
-    Anchor, Buffer, BufferSnapshot, ToOffset, language_settings::all_language_settings,
-};
-use messages::*;
-use postage::watch;
-use serde::{Deserialize, Serialize};
-use settings::SettingsStore;
-use smol::io::AsyncWriteExt;
-use std::{path::PathBuf, sync::Arc};
-use ui::prelude::*;
-use util::ResultExt;
-use util::command::Child;
-use util::command::Stdio;
-
-actions!(
-    supermaven,
-    [
-        /// Signs out of Supermaven.
-        SignOut
-    ]
-);
-
-pub fn init(client: Arc<Client>, cx: &mut App) {
-    let supermaven = cx.new(|_| Supermaven::Starting);
-    Supermaven::set_global(supermaven.clone(), cx);
-
-    let mut provider = all_language_settings(None, cx).edit_predictions.provider;
-    if provider == language::language_settings::EditPredictionProvider::Supermaven {
-        supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx));
-    }
-
-    cx.observe_global::<SettingsStore>(move |cx| {
-        let new_provider = all_language_settings(None, cx).edit_predictions.provider;
-        if new_provider != provider {
-            provider = new_provider;
-            if provider == language::language_settings::EditPredictionProvider::Supermaven {
-                supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx));
-            } else {
-                supermaven.update(cx, |supermaven, _cx| supermaven.stop());
-            }
-        }
-    })
-    .detach();
-
-    cx.on_action(|_: &SignOut, cx| {
-        if let Some(supermaven) = Supermaven::global(cx) {
-            supermaven.update(cx, |supermaven, _cx| supermaven.sign_out());
-        }
-    });
-}
-
-pub enum Supermaven {
-    Starting,
-    FailedDownload { error: anyhow::Error },
-    Spawned(SupermavenAgent),
-    Error { error: anyhow::Error },
-}
-
-#[derive(Clone)]
-pub enum AccountStatus {
-    Unknown,
-    NeedsActivation { activate_url: String },
-    Ready,
-}
-
-#[derive(Clone)]
-struct SupermavenGlobal(Entity<Supermaven>);
-
-impl Global for SupermavenGlobal {}
-
-impl Supermaven {
-    pub fn global(cx: &App) -> Option<Entity<Self>> {
-        cx.try_global::<SupermavenGlobal>()
-            .map(|model| model.0.clone())
-    }
-
-    pub fn set_global(supermaven: Entity<Self>, cx: &mut App) {
-        cx.set_global(SupermavenGlobal(supermaven));
-    }
-
-    pub fn start(&mut self, client: Arc<Client>, cx: &mut Context<Self>) {
-        if let Self::Starting = self {
-            cx.spawn(async move |this, cx| {
-                let binary_path =
-                    supermaven_api::get_supermaven_agent_path(client.http_client()).await?;
-
-                this.update(cx, |this, cx| {
-                    if let Self::Starting = this {
-                        *this =
-                            Self::Spawned(SupermavenAgent::new(binary_path, client.clone(), cx)?);
-                    }
-                    anyhow::Ok(())
-                })
-            })
-            .detach_and_log_err(cx)
-        }
-    }
-
-    pub fn stop(&mut self) {
-        *self = Self::Starting;
-    }
-
-    pub fn is_enabled(&self) -> bool {
-        matches!(self, Self::Spawned { .. })
-    }
-
-    pub fn complete(
-        &mut self,
-        buffer: &Entity<Buffer>,
-        cursor_position: Anchor,
-        cx: &App,
-    ) -> Option<SupermavenCompletion> {
-        if let Self::Spawned(agent) = self {
-            let buffer_id = buffer.entity_id();
-            let buffer = buffer.read(cx);
-            let path = buffer
-                .file()
-                .and_then(|file| Some(file.as_local()?.abs_path(cx)))
-                .unwrap_or_else(|| PathBuf::from("untitled"))
-                .to_string_lossy()
-                .to_string();
-            let content = buffer.text();
-            let offset = cursor_position.to_offset(buffer);
-            let state_id = agent.next_state_id;
-            agent.next_state_id.0 += 1;
-
-            let (updates_tx, mut updates_rx) = watch::channel();
-            postage::stream::Stream::try_recv(&mut updates_rx).unwrap();
-
-            agent.states.insert(
-                state_id,
-                SupermavenCompletionState {
-                    buffer_id,
-                    prefix_anchor: cursor_position,
-                    prefix_offset: offset,
-                    text: String::new(),
-                    dedent: String::new(),
-                    updates_tx,
-                },
-            );
-            // ensure the states map is max 1000 elements
-            if agent.states.len() > 1000 {
-                // state id is monotonic so it's sufficient to remove the first element
-                agent
-                    .states
-                    .remove(&agent.states.keys().next().unwrap().clone());
-            }
-
-            let _ = agent
-                .outgoing_tx
-                .unbounded_send(OutboundMessage::StateUpdate(StateUpdateMessage {
-                    new_id: state_id.0.to_string(),
-                    updates: vec![
-                        StateUpdate::FileUpdate(FileUpdateMessage {
-                            path: path.clone(),
-                            content,
-                        }),
-                        StateUpdate::CursorUpdate(CursorPositionUpdateMessage { path, offset }),
-                    ],
-                }));
-
-            Some(SupermavenCompletion {
-                id: state_id,
-                updates: updates_rx,
-            })
-        } else {
-            None
-        }
-    }
-
-    pub fn completion(
-        &self,
-        buffer: &Entity<Buffer>,
-        cursor_position: Anchor,
-        cx: &App,
-    ) -> Option<&str> {
-        if let Self::Spawned(agent) = self {
-            find_relevant_completion(
-                &agent.states,
-                buffer.entity_id(),
-                &buffer.read(cx).snapshot(),
-                cursor_position,
-            )
-        } else {
-            None
-        }
-    }
-
-    pub fn sign_out(&mut self) {
-        if let Self::Spawned(agent) = self {
-            agent
-                .outgoing_tx
-                .unbounded_send(OutboundMessage::Logout)
-                .ok();
-            // The account status will get set to RequiresActivation or Ready when the next
-            // message from the agent comes in. Until that happens, set the status to Unknown
-            // to disable the button.
-            agent.account_status = AccountStatus::Unknown;
-        }
-    }
-}
-
-fn find_relevant_completion<'a>(
-    states: &'a BTreeMap<SupermavenCompletionStateId, SupermavenCompletionState>,
-    buffer_id: EntityId,
-    buffer: &BufferSnapshot,
-    cursor_position: Anchor,
-) -> Option<&'a str> {
-    let mut best_completion: Option<&str> = None;
-    'completions: for state in states.values() {
-        if state.buffer_id != buffer_id {
-            continue;
-        }
-        let Some(state_completion) = state.text.strip_prefix(&state.dedent) else {
-            continue;
-        };
-
-        let current_cursor_offset = cursor_position.to_offset(buffer);
-        if current_cursor_offset < state.prefix_offset {
-            continue;
-        }
-
-        let original_cursor_offset = buffer.clip_offset(state.prefix_offset, text::Bias::Left);
-        let text_inserted_since_completion_request: String = buffer
-            .text_for_range(original_cursor_offset..current_cursor_offset)
-            .collect();
-        let trimmed_completion =
-            match state_completion.strip_prefix(&text_inserted_since_completion_request) {
-                Some(suffix) => suffix,
-                None => continue 'completions,
-            };
-
-        if best_completion.is_some_and(|best| best.len() > trimmed_completion.len()) {
-            continue;
-        }
-
-        best_completion = Some(trimmed_completion);
-    }
-    best_completion
-}
-
-pub struct SupermavenAgent {
-    _process: Child,
-    next_state_id: SupermavenCompletionStateId,
-    states: BTreeMap<SupermavenCompletionStateId, SupermavenCompletionState>,
-    outgoing_tx: mpsc::UnboundedSender<OutboundMessage>,
-    _handle_outgoing_messages: Task<Result<()>>,
-    _handle_incoming_messages: Task<Result<()>>,
-    pub account_status: AccountStatus,
-    service_tier: Option<ServiceTier>,
-    #[allow(dead_code)]
-    client: Arc<Client>,
-}
-
-impl SupermavenAgent {
-    fn new(
-        binary_path: PathBuf,
-        client: Arc<Client>,
-        cx: &mut Context<Supermaven>,
-    ) -> Result<Self> {
-        let mut process = util::command::new_command(&binary_path)
-            .arg("stdio")
-            .stdin(Stdio::piped())
-            .stdout(Stdio::piped())
-            .stderr(Stdio::piped())
-            .kill_on_drop(true)
-            .spawn()
-            .context("failed to start the binary")?;
-
-        let stdin = process
-            .stdin
-            .take()
-            .context("failed to get stdin for process")?;
-        let stdout = process
-            .stdout
-            .take()
-            .context("failed to get stdout for process")?;
-
-        let (outgoing_tx, outgoing_rx) = mpsc::unbounded();
-
-        Ok(Self {
-            _process: process,
-            next_state_id: SupermavenCompletionStateId::default(),
-            states: BTreeMap::default(),
-            outgoing_tx,
-            _handle_outgoing_messages: cx.spawn(async move |_, _cx| {
-                Self::handle_outgoing_messages(outgoing_rx, stdin).await
-            }),
-            _handle_incoming_messages: cx.spawn(async move |this, cx| {
-                Self::handle_incoming_messages(this, stdout, cx).await
-            }),
-            account_status: AccountStatus::Unknown,
-            service_tier: None,
-            client,
-        })
-    }
-
-    async fn handle_outgoing_messages<W: smol::io::AsyncWrite + Unpin>(
-        mut outgoing: mpsc::UnboundedReceiver<OutboundMessage>,
-        mut stdin: W,
-    ) -> Result<()> {
-        while let Some(message) = outgoing.next().await {
-            let bytes = serde_json::to_vec(&message)?;
-            stdin.write_all(&bytes).await?;
-            stdin.write_all(&[b'\n']).await?;
-        }
-        Ok(())
-    }
-
-    async fn handle_incoming_messages<R: smol::io::AsyncRead + Unpin>(
-        this: WeakEntity<Supermaven>,
-        stdout: R,
-        cx: &mut AsyncApp,
-    ) -> Result<()> {
-        const MESSAGE_PREFIX: &str = "SM-MESSAGE ";
-
-        let stdout = BufReader::new(stdout);
-        let mut lines = stdout.lines();
-        while let Some(line) = lines.next().await {
-            let Some(line) = line.context("failed to read line from stdout").log_err() else {
-                continue;
-            };
-            let Some(line) = line.strip_prefix(MESSAGE_PREFIX) else {
-                continue;
-            };
-            let Some(message) = serde_json::from_str::<SupermavenMessage>(line)
-                .with_context(|| format!("failed to deserialize line from stdout: {:?}", line))
-                .log_err()
-            else {
-                continue;
-            };
-
-            this.update(cx, |this, _cx| {
-                if let Supermaven::Spawned(this) = this {
-                    this.handle_message(message);
-                }
-                Task::ready(anyhow::Ok(()))
-            })?
-            .await?;
-        }
-
-        Ok(())
-    }
-
-    fn handle_message(&mut self, message: SupermavenMessage) {
-        match message {
-            SupermavenMessage::ActivationRequest(request) => {
-                self.account_status = match request.activate_url {
-                    Some(activate_url) => AccountStatus::NeedsActivation { activate_url },
-                    None => AccountStatus::Ready,
-                };
-            }
-            SupermavenMessage::ActivationSuccess => {
-                self.account_status = AccountStatus::Ready;
-            }
-            SupermavenMessage::ServiceTier { service_tier } => {
-                self.account_status = AccountStatus::Ready;
-                self.service_tier = Some(service_tier);
-            }
-            SupermavenMessage::Response(response) => {
-                let state_id = SupermavenCompletionStateId(response.state_id.parse().unwrap());
-                if let Some(state) = self.states.get_mut(&state_id) {
-                    for item in &response.items {
-                        match item {
-                            ResponseItem::Text { text } => state.text.push_str(text),
-                            ResponseItem::Dedent { text } => state.dedent.push_str(text),
-                            _ => {}
-                        }
-                    }
-                    *state.updates_tx.borrow_mut() = ();
-                }
-            }
-            SupermavenMessage::Passthrough { passthrough } => self.handle_message(*passthrough),
-            _ => {
-                log::warn!("unhandled message: {:?}", message);
-            }
-        }
-    }
-}
-
-#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
-pub struct SupermavenCompletionStateId(usize);
-
-#[allow(dead_code)]
-pub struct SupermavenCompletionState {
-    buffer_id: EntityId,
-    prefix_anchor: Anchor,
-    // prefix_offset is tracked independently because the anchor biases left which
-    // doesn't allow us to determine if the prior text has been deleted.
-    prefix_offset: usize,
-    text: String,
-    dedent: String,
-    updates_tx: watch::Sender<()>,
-}
-
-pub struct SupermavenCompletion {
-    pub id: SupermavenCompletionStateId,
-    pub updates: watch::Receiver<()>,
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use collections::BTreeMap;
-    use gpui::TestAppContext;
-    use language::Buffer;
-
-    #[gpui::test]
-    async fn test_find_relevant_completion_no_first_letter_skip(cx: &mut TestAppContext) {
-        let buffer = cx.new(|cx| Buffer::local("hello world", cx));
-        let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-
-        let mut states = BTreeMap::new();
-        let state_id = SupermavenCompletionStateId(1);
-        let (updates_tx, _) = watch::channel();
-
-        states.insert(
-            state_id,
-            SupermavenCompletionState {
-                buffer_id: buffer.entity_id(),
-                prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer
-                prefix_offset: 0,
-                text: "hello".to_string(),
-                dedent: String::new(),
-                updates_tx,
-            },
-        );
-
-        let cursor_position = buffer_snapshot.anchor_after(1);
-
-        let result = find_relevant_completion(
-            &states,
-            buffer.entity_id(),
-            &buffer_snapshot,
-            cursor_position,
-        );
-
-        assert_eq!(result, Some("ello"));
-    }
-
-    #[gpui::test]
-    async fn test_find_relevant_completion_with_multiple_chars(cx: &mut TestAppContext) {
-        let buffer = cx.new(|cx| Buffer::local("hello world", cx));
-        let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-
-        let mut states = BTreeMap::new();
-        let state_id = SupermavenCompletionStateId(1);
-        let (updates_tx, _) = watch::channel();
-
-        states.insert(
-            state_id,
-            SupermavenCompletionState {
-                buffer_id: buffer.entity_id(),
-                prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer
-                prefix_offset: 0,
-                text: "hello".to_string(),
-                dedent: String::new(),
-                updates_tx,
-            },
-        );
-
-        let cursor_position = buffer_snapshot.anchor_after(3);
-
-        let result = find_relevant_completion(
-            &states,
-            buffer.entity_id(),
-            &buffer_snapshot,
-            cursor_position,
-        );
-
-        assert_eq!(result, Some("lo"));
-    }
-}

crates/supermaven/src/supermaven_edit_prediction_delegate.rs 🔗

@@ -1,303 +0,0 @@
-use crate::{Supermaven, SupermavenCompletionStateId};
-use anyhow::Result;
-use edit_prediction_types::{
-    EditPrediction, EditPredictionDelegate, EditPredictionDiscardReason, EditPredictionIconSet,
-};
-use futures::StreamExt as _;
-use gpui::{App, Context, Entity, EntityId, Task};
-use language::{Anchor, Buffer, BufferSnapshot};
-use std::{
-    ops::{AddAssign, Range},
-    path::Path,
-    sync::Arc,
-    time::Duration,
-};
-use text::{ToOffset, ToPoint};
-use ui::prelude::*;
-use unicode_segmentation::UnicodeSegmentation;
-
-pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75);
-
-pub struct SupermavenEditPredictionDelegate {
-    supermaven: Entity<Supermaven>,
-    buffer_id: Option<EntityId>,
-    completion_id: Option<SupermavenCompletionStateId>,
-    completion_text: Option<String>,
-    file_extension: Option<String>,
-    pending_refresh: Option<Task<Result<()>>>,
-    completion_position: Option<language::Anchor>,
-}
-
-impl SupermavenEditPredictionDelegate {
-    pub fn new(supermaven: Entity<Supermaven>) -> Self {
-        Self {
-            supermaven,
-            buffer_id: None,
-            completion_id: None,
-            completion_text: None,
-            file_extension: None,
-            pending_refresh: None,
-            completion_position: None,
-        }
-    }
-}
-
-// Computes the edit prediction from the difference between the completion text.
-// This is defined by greedily matching the buffer text against the completion text.
-// Inlays are inserted for parts of the completion text that are not present in the buffer text.
-// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]".
-// The parts in brackets are the inlays.
-fn completion_from_diff(
-    snapshot: BufferSnapshot,
-    completion_text: &str,
-    position: Anchor,
-    delete_range: Range<Anchor>,
-) -> EditPrediction {
-    let buffer_text = snapshot.text_for_range(delete_range).collect::<String>();
-
-    let mut edits: Vec<(Range<language::Anchor>, Arc<str>)> = Vec::new();
-
-    let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect();
-    let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect();
-
-    let mut offset = position.to_offset(&snapshot);
-
-    let mut i = 0;
-    let mut j = 0;
-    while i < completion_graphemes.len() && j < buffer_graphemes.len() {
-        // find the next instance of the buffer text in the completion text.
-        let k = completion_graphemes[i..]
-            .iter()
-            .position(|c| *c == buffer_graphemes[j]);
-        match k {
-            Some(k) => {
-                if k != 0 {
-                    let offset = snapshot.anchor_after(offset);
-                    // the range from the current position to item is an inlay.
-                    let edit = (
-                        offset..offset,
-                        completion_graphemes[i..i + k].join("").into(),
-                    );
-                    edits.push(edit);
-                }
-                i += k + 1;
-                j += 1;
-                offset.add_assign(buffer_graphemes[j - 1].len());
-            }
-            None => {
-                // there are no more matching completions, so drop the remaining
-                // completion text as an inlay.
-                break;
-            }
-        }
-    }
-
-    if j == buffer_graphemes.len() && i < completion_graphemes.len() {
-        let offset = snapshot.anchor_after(offset);
-        // there is leftover completion text, so drop it as an inlay.
-        let edit_range = offset..offset;
-        let edit_text = completion_graphemes[i..].join("");
-        edits.push((edit_range, edit_text.into()));
-    }
-
-    EditPrediction::Local {
-        id: None,
-        edits,
-        cursor_position: None,
-        edit_preview: None,
-    }
-}
-
-impl EditPredictionDelegate for SupermavenEditPredictionDelegate {
-    fn name() -> &'static str {
-        "supermaven"
-    }
-
-    fn display_name() -> &'static str {
-        "Supermaven"
-    }
-
-    fn show_predictions_in_menu() -> bool {
-        true
-    }
-
-    fn show_tab_accept_marker() -> bool {
-        true
-    }
-
-    fn supports_jump_to_edit() -> bool {
-        false
-    }
-
-    fn icons(&self, _cx: &App) -> EditPredictionIconSet {
-        EditPredictionIconSet::new(IconName::Supermaven)
-            .with_disabled(IconName::SupermavenDisabled)
-            .with_error(IconName::SupermavenError)
-    }
-
-    fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, cx: &App) -> bool {
-        self.supermaven.read(cx).is_enabled()
-    }
-
-    fn is_refreshing(&self, _cx: &App) -> bool {
-        self.pending_refresh.is_some() && self.completion_id.is_none()
-    }
-
-    fn refresh(
-        &mut self,
-        buffer_handle: Entity<Buffer>,
-        cursor_position: Anchor,
-        debounce: bool,
-        cx: &mut Context<Self>,
-    ) {
-        // Only make new completion requests when debounce is true (i.e., when text is typed)
-        // When debounce is false (i.e., cursor movement), we should not make new requests
-        if !debounce {
-            return;
-        }
-
-        reset_completion_cache(self, cx);
-
-        let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| {
-            supermaven.complete(&buffer_handle, cursor_position, cx)
-        }) else {
-            return;
-        };
-
-        self.pending_refresh = Some(cx.spawn(async move |this, cx| {
-            if debounce {
-                cx.background_executor().timer(DEBOUNCE_TIMEOUT).await;
-            }
-
-            while let Some(()) = completion.updates.next().await {
-                this.update(cx, |this, cx| {
-                    // Get the completion text and cache it
-                    if let Some(text) =
-                        this.supermaven
-                            .read(cx)
-                            .completion(&buffer_handle, cursor_position, cx)
-                    {
-                        this.completion_text = Some(text.to_string());
-
-                        this.completion_position = Some(cursor_position);
-                    }
-
-                    this.completion_id = Some(completion.id);
-                    this.buffer_id = Some(buffer_handle.entity_id());
-                    this.file_extension = buffer_handle.read(cx).file().and_then(|file| {
-                        Some(
-                            Path::new(file.file_name(cx))
-                                .extension()?
-                                .to_str()?
-                                .to_string(),
-                        )
-                    });
-                    cx.notify();
-                })?;
-            }
-            Ok(())
-        }));
-    }
-
-    fn accept(&mut self, _cx: &mut Context<Self>) {
-        reset_completion_cache(self, _cx);
-    }
-
-    fn discard(&mut self, _reason: EditPredictionDiscardReason, _cx: &mut Context<Self>) {
-        reset_completion_cache(self, _cx);
-    }
-
-    fn suggest(
-        &mut self,
-        buffer: &Entity<Buffer>,
-        cursor_position: Anchor,
-        cx: &mut Context<Self>,
-    ) -> Option<EditPrediction> {
-        if self.buffer_id != Some(buffer.entity_id()) {
-            return None;
-        }
-
-        if self.completion_id.is_none() {
-            return None;
-        }
-
-        let completion_text = if let Some(cached_text) = &self.completion_text {
-            cached_text.as_str()
-        } else {
-            let text = self
-                .supermaven
-                .read(cx)
-                .completion(buffer, cursor_position, cx)?;
-            self.completion_text = Some(text.to_string());
-            text
-        };
-
-        // Check if the cursor is still at the same position as the completion request
-        // If we don't have a completion position stored, don't show the completion
-        if let Some(completion_position) = self.completion_position {
-            if cursor_position != completion_position {
-                return None;
-            }
-        } else {
-            return None;
-        }
-
-        let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text);
-
-        let completion_text = completion_text.trim_end();
-
-        if !completion_text.trim().is_empty() {
-            let snapshot = buffer.read(cx).snapshot();
-
-            // Calculate the range from cursor to end of line correctly
-            let cursor_point = cursor_position.to_point(&snapshot);
-            let end_of_line = snapshot.anchor_after(language::Point::new(
-                cursor_point.row,
-                snapshot.line_len(cursor_point.row),
-            ));
-            let delete_range = cursor_position..end_of_line;
-
-            Some(completion_from_diff(
-                snapshot,
-                completion_text,
-                cursor_position,
-                delete_range,
-            ))
-        } else {
-            None
-        }
-    }
-}
-
-fn reset_completion_cache(
-    provider: &mut SupermavenEditPredictionDelegate,
-    _cx: &mut Context<SupermavenEditPredictionDelegate>,
-) {
-    provider.pending_refresh = None;
-    provider.completion_id = None;
-    provider.completion_text = None;
-    provider.completion_position = None;
-    provider.buffer_id = None;
-}
-
-fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str {
-    if has_leading_newline(text) {
-        text
-    } else if let Some(i) = text.find('\n') {
-        &text[..i]
-    } else {
-        text
-    }
-}
-
-fn has_leading_newline(text: &str) -> bool {
-    for c in text.chars() {
-        if c == '\n' {
-            return true;
-        }
-        if !c.is_whitespace() {
-            return false;
-        }
-    }
-    false
-}

crates/supermaven_api/Cargo.toml 🔗

@@ -1,23 +0,0 @@
-[package]
-name = "supermaven_api"
-version = "0.1.0"
-edition.workspace = true
-publish.workspace = true
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/supermaven_api.rs"
-doctest = false
-
-[dependencies]
-anyhow.workspace = true
-futures.workspace = true
-http_client.workspace = true
-paths.workspace = true
-serde.workspace = true
-serde_json.workspace = true
-smol.workspace = true
-util.workspace = true

crates/supermaven_api/src/supermaven_api.rs 🔗

@@ -1,125 +0,0 @@
-use anyhow::{Context as _, Result};
-use futures::AsyncReadExt;
-use futures::io::BufReader;
-use http_client::{AsyncBody, HttpClient, Request as HttpRequest};
-use paths::supermaven_dir;
-use serde::Deserialize;
-use smol::fs::{self, File};
-use std::path::{Path, PathBuf};
-use std::sync::Arc;
-
-use util::fs::{make_file_executable, remove_matching};
-
-#[derive(Deserialize)]
-pub struct SupermavenApiError {
-    pub message: String,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SupermavenDownloadResponse {
-    pub download_url: String,
-    pub version: u64,
-    pub sha256_hash: String,
-}
-
-pub async fn latest_release(
-    client: Arc<dyn HttpClient>,
-    platform: &str,
-    arch: &str,
-) -> Result<SupermavenDownloadResponse> {
-    let uri = format!(
-        "https://supermaven.com/api/download-path?platform={}&arch={}",
-        platform, arch
-    );
-
-    // Download is not authenticated
-    let request = HttpRequest::get(&uri);
-
-    let mut response = client
-        .send(request.body(AsyncBody::default())?)
-        .await
-        .with_context(|| "Unable to acquire Supermaven Agent".to_string())?;
-
-    let mut body = Vec::new();
-    response.body_mut().read_to_end(&mut body).await?;
-
-    if response.status().is_client_error() || response.status().is_server_error() {
-        let body_str = std::str::from_utf8(&body)?;
-        let error: SupermavenApiError = serde_json::from_str(body_str)?;
-        anyhow::bail!("Supermaven API error: {}", error.message);
-    }
-
-    serde_json::from_slice::<SupermavenDownloadResponse>(&body)
-        .with_context(|| "Unable to parse Supermaven Agent response".to_string())
-}
-
-pub fn version_path(version: u64) -> PathBuf {
-    supermaven_dir().join(format!(
-        "sm-agent-{}{}",
-        version,
-        std::env::consts::EXE_SUFFIX
-    ))
-}
-
-pub async fn has_version(version_path: &Path) -> bool {
-    fs::metadata(version_path).await.is_ok_and(|m| m.is_file())
-}
-
-pub async fn get_supermaven_agent_path(client: Arc<dyn HttpClient>) -> Result<PathBuf> {
-    fs::create_dir_all(supermaven_dir())
-        .await
-        .with_context(|| {
-            format!(
-                "Could not create Supermaven Agent Directory at {:?}",
-                supermaven_dir()
-            )
-        })?;
-
-    let platform = match std::env::consts::OS {
-        "macos" => "darwin",
-        "windows" => "windows",
-        "linux" => "linux",
-        unsupported => anyhow::bail!("unsupported platform {unsupported}"),
-    };
-
-    let arch = match std::env::consts::ARCH {
-        "x86_64" => "amd64",
-        "aarch64" => "arm64",
-        unsupported => anyhow::bail!("unsupported architecture {unsupported}"),
-    };
-
-    let download_info = latest_release(client.clone(), platform, arch).await?;
-
-    let binary_path = version_path(download_info.version);
-
-    if has_version(&binary_path).await {
-        // Due to an issue with the Supermaven binary not being made executable on
-        // earlier Zed versions and Supermaven releases not occurring that frequently,
-        // we ensure here that the found binary is actually executable.
-        make_file_executable(&binary_path).await?;
-
-        return Ok(binary_path);
-    }
-
-    let request = HttpRequest::get(&download_info.download_url);
-
-    let mut response = client
-        .send(request.body(AsyncBody::default())?)
-        .await
-        .with_context(|| "Unable to download Supermaven Agent".to_string())?;
-
-    let mut file = File::create(&binary_path)
-        .await
-        .with_context(|| format!("Unable to create file at {:?}", binary_path))?;
-
-    futures::io::copy(BufReader::new(response.body_mut()), &mut file)
-        .await
-        .with_context(|| format!("Unable to write binary to file at {:?}", binary_path))?;
-
-    make_file_executable(&binary_path).await?;
-
-    remove_matching(supermaven_dir(), |file| file != binary_path).await;
-
-    Ok(binary_path)
-}

crates/terminal/src/terminal_hyperlinks.rs 🔗

@@ -905,6 +905,18 @@ mod tests {
                 );
             }
 
+            #[test]
+            // <https://github.com/zed-industries/zed/issues/50531>
+            fn issue_50531() {
+                // Paths preceded by "N:" prefix (e.g. grep output line numbers)
+                // should still be clickable
+                test_path!("0: ‹«foo/👉bar.txt»›");
+                test_path!("0: ‹«👉foo/bar.txt»›");
+                test_path!("42: ‹«👉foo/bar.txt»›");
+                test_path!("1: ‹«/👉test/cool.rs»›");
+                test_path!("1: ‹«/👉test/cool.rs»:«4»:«2»›");
+            }
+
             #[test]
             // <https://github.com/zed-industries/zed/issues/46795>
             fn issue_46795() {

crates/terminal_view/src/terminal_panel.rs 🔗

@@ -397,10 +397,7 @@ impl TerminalPanel {
                             };
                             panel
                                 .update_in(cx, |panel, window, cx| {
-                                    panel
-                                        .center
-                                        .split(&pane, &new_pane, direction, cx)
-                                        .log_err();
+                                    panel.center.split(&pane, &new_pane, direction, cx);
                                     window.focus(&new_pane.focus_handle(cx), cx);
                                 })
                                 .ok();
@@ -424,7 +421,7 @@ impl TerminalPanel {
                         new_pane.update(cx, |pane, cx| {
                             pane.add_item(item, true, true, None, window, cx);
                         });
-                        self.center.split(&pane, &new_pane, direction, cx).log_err();
+                        self.center.split(&pane, &new_pane, direction, cx);
                         window.focus(&new_pane.focus_handle(cx), cx);
                     }
                 };
@@ -1303,17 +1300,13 @@ pub fn new_terminal_pane(
                                             &new_pane,
                                             split_direction,
                                             cx,
-                                        )?;
-                                        anyhow::Ok(new_pane)
+                                        );
+                                        new_pane
                                     })
                                 else {
                                     return;
                                 };
 
-                                let Some(new_pane) = new_pane.log_err() else {
-                                    return;
-                                };
-
                                 move_item(
                                     &source,
                                     &new_pane,
@@ -1569,15 +1562,12 @@ impl Render for TerminalPanel {
                                     _ = terminal_panel.update_in(
                                         cx,
                                         |terminal_panel, window, cx| {
-                                            terminal_panel
-                                                .center
-                                                .split(
-                                                    &terminal_panel.active_pane,
-                                                    &new_pane,
-                                                    SplitDirection::Right,
-                                                    cx,
-                                                )
-                                                .log_err();
+                                            terminal_panel.center.split(
+                                                &terminal_panel.active_pane,
+                                                &new_pane,
+                                                SplitDirection::Right,
+                                                cx,
+                                            );
                                             let new_pane = new_pane.read(cx);
                                             window.focus(&new_pane.focus_handle(cx), cx);
                                         },

crates/terminal_view/src/terminal_scrollbar.rs 🔗

@@ -3,7 +3,7 @@ use std::{
     rc::Rc,
 };
 
-use gpui::{Bounds, Point, Size, size};
+use gpui::{Bounds, Point, point, size};
 use terminal::Terminal;
 use ui::{Pixels, ScrollableHandle, px};
 
@@ -46,9 +46,9 @@ impl TerminalScrollHandle {
 }
 
 impl ScrollableHandle for TerminalScrollHandle {
-    fn max_offset(&self) -> Size<Pixels> {
+    fn max_offset(&self) -> Point<Pixels> {
         let state = self.state.borrow();
-        size(
+        point(
             Pixels::ZERO,
             state.total_lines.saturating_sub(state.viewport_lines) as f32 * state.line_height,
         )

crates/text/src/anchor.rs 🔗

@@ -20,7 +20,7 @@ pub struct Anchor {
 
     /// The byte offset into the text inserted in the operation
     /// at `timestamp`.
-    pub offset: usize,
+    pub offset: u32,
     /// Whether this anchor stays attached to the character *before* or *after*
     /// the offset.
     pub bias: Bias,
@@ -49,7 +49,7 @@ impl Anchor {
     pub const MIN: Self = Self {
         timestamp_replica_id: clock::Lamport::MIN.replica_id,
         timestamp_value: clock::Lamport::MIN.value,
-        offset: usize::MIN,
+        offset: u32::MIN,
         bias: Bias::Left,
         buffer_id: None,
     };
@@ -57,14 +57,14 @@ impl Anchor {
     pub const MAX: Self = Self {
         timestamp_replica_id: clock::Lamport::MAX.replica_id,
         timestamp_value: clock::Lamport::MAX.value,
-        offset: usize::MAX,
+        offset: u32::MAX,
         bias: Bias::Right,
         buffer_id: None,
     };
 
     pub fn new(
         timestamp: clock::Lamport,
-        offset: usize,
+        offset: u32,
         bias: Bias,
         buffer_id: Option<BufferId>,
     ) -> Self {
@@ -81,7 +81,7 @@ impl Anchor {
         Self {
             timestamp_replica_id: clock::Lamport::MIN.replica_id,
             timestamp_value: clock::Lamport::MIN.value,
-            offset: usize::MIN,
+            offset: u32::MIN,
             bias: Bias::Left,
             buffer_id: Some(buffer_id),
         }
@@ -91,7 +91,7 @@ impl Anchor {
         Self {
             timestamp_replica_id: clock::Lamport::MAX.replica_id,
             timestamp_value: clock::Lamport::MAX.value,
-            offset: usize::MAX,
+            offset: u32::MAX,
             bias: Bias::Right,
             buffer_id: Some(buffer_id),
         }
@@ -190,13 +190,13 @@ impl Anchor {
 
     pub fn is_min(&self) -> bool {
         self.timestamp() == clock::Lamport::MIN
-            && self.offset == usize::MIN
+            && self.offset == u32::MIN
             && self.bias == Bias::Left
     }
 
     pub fn is_max(&self) -> bool {
         self.timestamp() == clock::Lamport::MAX
-            && self.offset == usize::MAX
+            && self.offset == u32::MAX
             && self.bias == Bias::Right
     }
 

crates/text/src/locator.rs 🔗

@@ -8,18 +8,32 @@ use std::iter;
 ///
 /// The initial location for a collection should be `Locator::between(Locator::min(), Locator::max())`,
 /// leaving room for items to be inserted before and after it.
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Locator(SmallVec<[u64; 4]>);
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 2]>);
+
+impl Clone for Locator {
+    fn clone(&self) -> Self {
+        // We manually implement clone to avoid the overhead of SmallVec's clone implementation.
+        // Using `from_slice` is faster than `clone` for SmallVec as we can use our `Copy` implementation of u64.
+        Self {
+            0: SmallVec::from_slice(&self.0),
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.0.clone_from(&source.0);
+    }
+}
 
 impl Locator {
     pub const fn min() -> Self {
-        // SAFETY: 1 is <= 4
-        Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 4], 1) })
+        // SAFETY: 1 is <= 2
+        Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 2], 1) })
     }
 
     pub const fn max() -> Self {
-        // SAFETY: 1 is <= 4
-        Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 4], 1) })
+        // SAFETY: 1 is <= 2
+        Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 2], 1) })
     }
 
     pub const fn min_ref() -> &'static Self {
@@ -40,6 +54,7 @@ impl Locator {
         let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
         let mut location = SmallVec::new();
         for (lhs, rhs) in lhs.zip(rhs) {
+            // This shift is essential! It optimizes for the common case of sequential typing.
             let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
             location.push(mid);
             if mid > lhs {
@@ -127,4 +142,36 @@ mod tests {
             );
         }
     }
+
+    // Simulates 100,000 sequential forward appends (the pattern used when
+    // building a buffer's initial fragments and when
+    // `push_fragments_for_insertion` chains new text fragments).
+    #[test]
+    fn test_sequential_forward_append_stays_at_depth_1() {
+        let mut prev = Locator::min();
+        let max = Locator::max();
+        for _ in 0..100_000 {
+            let loc = Locator::between(&prev, &max);
+            assert_eq!(loc.len(), 1, "sequential forward append grew past depth 1");
+            prev = loc;
+        }
+    }
+
+    // Simulates the most common real editing pattern: a fragment is split
+    // (producing a depth-2 prefix), then 10,000 new fragments are inserted
+    // sequentially forward within that split region.
+    #[test]
+    fn test_typing_at_cursor_stays_at_depth_2() {
+        let initial = Locator::between(&Locator::min(), &Locator::max());
+        let prefix = Locator::between(&Locator::min(), &initial);
+        assert_eq!(prefix.len(), 2);
+
+        let suffix_id = initial;
+        let mut prev = prefix;
+        for _ in 0..10_000 {
+            let loc = Locator::between(&prev, &suffix_id);
+            assert_eq!(loc.len(), 2, "forward typing after split grew past depth 2");
+            prev = loc;
+        }
+    }
 }

crates/text/src/tests.rs 🔗

@@ -810,3 +810,188 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
         buffer.check_invariants();
     }
 }
+
+#[test]
+fn test_new_normalized_splits_large_base_text() {
+    // ASCII text that exceeds max_insertion_len
+    let text = "abcdefghij".repeat(10); // 100 bytes
+    let rope = Rope::from(text.as_str());
+    let buffer = Buffer::new_normalized(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        LineEnding::Unix,
+        rope,
+    );
+    assert_eq!(buffer.text(), text);
+    buffer.check_invariants();
+
+    // Verify anchors at various positions, including across chunk boundaries
+    for offset in [0, 1, 15, 16, 17, 50, 99] {
+        let anchor = buffer.anchor_before(offset);
+        assert_eq!(
+            anchor.to_offset(&buffer),
+            offset,
+            "anchor_before({offset}) round-tripped incorrectly"
+        );
+        let anchor = buffer.anchor_after(offset);
+        assert_eq!(
+            anchor.to_offset(&buffer),
+            offset,
+            "anchor_after({offset}) round-tripped incorrectly"
+        );
+    }
+
+    // Verify editing works after a split initialization
+    let mut buffer = buffer;
+    buffer.edit([(50..60, "XYZ")]);
+    let mut expected = text;
+    expected.replace_range(50..60, "XYZ");
+    assert_eq!(buffer.text(), expected);
+    buffer.check_invariants();
+}
+
+#[test]
+fn test_new_normalized_splits_large_base_text_with_multibyte_chars() {
+    // Use multi-byte chars (é is 2 bytes in UTF-8) so that a naive byte-level
+    // split would land in the middle of a character.
+    let unit = "ééééééééé"; // 9 chars × 2 bytes = 18 bytes
+    let text = unit.repeat(6); // 108 bytes
+    let rope = Rope::from(text.as_str());
+    let buffer = Buffer::new_normalized(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        LineEnding::Unix,
+        rope,
+    );
+    assert_eq!(buffer.text(), text);
+    buffer.check_invariants();
+
+    // Every anchor should resolve correctly even though chunks had to be
+    // rounded down to a char boundary.
+    let snapshot = buffer.snapshot();
+    for offset in (0..text.len()).filter(|o| text.is_char_boundary(*o)) {
+        let anchor = snapshot.anchor_before(offset);
+        assert_eq!(
+            anchor.to_offset(&snapshot),
+            offset,
+            "anchor round-trip failed at byte offset {offset}"
+        );
+    }
+}
+
+#[test]
+fn test_new_normalized_small_text_unchanged() {
+    // Text that fits in a single chunk should produce exactly one fragment,
+    // matching the original single-fragment behaviour.
+    let text = "hello world";
+    let rope = Rope::from(text);
+    let buffer = Buffer::new_normalized(
+        ReplicaId::LOCAL,
+        BufferId::new(1).unwrap(),
+        LineEnding::Unix,
+        rope,
+    );
+    assert_eq!(buffer.text(), text);
+    buffer.check_invariants();
+    assert_eq!(buffer.snapshot().fragments.items(&None).len(), 1);
+}
+
+#[test]
+fn test_edit_splits_large_insertion() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abcdefghij");
+
+    let large_text: Arc<str> = "X".repeat(100).into();
+    let edits = vec![(3..7, large_text.clone())];
+
+    buffer.edit(edits);
+
+    let expected = format!("abc{}hij", large_text);
+    assert_eq!(buffer.text(), expected);
+    buffer.check_invariants();
+
+    // Anchors should resolve correctly throughout the buffer.
+    for offset in [0, 3, 50, 103, expected.len()] {
+        let anchor = buffer.anchor_before(offset);
+        assert_eq!(
+            anchor.to_offset(&buffer),
+            offset,
+            "anchor_before({offset}) round-tripped incorrectly"
+        );
+    }
+}
+
+#[test]
+fn test_edit_splits_large_insertion_with_multibyte_chars() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abcdefghij");
+
+    // 4-byte chars so that naive byte splits would land mid-character.
+    let large_text: Arc<str> = "😀".repeat(30).into(); // 30 × 4 = 120 bytes
+    let edits = vec![(5..5, large_text.clone())];
+
+    buffer.edit(edits);
+
+    let expected = format!("abcde{}fghij", large_text);
+    assert_eq!(buffer.text(), expected);
+    buffer.check_invariants();
+}
+
+#[test]
+fn test_edit_splits_large_insertion_among_multiple_edits() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ABCDEFGHIJ");
+
+    let large_text: Arc<str> = "x".repeat(60).into();
+    // Three edits: small, large, small. The large one must be split while
+    // preserving the correct positions of the surrounding edits.
+    let edits = vec![
+        (1..2, Arc::from("y")),     // replace "B" with "y"
+        (4..6, large_text.clone()), // replace "EF" with 60 x's
+        (9..9, Arc::from("z")),     // insert "z" before "J"
+    ];
+
+    buffer.edit(edits);
+
+    // Original: A B C D E F G H I J
+    // After (1..2, "y"):       A y C D E F G H I J
+    // After (4..6, large):     A y C D <60 x's> G H I J
+    // After (9..9, "z"):       A y C D <60 x's> G H I z J
+    let expected = format!("AyCD{}GHIzJ", large_text);
+    assert_eq!(buffer.text(), expected);
+    buffer.check_invariants();
+}
+
+#[test]
+fn test_edit_splits_multiple_large_insertions() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ABCDE");
+
+    let text1: Arc<str> = "a".repeat(40).into();
+    let text2: Arc<str> = "b".repeat(40).into();
+    let edits = vec![
+        (1..2, text1.clone()), // replace "B" with 40 a's
+        (3..4, text2.clone()), // replace "D" with 40 b's
+    ];
+
+    buffer.edit(edits);
+
+    let expected = format!("A{}C{}E", text1, text2);
+    assert_eq!(buffer.text(), expected);
+    buffer.check_invariants();
+}
+
+#[test]
+fn test_edit_undo_after_split() {
+    let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "hello world");
+    buffer.set_group_interval(Duration::from_secs(0));
+    let original = buffer.text();
+
+    let large_text: Arc<str> = "Z".repeat(50).into();
+    let edits = vec![(5..6, large_text)];
+    buffer.edit(edits);
+    assert_ne!(buffer.text(), original);
+    buffer.check_invariants();
+
+    // Undo should restore the original text even though the edit was split
+    // into multiple internal operations grouped in one transaction.
+    buffer.undo();
+    assert_eq!(buffer.text(), original);
+    buffer.check_invariants();
+}

crates/text/src/text.rs 🔗

@@ -48,6 +48,12 @@ use util::RandomCharIter;
 static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
     LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
 
+/// The maximum length of a single insertion operation.
+/// Insertions larger than this will be split into multiple smaller
+/// fragments, which allows us to use relative `u32` offsets instead of
+/// `usize`, reducing memory usage.
+const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize };
+
 pub type TransactionId = clock::Lamport;
 
 pub struct Buffer {
@@ -155,18 +161,38 @@ struct History {
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 struct InsertionSlice {
-    edit_id: clock::Lamport,
-    insertion_id: clock::Lamport,
-    range: Range<usize>,
+    // Inline the lamports so the replica ids can share the same alignment,
+    // saving 4 bytes of space. Replaces: edit_id: clock::Lamport,
+    edit_id_value: clock::Seq,
+    edit_id_replica_id: ReplicaId,
+    // insertion_id: clock::Lamport,
+    insertion_id_value: clock::Seq,
+    insertion_id_replica_id: ReplicaId,
+    range: Range<u32>,
 }
 
 impl Ord for InsertionSlice {
     fn cmp(&self, other: &Self) -> Ordering {
-        self.edit_id
-            .cmp(&other.edit_id)
-            .then_with(|| self.insertion_id.cmp(&other.insertion_id))
-            .then_with(|| self.range.start.cmp(&other.range.start))
-            .then_with(|| self.range.end.cmp(&other.range.end))
+        Lamport {
+            value: self.edit_id_value,
+            replica_id: self.edit_id_replica_id,
+        }
+        .cmp(&Lamport {
+            value: other.edit_id_value,
+            replica_id: other.edit_id_replica_id,
+        })
+        .then_with(|| {
+            Lamport {
+                value: self.insertion_id_value,
+                replica_id: self.insertion_id_replica_id,
+            }
+            .cmp(&Lamport {
+                value: other.insertion_id_value,
+                replica_id: other.insertion_id_replica_id,
+            })
+        })
+        .then_with(|| self.range.start.cmp(&other.range.start))
+        .then_with(|| self.range.end.cmp(&other.range.end))
     }
 }
 
@@ -179,8 +205,10 @@ impl PartialOrd for InsertionSlice {
 impl InsertionSlice {
     fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self {
         Self {
-            edit_id,
-            insertion_id: fragment.timestamp,
+            edit_id_value: edit_id.value,
+            edit_id_replica_id: edit_id.replica_id,
+            insertion_id_value: fragment.timestamp.value,
+            insertion_id_replica_id: fragment.timestamp.replica_id,
             range: fragment.insertion_offset..fragment.insertion_offset + fragment.len,
         }
     }
@@ -309,6 +337,7 @@ impl History {
 
     fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
         self.undo_stack.last_mut().map(|entry| {
+            entry.transaction.edit_ids.shrink_to_fit();
             entry.suppress_grouping = true;
             &entry.transaction
         })
@@ -489,7 +518,7 @@ struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
     since: &'a clock::Global,
     old_end: D,
     new_end: D,
-    range: Range<(&'a Locator, usize)>,
+    range: Range<(&'a Locator, u32)>,
     buffer_id: BufferId,
 }
 
@@ -536,18 +565,18 @@ impl<D1, D2> Edit<(D1, D2)> {
 }
 
 #[derive(Eq, PartialEq, Clone, Debug)]
-pub struct Fragment {
-    pub id: Locator,
-    pub timestamp: clock::Lamport,
-    pub insertion_offset: usize,
-    pub len: usize,
-    pub visible: bool,
-    pub deletions: SmallVec<[clock::Lamport; 2]>,
-    pub max_undos: clock::Global,
+struct Fragment {
+    id: Locator,
+    timestamp: clock::Lamport,
+    insertion_offset: u32,
+    len: u32,
+    visible: bool,
+    deletions: SmallVec<[clock::Lamport; 2]>,
+    max_undos: clock::Global,
 }
 
 #[derive(Eq, PartialEq, Clone, Debug)]
-pub struct FragmentSummary {
+struct FragmentSummary {
     text: FragmentTextSummary,
     max_id: Locator,
     max_version: clock::Global,
@@ -575,14 +604,14 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
 #[derive(Eq, PartialEq, Clone, Debug)]
 struct InsertionFragment {
     timestamp: clock::Lamport,
-    split_offset: usize,
+    split_offset: u32,
     fragment_id: Locator,
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 struct InsertionFragmentKey {
     timestamp: clock::Lamport,
-    split_offset: usize,
+    split_offset: u32,
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -740,18 +769,37 @@ impl Buffer {
             let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL);
             lamport_clock.observe(insertion_timestamp);
             version.observe(insertion_timestamp);
-            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
-            let fragment = Fragment {
-                id: fragment_id,
-                timestamp: insertion_timestamp,
-                insertion_offset: 0,
-                len: visible_text.len(),
-                visible: true,
-                deletions: Default::default(),
-                max_undos: Default::default(),
-            };
-            insertions.push(InsertionFragment::new(&fragment), ());
-            fragments.push(fragment, &None);
+
+            let mut insertion_offset: u32 = 0;
+            let mut text_offset: usize = 0;
+            let mut prev_locator = Locator::min();
+
+            while text_offset < visible_text.len() {
+                let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN);
+                let chunk_end = if target_end == visible_text.len() {
+                    target_end
+                } else {
+                    visible_text.floor_char_boundary(target_end)
+                };
+                let chunk_len = chunk_end - text_offset;
+
+                let fragment_id = Locator::between(&prev_locator, &Locator::max());
+                let fragment = Fragment {
+                    id: fragment_id.clone(),
+                    timestamp: insertion_timestamp,
+                    insertion_offset,
+                    len: chunk_len as u32,
+                    visible: true,
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                };
+                insertions.push(InsertionFragment::new(&fragment), ());
+                fragments.push(fragment, &None);
+
+                prev_locator = fragment_id;
+                insertion_offset += chunk_len as u32;
+                text_offset = chunk_end;
+            }
         }
 
         Buffer {
@@ -853,7 +901,7 @@ impl Buffer {
             new_text: Vec::with_capacity(edits.len()),
         };
         let mut new_insertions = Vec::new();
-        let mut insertion_offset = 0;
+        let mut insertion_offset: u32 = 0;
         let mut insertion_slices = Vec::new();
 
         let mut edits = edits
@@ -879,8 +927,9 @@ impl Buffer {
                 if fragment_start > old_fragments.start().visible {
                     if fragment_end > fragment_start {
                         let mut suffix = old_fragments.item().unwrap().clone();
-                        suffix.len = fragment_end - fragment_start;
-                        suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                        suffix.len = (fragment_end - fragment_start) as u32;
+                        suffix.insertion_offset +=
+                            (fragment_start - old_fragments.start().visible) as u32;
                         new_insertions.push(InsertionFragment::insert_new(&suffix));
                         new_ropes.push_fragment(&suffix, suffix.visible);
                         new_fragments.push(suffix, &None);
@@ -899,8 +948,8 @@ impl Buffer {
             // Preserve any portion of the current fragment that precedes this range.
             if fragment_start < range.start {
                 let mut prefix = old_fragments.item().unwrap().clone();
-                prefix.len = range.start - fragment_start;
-                prefix.insertion_offset += fragment_start - old_fragments.start().visible;
+                prefix.len = (range.start - fragment_start) as u32;
+                prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                 new_insertions.push(InsertionFragment::insert_new(&prefix));
                 new_ropes.push_fragment(&prefix, prefix.visible);
@@ -912,29 +961,24 @@ impl Buffer {
             if !new_text.is_empty() {
                 let new_start = new_fragments.summary().text.visible;
 
-                let fragment = Fragment {
-                    id: Locator::between(
-                        &new_fragments.summary().max_id,
-                        old_fragments
-                            .item()
-                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
-                    ),
+                let next_fragment_id = old_fragments
+                    .item()
+                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
+                Self::push_fragments_for_insertion(
+                    new_text.as_ref(),
                     timestamp,
-                    insertion_offset,
-                    len: new_text.len(),
-                    deletions: Default::default(),
-                    max_undos: Default::default(),
-                    visible: true,
-                };
+                    &mut insertion_offset,
+                    &mut new_fragments,
+                    &mut new_insertions,
+                    &mut insertion_slices,
+                    &mut new_ropes,
+                    next_fragment_id,
+                    timestamp,
+                );
                 edits_patch.push(Edit {
                     old: fragment_start..fragment_start,
                     new: new_start..new_start + new_text.len(),
                 });
-                insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
-                new_insertions.push(InsertionFragment::insert_new(&fragment));
-                new_ropes.push_str(new_text.as_ref());
-                new_fragments.push(fragment, &None);
-                insertion_offset += new_text.len();
             }
 
             // Advance through every fragment that intersects this range, marking the intersecting
@@ -945,8 +989,9 @@ impl Buffer {
                 let mut intersection = fragment.clone();
                 let intersection_end = cmp::min(range.end, fragment_end);
                 if fragment.visible {
-                    intersection.len = intersection_end - fragment_start;
-                    intersection.insertion_offset += fragment_start - old_fragments.start().visible;
+                    intersection.len = (intersection_end - fragment_start) as u32;
+                    intersection.insertion_offset +=
+                        (fragment_start - old_fragments.start().visible) as u32;
                     intersection.id =
                         Locator::between(&new_fragments.summary().max_id, &intersection.id);
                     intersection.deletions.push(timestamp);
@@ -983,8 +1028,8 @@ impl Buffer {
             let fragment_end = old_fragments.end().visible;
             if fragment_end > fragment_start {
                 let mut suffix = old_fragments.item().unwrap().clone();
-                suffix.len = fragment_end - fragment_start;
-                suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                suffix.len = (fragment_end - fragment_start) as u32;
+                suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32;
                 new_insertions.push(InsertionFragment::insert_new(&suffix));
                 new_ropes.push_fragment(&suffix, suffix.visible);
                 new_fragments.push(suffix, &None);
@@ -1075,7 +1120,7 @@ impl Buffer {
         let mut insertion_slices = Vec::new();
         let cx = Some(version.clone());
         let mut new_insertions = Vec::new();
-        let mut insertion_offset = 0;
+        let mut insertion_offset: u32 = 0;
         let mut new_ropes =
             RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
         let mut old_fragments = self
@@ -1097,9 +1142,9 @@ impl Buffer {
                 if fragment_start > old_fragments.start().0.full_offset() {
                     if fragment_end > fragment_start {
                         let mut suffix = old_fragments.item().unwrap().clone();
-                        suffix.len = fragment_end.0 - fragment_start.0;
+                        suffix.len = (fragment_end.0 - fragment_start.0) as u32;
                         suffix.insertion_offset +=
-                            fragment_start - old_fragments.start().0.full_offset();
+                            (fragment_start - old_fragments.start().0.full_offset()) as u32;
                         new_insertions.push(InsertionFragment::insert_new(&suffix));
                         new_ropes.push_fragment(&suffix, suffix.visible);
                         new_fragments.push(suffix, &None);
@@ -1118,8 +1163,9 @@ impl Buffer {
             let fragment_end = old_fragments.end().0.full_offset();
             if fragment_end == range.start && fragment_end > fragment_start {
                 let mut fragment = old_fragments.item().unwrap().clone();
-                fragment.len = fragment_end.0 - fragment_start.0;
-                fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                fragment.len = (fragment_end.0 - fragment_start.0) as u32;
+                fragment.insertion_offset +=
+                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                 new_insertions.push(InsertionFragment::insert_new(&fragment));
                 new_ropes.push_fragment(&fragment, fragment.visible);
                 new_fragments.push(fragment, &None);
@@ -1144,8 +1190,9 @@ impl Buffer {
             // Preserve any portion of the current fragment that precedes this range.
             if fragment_start < range.start {
                 let mut prefix = old_fragments.item().unwrap().clone();
-                prefix.len = range.start.0 - fragment_start.0;
-                prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                prefix.len = (range.start.0 - fragment_start.0) as u32;
+                prefix.insertion_offset +=
+                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                 prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
                 new_insertions.push(InsertionFragment::insert_new(&prefix));
                 fragment_start = range.start;
@@ -1160,29 +1207,24 @@ impl Buffer {
                     old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
                 }
                 let new_start = new_fragments.summary().text.visible;
-                let fragment = Fragment {
-                    id: Locator::between(
-                        &new_fragments.summary().max_id,
-                        old_fragments
-                            .item()
-                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
-                    ),
+                let next_fragment_id = old_fragments
+                    .item()
+                    .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id);
+                Self::push_fragments_for_insertion(
+                    new_text,
                     timestamp,
-                    insertion_offset,
-                    len: new_text.len(),
-                    deletions: Default::default(),
-                    max_undos: Default::default(),
-                    visible: true,
-                };
+                    &mut insertion_offset,
+                    &mut new_fragments,
+                    &mut new_insertions,
+                    &mut insertion_slices,
+                    &mut new_ropes,
+                    next_fragment_id,
+                    timestamp,
+                );
                 edits_patch.push(Edit {
                     old: old_start..old_start,
                     new: new_start..new_start + new_text.len(),
                 });
-                insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment));
-                new_insertions.push(InsertionFragment::insert_new(&fragment));
-                new_ropes.push_str(new_text);
-                new_fragments.push(fragment, &None);
-                insertion_offset += new_text.len();
             }
 
             // Advance through every fragment that intersects this range, marking the intersecting
@@ -1193,9 +1235,9 @@ impl Buffer {
                 let mut intersection = fragment.clone();
                 let intersection_end = cmp::min(range.end, fragment_end);
                 if fragment.was_visible(version, &self.undo_map) {
-                    intersection.len = intersection_end.0 - fragment_start.0;
+                    intersection.len = (intersection_end.0 - fragment_start.0) as u32;
                     intersection.insertion_offset +=
-                        fragment_start - old_fragments.start().0.full_offset();
+                        (fragment_start - old_fragments.start().0.full_offset()) as u32;
                     intersection.id =
                         Locator::between(&new_fragments.summary().max_id, &intersection.id);
                     intersection.deletions.push(timestamp);
@@ -1208,7 +1250,7 @@ impl Buffer {
                             + (fragment_start.0 - old_fragments.start().0.full_offset().0);
                         let new_start = new_fragments.summary().text.visible;
                         edits_patch.push(Edit {
-                            old: old_start..old_start + intersection.len,
+                            old: old_start..old_start + intersection.len as usize,
                             new: new_start..new_start,
                         });
                     }
@@ -1229,8 +1271,9 @@ impl Buffer {
             let fragment_end = old_fragments.end().0.full_offset();
             if fragment_end > fragment_start {
                 let mut suffix = old_fragments.item().unwrap().clone();
-                suffix.len = fragment_end.0 - fragment_start.0;
-                suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                suffix.len = (fragment_end.0 - fragment_start.0) as u32;
+                suffix.insertion_offset +=
+                    (fragment_start - old_fragments.start().0.full_offset()) as u32;
                 new_insertions.push(InsertionFragment::insert_new(&suffix));
                 new_ropes.push_fragment(&suffix, suffix.visible);
                 new_fragments.push(suffix, &None);
@@ -1252,6 +1295,49 @@ impl Buffer {
         self.subscriptions.publish_mut(&edits_patch)
     }
 
+    fn push_fragments_for_insertion(
+        new_text: &str,
+        timestamp: clock::Lamport,
+        insertion_offset: &mut u32,
+        new_fragments: &mut SumTree<Fragment>,
+        new_insertions: &mut Vec<sum_tree::Edit<InsertionFragment>>,
+        insertion_slices: &mut Vec<InsertionSlice>,
+        new_ropes: &mut RopeBuilder,
+        next_fragment_id: &Locator,
+        edit_timestamp: clock::Lamport,
+    ) {
+        let mut text_offset = 0;
+        while text_offset < new_text.len() {
+            let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN);
+            let chunk_end = if target_end == new_text.len() {
+                target_end
+            } else {
+                new_text.floor_char_boundary(target_end)
+            };
+            if chunk_end == text_offset {
+                break;
+            }
+            let chunk_len = chunk_end - text_offset;
+
+            let fragment = Fragment {
+                id: Locator::between(&new_fragments.summary().max_id, next_fragment_id),
+                timestamp,
+                insertion_offset: *insertion_offset,
+                len: chunk_len as u32,
+                deletions: Default::default(),
+                max_undos: Default::default(),
+                visible: true,
+            };
+            insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment));
+            new_insertions.push(InsertionFragment::insert_new(&fragment));
+            new_fragments.push(fragment, &None);
+
+            *insertion_offset += chunk_len as u32;
+            text_offset = chunk_end;
+        }
+        new_ropes.push_str(new_text);
+    }
+
     fn fragment_ids_for_edits<'a>(
         &'a self,
         edit_ids: impl Iterator<Item = &'a clock::Lamport>,
@@ -1260,38 +1346,56 @@ impl Buffer {
         let mut insertion_slices = Vec::new();
         for edit_id in edit_ids {
             let insertion_slice = InsertionSlice {
-                edit_id: *edit_id,
-                insertion_id: clock::Lamport::MIN,
+                edit_id_value: edit_id.value,
+                edit_id_replica_id: edit_id.replica_id,
+                insertion_id_value: Lamport::MIN.value,
+                insertion_id_replica_id: Lamport::MIN.replica_id,
                 range: 0..0,
             };
             let slices = self
                 .snapshot
                 .insertion_slices
                 .iter_from(&insertion_slice)
-                .take_while(|slice| slice.edit_id == *edit_id);
+                .take_while(|slice| {
+                    Lamport {
+                        value: slice.edit_id_value,
+                        replica_id: slice.edit_id_replica_id,
+                    } == *edit_id
+                });
             insertion_slices.extend(slices)
         }
-        insertion_slices
-            .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
+        insertion_slices.sort_unstable_by_key(|s| {
+            (
+                Lamport {
+                    value: s.insertion_id_value,
+                    replica_id: s.insertion_id_replica_id,
+                },
+                s.range.start,
+                Reverse(s.range.end),
+            )
+        });
 
         // Get all of the fragments corresponding to these insertion slices.
         let mut fragment_ids = Vec::new();
         let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>(());
         for insertion_slice in &insertion_slices {
-            if insertion_slice.insertion_id != insertions_cursor.start().timestamp
+            let insertion_id = Lamport {
+                value: insertion_slice.insertion_id_value,
+                replica_id: insertion_slice.insertion_id_replica_id,
+            };
+            if insertion_id != insertions_cursor.start().timestamp
                 || insertion_slice.range.start > insertions_cursor.start().split_offset
             {
                 insertions_cursor.seek_forward(
                     &InsertionFragmentKey {
-                        timestamp: insertion_slice.insertion_id,
+                        timestamp: insertion_id,
                         split_offset: insertion_slice.range.start,
                     },
                     Bias::Left,
                 );
             }
             while let Some(item) = insertions_cursor.item() {
-                if item.timestamp != insertion_slice.insertion_id
-                    || item.split_offset >= insertion_slice.range.end
+                if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end
                 {
                     break;
                 }
@@ -1330,13 +1434,13 @@ impl Buffer {
                 let new_start = new_fragments.summary().text.visible;
                 if fragment_was_visible && !fragment.visible {
                     edits.push(Edit {
-                        old: old_start..old_start + fragment.len,
+                        old: old_start..old_start + fragment.len as usize,
                         new: new_start..new_start,
                     });
                 } else if !fragment_was_visible && fragment.visible {
                     edits.push(Edit {
                         old: old_start..old_start,
-                        new: new_start..new_start + fragment.len,
+                        new: new_start..new_start + fragment.len as usize,
                     });
                 }
                 new_ropes.push_fragment(&fragment, fragment_was_visible);
@@ -1582,7 +1686,12 @@ impl Buffer {
                 cursor.seek_forward(&Some(fragment_id), Bias::Left);
                 let fragment = cursor.item()?;
                 let start_offset = cursor.start().1;
-                let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
+                let end_offset = start_offset
+                    + if fragment.visible {
+                        fragment.len as usize
+                    } else {
+                        0
+                    };
                 Some(start_offset..end_offset)
             });
 
@@ -2038,10 +2147,6 @@ impl BufferSnapshot {
         self.deleted_text.to_string()
     }
 
-    pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
-        self.fragments.iter()
-    }
-
     pub fn text_summary(&self) -> TextSummary {
         self.visible_text.summary()
     }
@@ -2287,7 +2392,7 @@ impl BufferSnapshot {
             let fragment = fragment_cursor.item().unwrap();
             let mut fragment_offset = fragment_cursor.start().1;
             if fragment.visible {
-                fragment_offset += anchor.offset - insertion.split_offset;
+                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
             }
 
             position.add_assign(&text_cursor.summary(fragment_offset));
@@ -2332,7 +2437,7 @@ impl BufferSnapshot {
             let fragment = item.unwrap();
             let mut fragment_offset = start.1;
             if fragment.visible {
-                fragment_offset += anchor.offset - insertion.split_offset;
+                fragment_offset += (anchor.offset - insertion.split_offset) as usize;
             }
             fragment_offset
         }
@@ -2457,7 +2562,7 @@ impl BufferSnapshot {
             let overshoot = offset - start;
             Anchor::new(
                 fragment.timestamp,
-                fragment.insertion_offset + overshoot,
+                fragment.insertion_offset + overshoot as u32,
                 bias,
                 Some(self.remote_id),
             )
@@ -2546,7 +2651,7 @@ impl BufferSnapshot {
         let mut visible_start = start.1.visible;
         let mut deleted_start = start.1.deleted;
         if let Some(fragment) = item {
-            let overshoot = range.start.offset - fragment.insertion_offset;
+            let overshoot = (range.start.offset - fragment.insertion_offset) as usize;
             if fragment.visible {
                 visible_start += overshoot;
             } else {
@@ -2706,7 +2811,7 @@ impl<'a> RopeBuilder<'a> {
 
     fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
         debug_assert!(fragment.len > 0);
-        self.push(fragment.len, was_visible, fragment.visible)
+        self.push(fragment.len as usize, was_visible, fragment.visible)
     }
 
     fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
@@ -2781,7 +2886,8 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
                 if fragment.id == *self.range.end.0 {
                     visible_end = cmp::min(
                         visible_end,
-                        cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
+                        cursor.start().visible
+                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                     );
                 }
 
@@ -2807,7 +2913,8 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
                 if fragment.id == *self.range.end.0 {
                     deleted_end = cmp::min(
                         deleted_end,
-                        cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
+                        cursor.start().deleted
+                            + (self.range.end.1 - fragment.insertion_offset) as usize,
                     );
                 }
 
@@ -2872,7 +2979,7 @@ impl sum_tree::Item for Fragment {
             FragmentSummary {
                 max_id: self.id.clone(),
                 text: FragmentTextSummary {
-                    visible: self.len,
+                    visible: self.len as usize,
                     deleted: 0,
                 },
                 max_version,
@@ -2884,7 +2991,7 @@ impl sum_tree::Item for Fragment {
                 max_id: self.id.clone(),
                 text: FragmentTextSummary {
                     visible: 0,
-                    deleted: self.len,
+                    deleted: self.len as usize,
                 },
                 max_version,
                 min_insertion_version,

crates/theme/src/default_colors.rs 🔗

@@ -175,6 +175,7 @@ impl ThemeColors {
             vim_visual_background: system.transparent,
             vim_visual_line_background: system.transparent,
             vim_visual_block_background: system.transparent,
+            vim_yank_background: neutral().light_alpha().step_3(),
             vim_helix_normal_background: system.transparent,
             vim_helix_select_background: system.transparent,
             vim_normal_foreground: system.transparent,
@@ -320,6 +321,7 @@ impl ThemeColors {
             vim_visual_background: system.transparent,
             vim_visual_line_background: system.transparent,
             vim_visual_block_background: system.transparent,
+            vim_yank_background: neutral().dark_alpha().step_4(),
             vim_helix_normal_background: system.transparent,
             vim_helix_select_background: system.transparent,
             vim_normal_foreground: system.transparent,

crates/theme/src/fallback_themes.rs 🔗

@@ -257,6 +257,7 @@ pub(crate) fn zed_default_dark() -> Theme {
                 vim_visual_background: SystemColors::default().transparent,
                 vim_visual_line_background: SystemColors::default().transparent,
                 vim_visual_block_background: SystemColors::default().transparent,
+                vim_yank_background: hsla(207.8 / 360., 81. / 100., 66. / 100., 0.2),
                 vim_helix_normal_background: SystemColors::default().transparent,
                 vim_helix_select_background: SystemColors::default().transparent,
                 vim_normal_foreground: SystemColors::default().transparent,

crates/theme/src/icon_theme.rs 🔗

@@ -66,7 +66,7 @@ pub struct IconDefinition {
 }
 
 const FILE_STEMS_BY_ICON_KEY: &[(&str, &[&str])] = &[
-    ("docker", &["Dockerfile"]),
+    ("docker", &["Containerfile", "Dockerfile"]),
     ("ruby", &["Podfile"]),
     ("heroku", &["Procfile"]),
 ];
@@ -89,7 +89,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[
     (
         "cpp",
         &[
-            "c++", "h++", "cc", "cpp", "cxx", "hh", "hpp", "hxx", "inl", "ixx",
+            "c++", "h++", "cc", "cpp", "cppm", "cxx", "hh", "hpp", "hxx", "inl", "ixx",
         ],
     ),
     ("crystal", &["cr", "ecr"]),
@@ -99,6 +99,15 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[
     ("cue", &["cue"]),
     ("dart", &["dart"]),
     ("diff", &["diff"]),
+    (
+        "docker",
+        &[
+            "docker-compose.yml",
+            "docker-compose.yaml",
+            "compose.yml",
+            "compose.yaml",
+        ],
+    ),
     (
         "document",
         &[
@@ -138,12 +147,27 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[
     ("font", &["otf", "ttf", "woff", "woff2"]),
     ("fsharp", &["fs"]),
     ("fsproj", &["fsproj"]),
-    ("gitlab", &["gitlab-ci.yml"]),
+    ("gitlab", &["gitlab-ci.yml", "gitlab-ci.yaml"]),
     ("gleam", &["gleam"]),
     ("go", &["go", "mod", "work"]),
     ("graphql", &["gql", "graphql", "graphqls"]),
     ("haskell", &["hs"]),
     ("hcl", &["hcl"]),
+    (
+        "helm",
+        &[
+            "helmfile.yaml",
+            "helmfile.yml",
+            "Chart.yaml",
+            "Chart.yml",
+            "Chart.lock",
+            "values.yaml",
+            "values.yml",
+            "requirements.yaml",
+            "requirements.yml",
+            "tpl",
+        ],
+    ),
     ("html", &["htm", "html"]),
     (
         "image",
@@ -198,7 +222,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[
     ("rust", &["rs"]),
     ("sass", &["sass", "scss"]),
     ("scala", &["scala", "sc"]),
-    ("settings", &["conf", "ini", "yaml", "yml"]),
+    ("settings", &["conf", "ini"]),
     ("solidity", &["sol"]),
     (
         "storage",
@@ -279,6 +303,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[
     ("vue", &["vue"]),
     ("vyper", &["vy", "vyi"]),
     ("wgsl", &["wgsl"]),
+    ("yaml", &["yaml", "yml"]),
     ("zig", &["zig"]),
 ];
 
@@ -310,12 +335,13 @@ const FILE_ICONS: &[(&str, &str)] = &[
     ("font", "icons/file_icons/font.svg"),
     ("fsharp", "icons/file_icons/fsharp.svg"),
     ("fsproj", "icons/file_icons/file.svg"),
-    ("gitlab", "icons/file_icons/settings.svg"),
+    ("gitlab", "icons/file_icons/gitlab.svg"),
     ("gleam", "icons/file_icons/gleam.svg"),
     ("go", "icons/file_icons/go.svg"),
     ("graphql", "icons/file_icons/graphql.svg"),
     ("haskell", "icons/file_icons/haskell.svg"),
     ("hcl", "icons/file_icons/hcl.svg"),
+    ("helm", "icons/file_icons/helm.svg"),
     ("heroku", "icons/file_icons/heroku.svg"),
     ("html", "icons/file_icons/html.svg"),
     ("image", "icons/file_icons/image.svg"),
@@ -371,6 +397,7 @@ const FILE_ICONS: &[(&str, &str)] = &[
     ("vue", "icons/file_icons/vue.svg"),
     ("vyper", "icons/file_icons/vyper.svg"),
     ("wgsl", "icons/file_icons/wgsl.svg"),
+    ("yaml", "icons/file_icons/yaml.svg"),
     ("zig", "icons/file_icons/zig.svg"),
 ];
 

crates/theme/src/schema.rs 🔗

@@ -796,6 +796,11 @@ pub fn theme_colors_refinement(
             .vim_visual_block_background
             .as_ref()
             .and_then(|color| try_parse_color(color).ok()),
+        vim_yank_background: this
+            .vim_yank_background
+            .as_ref()
+            .and_then(|color| try_parse_color(color).ok())
+            .or(editor_document_highlight_read_background),
         vim_helix_normal_background: this
             .vim_helix_normal_background
             .as_ref()

crates/theme/src/styles/colors.rs 🔗

@@ -175,6 +175,8 @@ pub struct ThemeColors {
     pub vim_visual_line_background: Hsla,
     /// Background color for Vim Visual Block mode indicator.
     pub vim_visual_block_background: Hsla,
+    /// Background color for Vim yank highlight.
+    pub vim_yank_background: Hsla,
     /// Background color for Vim Helix Normal mode indicator.
     pub vim_helix_normal_background: Hsla,
     /// Background color for Vim Helix Select mode indicator.

crates/theme_importer/src/vscode/converter.rs 🔗

@@ -207,6 +207,7 @@ impl VsCodeThemeConverter {
             terminal_ansi_white: vscode_colors.terminal.ansi_white.clone(),
             terminal_ansi_bright_white: vscode_colors.terminal.ansi_bright_white.clone(),
             link_text_hover: vscode_colors.text_link.active_foreground.clone(),
+            vim_yank_background: vscode_colors.editor.range_highlight_background.clone(),
             ..Default::default()
         })
     }

crates/time_format/Cargo.toml 🔗

@@ -19,3 +19,6 @@ time.workspace = true
 [target.'cfg(target_os = "macos")'.dependencies]
 core-foundation.workspace = true
 core-foundation-sys.workspace = true
+
+[target.'cfg(target_os = "windows")'.dependencies]
+windows.workspace = true

crates/time_format/src/time_format.rs 🔗

@@ -86,10 +86,25 @@ fn format_absolute_date(
             macos::format_date(&timestamp)
         }
     }
-    #[cfg(not(target_os = "macos"))]
+    #[cfg(target_os = "windows")]
+    {
+        if !enhanced_date_formatting {
+            return windows::format_date(&timestamp);
+        }
+
+        let timestamp_date = timestamp.date();
+        let reference_date = reference.date();
+        if timestamp_date == reference_date {
+            "Today".to_string()
+        } else if reference_date.previous_day() == Some(timestamp_date) {
+            "Yesterday".to_string()
+        } else {
+            windows::format_date(&timestamp)
+        }
+    }
+    #[cfg(not(any(target_os = "macos", target_os = "windows")))]
     {
         // todo(linux) respect user's date/time preferences
-        // todo(windows) respect user's date/time preferences
         let current_locale = CURRENT_LOCALE
             .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US")));
         format_timestamp_naive_date(
@@ -105,10 +120,13 @@ fn format_absolute_time(timestamp: OffsetDateTime) -> String {
     {
         macos::format_time(&timestamp)
     }
-    #[cfg(not(target_os = "macos"))]
+    #[cfg(target_os = "windows")]
+    {
+        windows::format_time(&timestamp)
+    }
+    #[cfg(not(any(target_os = "macos", target_os = "windows")))]
     {
         // todo(linux) respect user's date/time preferences
-        // todo(windows) respect user's date/time preferences
         let current_locale = CURRENT_LOCALE
             .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US")));
         format_timestamp_naive_time(
@@ -123,7 +141,7 @@ fn format_absolute_timestamp(
     reference: OffsetDateTime,
     #[allow(unused_variables)] enhanced_date_formatting: bool,
 ) -> String {
-    #[cfg(target_os = "macos")]
+    #[cfg(any(target_os = "macos", target_os = "windows"))]
     {
         if !enhanced_date_formatting {
             return format!(
@@ -147,10 +165,9 @@ fn format_absolute_timestamp(
             )
         }
     }
-    #[cfg(not(target_os = "macos"))]
+    #[cfg(not(any(target_os = "macos", target_os = "windows")))]
     {
         // todo(linux) respect user's date/time preferences
-        // todo(windows) respect user's date/time preferences
         format_timestamp_fallback(timestamp, reference)
     }
 }
@@ -176,10 +193,25 @@ fn format_absolute_date_medium(
             macos::format_date_medium(&timestamp)
         }
     }
-    #[cfg(not(target_os = "macos"))]
+    #[cfg(target_os = "windows")]
+    {
+        if !enhanced_formatting {
+            return windows::format_date_medium(&timestamp);
+        }
+
+        let timestamp_date = timestamp.date();
+        let reference_date = reference.date();
+        if timestamp_date == reference_date {
+            "Today".to_string()
+        } else if reference_date.previous_day() == Some(timestamp_date) {
+            "Yesterday".to_string()
+        } else {
+            windows::format_date_medium(&timestamp)
+        }
+    }
+    #[cfg(not(any(target_os = "macos", target_os = "windows")))]
     {
         // todo(linux) respect user's date/time preferences
-        // todo(windows) respect user's date/time preferences
         let current_locale = CURRENT_LOCALE
             .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US")));
         if !enhanced_formatting {
@@ -212,7 +244,11 @@ fn format_absolute_timestamp_medium(
     {
         format_absolute_date_medium(timestamp, reference, false)
     }
-    #[cfg(not(target_os = "macos"))]
+    #[cfg(target_os = "windows")]
+    {
+        format_absolute_date_medium(timestamp, reference, false)
+    }
+    #[cfg(not(any(target_os = "macos", target_os = "windows")))]
     {
         // todo(linux) respect user's date/time preferences
         // todo(windows) respect user's date/time preferences
@@ -360,7 +396,7 @@ fn format_timestamp_naive_date(
     }
 }
 
-#[cfg(not(target_os = "macos"))]
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
 fn format_timestamp_naive_date_medium(
     timestamp_local: OffsetDateTime,
     is_12_hour_time: bool,
@@ -415,10 +451,10 @@ pub fn format_timestamp_naive(
     }
 }
 
-#[cfg(not(target_os = "macos"))]
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
 static CURRENT_LOCALE: std::sync::OnceLock<String> = std::sync::OnceLock::new();
 
-#[cfg(not(target_os = "macos"))]
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
 fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTime) -> String {
     let current_locale = CURRENT_LOCALE
         .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US")));
@@ -428,7 +464,7 @@ fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTim
 }
 
 /// Returns `true` if the locale is recognized as a 12-hour time locale.
-#[cfg(not(target_os = "macos"))]
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
 fn is_12_hour_time_by_locale(locale: &str) -> bool {
     [
         "es-MX", "es-CO", "es-SV", "es-NI",
@@ -522,6 +558,57 @@ mod macos {
     }
 }
 
+#[cfg(target_os = "windows")]
+mod windows {
+    use windows::Globalization::DateTimeFormatting::DateTimeFormatter;
+
+    pub fn format_time(timestamp: &time::OffsetDateTime) -> String {
+        format_with_formatter(DateTimeFormatter::ShortTime(), timestamp, true)
+    }
+
+    pub fn format_date(timestamp: &time::OffsetDateTime) -> String {
+        format_with_formatter(DateTimeFormatter::ShortDate(), timestamp, false)
+    }
+
+    pub fn format_date_medium(timestamp: &time::OffsetDateTime) -> String {
+        format_with_formatter(
+            DateTimeFormatter::CreateDateTimeFormatter(windows::core::h!(
+                "month.abbreviated day year.full"
+            )),
+            timestamp,
+            false,
+        )
+    }
+
+    fn format_with_formatter(
+        formatter: windows::core::Result<DateTimeFormatter>,
+        timestamp: &time::OffsetDateTime,
+        is_time: bool,
+    ) -> String {
+        formatter
+            .and_then(|formatter| formatter.Format(to_winrt_datetime(timestamp)))
+            .map(|hstring| hstring.to_string())
+            .unwrap_or_else(|_| {
+                if is_time {
+                    super::format_timestamp_naive_time(*timestamp, true)
+                } else {
+                    super::format_timestamp_naive_date(*timestamp, *timestamp, true)
+                }
+            })
+    }
+
+    fn to_winrt_datetime(timestamp: &time::OffsetDateTime) -> windows::Foundation::DateTime {
+        // DateTime uses 100-nanosecond intervals since January 1, 1601 (UTC).
+        const WINDOWS_EPOCH: time::OffsetDateTime = time::macros::datetime!(1601-01-01 0:00 UTC);
+        let duration_since_winrt_epoch = *timestamp - WINDOWS_EPOCH;
+        let universal_time = duration_since_winrt_epoch.whole_nanoseconds() / 100;
+
+        windows::Foundation::DateTime {
+            UniversalTime: universal_time as i64,
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

crates/title_bar/src/title_bar.rs 🔗

@@ -31,7 +31,9 @@ use gpui::{
     StatefulInteractiveElement, Styled, Subscription, WeakEntity, Window, actions, div,
 };
 use onboarding_banner::OnboardingBanner;
-use project::{Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees};
+use project::{
+    DisableAiSettings, Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees,
+};
 use remote::RemoteConnectionOptions;
 use settings::Settings;
 use settings::WorktreeId;
@@ -686,7 +688,7 @@ impl TitleBar {
         _window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Option<AnyElement> {
-        if !cx.has_flag::<AgentV2FeatureFlag>() {
+        if !cx.has_flag::<AgentV2FeatureFlag>() || DisableAiSettings::get_global(cx).disable_ai {
             return None;
         }
 

crates/ui/src/components/ai/thread_item.rs 🔗

@@ -3,7 +3,7 @@ use crate::{
     prelude::*,
 };
 
-use gpui::{AnyView, ClickEvent, SharedString};
+use gpui::{AnyView, ClickEvent, Hsla, SharedString};
 
 #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
 pub enum AgentThreadStatus {
@@ -18,10 +18,10 @@ pub enum AgentThreadStatus {
 pub struct ThreadItem {
     id: ElementId,
     icon: IconName,
+    custom_icon_from_external_svg: Option<SharedString>,
     title: SharedString,
     timestamp: SharedString,
-    running: bool,
-    generation_done: bool,
+    notified: bool,
     status: AgentThreadStatus,
     selected: bool,
     hovered: bool,
@@ -41,10 +41,10 @@ impl ThreadItem {
         Self {
             id: id.into(),
             icon: IconName::ZedAgent,
+            custom_icon_from_external_svg: None,
             title: title.into(),
             timestamp: "".into(),
-            running: false,
-            generation_done: false,
+            notified: false,
             status: AgentThreadStatus::default(),
             selected: false,
             hovered: false,
@@ -70,13 +70,13 @@ impl ThreadItem {
         self
     }
 
-    pub fn running(mut self, running: bool) -> Self {
-        self.running = running;
+    pub fn custom_icon_from_external_svg(mut self, svg: impl Into<SharedString>) -> Self {
+        self.custom_icon_from_external_svg = Some(svg.into());
         self
     }
 
-    pub fn generation_done(mut self, generation_done: bool) -> Self {
-        self.generation_done = generation_done;
+    pub fn notified(mut self, notified: bool) -> Self {
+        self.notified = notified;
         self
     }
 
@@ -155,49 +155,34 @@ impl RenderOnce for ThreadItem {
         // };
 
         let icon_container = || h_flex().size_4().justify_center();
-        let agent_icon = Icon::new(self.icon)
-            .color(Color::Muted)
-            .size(IconSize::Small);
+        let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg {
+            Icon::from_external_svg(custom_svg)
+                .color(Color::Muted)
+                .size(IconSize::Small)
+        } else {
+            Icon::new(self.icon)
+                .color(Color::Muted)
+                .size(IconSize::Small)
+        };
 
-        let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation {
-            Some(
-                IconDecoration::new(
-                    IconDecorationKind::Triangle,
-                    cx.theme().colors().surface_background,
-                    cx,
-                )
-                .color(cx.theme().status().warning)
+        let decoration = |icon: IconDecorationKind, color: Hsla| {
+            IconDecoration::new(icon, cx.theme().colors().surface_background, cx)
+                .color(color)
                 .position(gpui::Point {
                     x: px(-2.),
                     y: px(-2.),
-                }),
-            )
+                })
+        };
+
+        let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation {
+            Some(decoration(
+                IconDecorationKind::Triangle,
+                cx.theme().status().warning,
+            ))
         } else if self.status == AgentThreadStatus::Error {
-            Some(
-                IconDecoration::new(
-                    IconDecorationKind::X,
-                    cx.theme().colors().surface_background,
-                    cx,
-                )
-                .color(cx.theme().status().error)
-                .position(gpui::Point {
-                    x: px(-2.),
-                    y: px(-2.),
-                }),
-            )
-        } else if self.generation_done {
-            Some(
-                IconDecoration::new(
-                    IconDecorationKind::Dot,
-                    cx.theme().colors().surface_background,
-                    cx,
-                )
-                .color(cx.theme().colors().text_accent)
-                .position(gpui::Point {
-                    x: px(-2.),
-                    y: px(-2.),
-                }),
-            )
+            Some(decoration(IconDecorationKind::X, cx.theme().status().error))
+        } else if self.notified {
+            Some(decoration(IconDecorationKind::Dot, clr.text_accent))
         } else {
             None
         };
@@ -208,9 +193,11 @@ impl RenderOnce for ThreadItem {
             icon_container().child(agent_icon)
         };
 
-        let running_or_action = self.running || (self.hovered && self.action_slot.is_some());
-
-        // let has_no_changes = self.added.is_none() && self.removed.is_none();
+        let is_running = matches!(
+            self.status,
+            AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation
+        );
+        let running_or_action = is_running || (self.hovered && self.action_slot.is_some());
 
         let title = self.title;
         let highlight_positions = self.highlight_positions;
@@ -225,6 +212,7 @@ impl RenderOnce for ThreadItem {
         v_flex()
             .id(self.id.clone())
             .cursor_pointer()
+            .w_full()
             .map(|this| {
                 if self.worktree.is_some() {
                     this.p_2()
@@ -255,7 +243,7 @@ impl RenderOnce for ThreadItem {
                         this.child(
                             h_flex()
                                 .gap_1()
-                                .when(self.running, |this| {
+                                .when(is_running, |this| {
                                     this.child(
                                         icon_container()
                                             .child(SpinnerLabel::new().color(Color::Accent)),
@@ -347,12 +335,12 @@ impl Component for ThreadItem {
                     .into_any_element(),
             ),
             single_example(
-                "Generation Done",
+                "Notified",
                 container()
                     .child(
                         ThreadItem::new("ti-2", "Refine thread view scrolling behavior")
                             .timestamp("12:12 AM")
-                            .generation_done(true),
+                            .notified(true),
                     )
                     .into_any_element(),
             ),
@@ -383,7 +371,7 @@ impl Component for ThreadItem {
                         ThreadItem::new("ti-3", "Add line numbers option to FileEditBlock")
                             .icon(IconName::AiClaude)
                             .timestamp("7:30 PM")
-                            .running(true),
+                            .status(AgentThreadStatus::Running),
                     )
                     .into_any_element(),
             ),

crates/ui/src/components/callout.rs 🔗

@@ -295,7 +295,7 @@ impl Component for Callout {
                                 "Error details:",
                                 "• Quota exceeded for metric",
                                 "• Limit: 0",
-                                "• Model: gemini-3-pro",
+                                "• Model: gemini-3.1-pro",
                                 "Please retry in 26.33s.",
                                 "Additional details:",
                                 "- Request ID: abc123def456",

crates/ui/src/components/data_table.rs 🔗

@@ -36,6 +36,13 @@ pub mod table_row {
     pub struct TableRow<T>(Vec<T>);
 
     impl<T> TableRow<T> {
+        pub fn from_element(element: T, length: usize) -> Self
+        where
+            T: Clone,
+        {
+            Self::from_vec(vec![element; length], length)
+        }
+
         /// Constructs a `TableRow` from a `Vec<T>`, panicking if the length does not match `expected_length`.
         ///
         /// Use this when you want to ensure at construction time that the row has the correct number of columns.
@@ -70,7 +77,8 @@ pub mod table_row {
         ///
         /// # Panics
         /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`).
-        pub fn expect_get(&self, col: usize) -> &T {
+        pub fn expect_get(&self, col: impl Into<usize>) -> &T {
+            let col = col.into();
             self.0.get(col).unwrap_or_else(|| {
                 panic!(
                     "Expected table row of `{}` to have {col:?}",
@@ -79,8 +87,8 @@ pub mod table_row {
             })
         }
 
-        pub fn get(&self, col: usize) -> Option<&T> {
-            self.0.get(col)
+        pub fn get(&self, col: impl Into<usize>) -> Option<&T> {
+            self.0.get(col.into())
         }
 
         pub fn as_slice(&self) -> &[T] {
@@ -735,6 +743,7 @@ pub struct Table {
     empty_table_callback: Option<Rc<dyn Fn(&mut Window, &mut App) -> AnyElement>>,
     /// The number of columns in the table. Used to assert column numbers in `TableRow` collections
     cols: usize,
+    disable_base_cell_style: bool,
 }
 
 impl Table {
@@ -753,9 +762,19 @@ impl Table {
             use_ui_font: true,
             empty_table_callback: None,
             col_widths: None,
+            disable_base_cell_style: false,
         }
     }
 
+    /// Disables based styling of row cell (paddings, text ellipsis, nowrap, etc), keeping width settings
+    ///
+    /// Doesn't affect base style of header cell.
+    /// Doesn't remove overflow-hidden
+    pub fn disable_base_style(mut self) -> Self {
+        self.disable_base_cell_style = true;
+        self
+    }
+
     /// Enables uniform list rendering.
     /// The provided function will be passed directly to the `uniform_list` element.
     /// Therefore, if this method is called, any calls to [`Table::row`] before or after
@@ -973,10 +992,18 @@ pub fn render_table_row(
             .into_iter()
             .zip(column_widths.into_vec())
             .map(|(cell, width)| {
-                base_cell_style_text(width, table_context.use_ui_font, cx)
-                    .px_1()
-                    .py_0p5()
-                    .child(cell)
+                if table_context.disable_base_cell_style {
+                    div()
+                        .when_some(width, |this, width| this.w(width))
+                        .when(width.is_none(), |this| this.flex_1())
+                        .overflow_hidden()
+                        .child(cell)
+                } else {
+                    base_cell_style_text(width, table_context.use_ui_font, cx)
+                        .px_1()
+                        .py_0p5()
+                        .child(cell)
+                }
             }),
     );
 
@@ -1071,6 +1098,7 @@ pub struct TableRenderContext {
     pub column_widths: Option<TableRow<Length>>,
     pub map_row: Option<Rc<dyn Fn((usize, Stateful<Div>), &mut Window, &mut App) -> AnyElement>>,
     pub use_ui_font: bool,
+    pub disable_base_cell_style: bool,
 }
 
 impl TableRenderContext {
@@ -1083,6 +1111,7 @@ impl TableRenderContext {
             column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)),
             map_row: table.map_row.clone(),
             use_ui_font: table.use_ui_font,
+            disable_base_cell_style: table.disable_base_cell_style,
         }
     }
 }

crates/ui/src/components/scrollbar.rs 🔗

@@ -9,8 +9,8 @@ use gpui::{
     Along, App, AppContext as _, Axis as ScrollbarAxis, BorderStyle, Bounds, ContentMask, Context,
     Corner, Corners, CursorStyle, DispatchPhase, Div, Edges, Element, ElementId, Entity, EntityId,
     GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero,
-    LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Negate,
-    ParentElement, Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful,
+    LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, ParentElement,
+    Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful,
     StatefulInteractiveElement, Style, Styled, Task, UniformListDecoration,
     UniformListScrollHandle, Window, ease_in_out, prelude::FluentBuilder as _, px, quad, relative,
     size,
@@ -258,7 +258,7 @@ impl<T: ScrollableHandle> UniformListDecoration for ScrollbarStateWrapper<T> {
         _cx: &mut App,
     ) -> gpui::AnyElement {
         ScrollbarElement {
-            origin: scroll_offset.negate(),
+            origin: -scroll_offset,
             state: self.0.clone(),
         }
         .into_any()
@@ -911,7 +911,7 @@ impl ThumbState {
 }
 
 impl ScrollableHandle for UniformListScrollHandle {
-    fn max_offset(&self) -> Size<Pixels> {
+    fn max_offset(&self) -> Point<Pixels> {
         self.0.borrow().base_handle.max_offset()
     }
 
@@ -929,7 +929,7 @@ impl ScrollableHandle for UniformListScrollHandle {
 }
 
 impl ScrollableHandle for ListState {
-    fn max_offset(&self) -> Size<Pixels> {
+    fn max_offset(&self) -> Point<Pixels> {
         self.max_offset_for_scrollbar()
     }
 
@@ -955,7 +955,7 @@ impl ScrollableHandle for ListState {
 }
 
 impl ScrollableHandle for ScrollHandle {
-    fn max_offset(&self) -> Size<Pixels> {
+    fn max_offset(&self) -> Point<Pixels> {
         self.max_offset()
     }
 
@@ -973,7 +973,7 @@ impl ScrollableHandle for ScrollHandle {
 }
 
 pub trait ScrollableHandle: 'static + Any + Sized + Clone {
-    fn max_offset(&self) -> Size<Pixels>;
+    fn max_offset(&self) -> Point<Pixels>;
     fn set_offset(&self, point: Point<Pixels>);
     fn offset(&self) -> Point<Pixels>;
     fn viewport(&self) -> Bounds<Pixels>;
@@ -984,7 +984,7 @@ pub trait ScrollableHandle: 'static + Any + Sized + Clone {
         self.max_offset().along(axis) > Pixels::ZERO
     }
     fn content_size(&self) -> Size<Pixels> {
-        self.viewport().size + self.max_offset()
+        self.viewport().size + self.max_offset().into()
     }
 }
 
@@ -1006,7 +1006,7 @@ impl ScrollbarLayout {
     fn compute_click_offset(
         &self,
         event_position: Point<Pixels>,
-        max_offset: Size<Pixels>,
+        max_offset: Point<Pixels>,
         event_type: ScrollbarMouseEvent,
     ) -> Pixels {
         let Self {

crates/util/Cargo.toml 🔗

@@ -19,14 +19,11 @@ test-support = ["git2", "rand", "util_macros"]
 
 [dependencies]
 anyhow.workspace = true
-async-fs.workspace = true
 async_zip.workspace = true
 collections.workspace = true
-dirs.workspace = true
 dunce = "1.0"
 futures-lite.workspace = true
 futures.workspace = true
-git2 = { workspace = true, optional = true }
 globset.workspace = true
 itertools.workspace = true
 log.workspace = true
@@ -38,15 +35,21 @@ serde.workspace = true
 serde_json.workspace = true
 serde_json_lenient.workspace = true
 shlex.workspace = true
-smol.workspace = true
 take-until.workspace = true
 tempfile.workspace = true
 unicase.workspace = true
 url.workspace = true
 percent-encoding.workspace = true
 util_macros = { workspace = true, optional = true }
-walkdir.workspace = true
+gpui_util.workspace = true
+
+[target.'cfg(not(target_family = "wasm"))'.dependencies]
+smol.workspace = true
 which.workspace = true
+git2 = { workspace = true, optional = true }
+async-fs.workspace = true
+walkdir.workspace = true
+dirs.workspace = true
 
 [target.'cfg(unix)'.dependencies]
 command-fds = "0.3.1"

crates/util/src/archive.rs 🔗

@@ -6,6 +6,7 @@ use async_zip::base::read;
 use futures::AsyncSeek;
 use futures::{AsyncRead, io::BufReader};
 
+#[cfg(any(unix, windows))]
 fn archive_path_is_normal(filename: &str) -> bool {
     Path::new(filename).components().all(|c| {
         matches!(
@@ -64,7 +65,7 @@ pub async fn extract_zip<R: AsyncRead + Unpin>(destination: &Path, reader: R) ->
     Ok(())
 }
 
-#[cfg(not(windows))]
+#[cfg(unix)]
 pub async fn extract_zip<R: AsyncRead + Unpin>(destination: &Path, reader: R) -> Result<()> {
     // Unix needs file permissions copied when extracting.
     // This is only possible to do when a reader impls `AsyncSeek` and `seek::ZipFileReader` is used.
@@ -81,7 +82,7 @@ pub async fn extract_zip<R: AsyncRead + Unpin>(destination: &Path, reader: R) ->
     extract_seekable_zip(destination, file).await
 }
 
-#[cfg(not(windows))]
+#[cfg(unix)]
 pub async fn extract_seekable_zip<R: AsyncRead + AsyncSeek + Unpin>(
     destination: &Path,
     reader: R,

crates/workspace/src/path_list.rs → crates/util/src/path_list.rs 🔗

@@ -3,8 +3,9 @@ use std::{
     sync::Arc,
 };
 
+use crate::paths::SanitizedPath;
 use itertools::Itertools;
-use util::paths::SanitizedPath;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
 
 /// A list of absolute paths, in a specific order.
 ///
@@ -12,7 +13,7 @@ use util::paths::SanitizedPath;
 /// other path lists without regard to the order of the paths.
 ///
 /// The paths can be retrieved in the original order using `ordered_paths()`.
-#[derive(Default, PartialEq, Eq, Debug, Clone)]
+#[derive(Default, PartialEq, Eq, Hash, Debug, Clone)]
 pub struct PathList {
     /// The paths, in lexicographic order.
     paths: Arc<[PathBuf]>,
@@ -118,6 +119,19 @@ impl PathList {
     }
 }
 
+impl Serialize for PathList {
+    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
+        self.paths.serialize(serializer)
+    }
+}
+
+impl<'de> Deserialize<'de> for PathList {
+    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+        let paths: Vec<PathBuf> = Vec::deserialize(deserializer)?;
+        Ok(PathList::new(&paths))
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

crates/util/src/paths.rs 🔗

@@ -1,4 +1,3 @@
-use anyhow::Context;
 use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
 use itertools::Itertools;
 use regex::Regex;
@@ -9,20 +8,19 @@ use std::error::Error;
 use std::fmt::{Display, Formatter};
 use std::mem;
 use std::path::StripPrefixError;
-use std::sync::{Arc, OnceLock};
+use std::sync::Arc;
 use std::{
     ffi::OsStr,
     path::{Path, PathBuf},
     sync::LazyLock,
 };
 
+use crate::rel_path::RelPath;
 use crate::rel_path::RelPathBuf;
-use crate::{rel_path::RelPath, shell::ShellKind};
-
-static HOME_DIR: OnceLock<PathBuf> = OnceLock::new();
 
 /// Returns the path to the user's home directory.
 pub fn home_dir() -> &'static PathBuf {
+    static HOME_DIR: std::sync::OnceLock<PathBuf> = std::sync::OnceLock::new();
     HOME_DIR.get_or_init(|| {
         if cfg!(any(test, feature = "test-support")) {
             if cfg!(target_os = "macos") {
@@ -56,6 +54,13 @@ pub trait PathExt {
     where
         Self: From<&'a Path>,
     {
+        #[cfg(target_family = "wasm")]
+        {
+            std::str::from_utf8(bytes)
+                .map(Path::new)
+                .map(Into::into)
+                .map_err(Into::into)
+        }
         #[cfg(unix)]
         {
             use std::os::unix::prelude::OsStrExt;
@@ -63,6 +68,7 @@ pub trait PathExt {
         }
         #[cfg(windows)]
         {
+            use anyhow::Context;
             use tendril::fmt::{Format, WTF8};
             WTF8::validate(bytes)
                 .then(|| {
@@ -86,11 +92,17 @@ pub trait PathExt {
     fn multiple_extensions(&self) -> Option<String>;
 
     /// Try to make a shell-safe representation of the path.
-    fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result<String>;
+    #[cfg(not(target_family = "wasm"))]
+    fn try_shell_safe(&self, shell_kind: crate::shell::ShellKind) -> anyhow::Result<String>;
 }
 
 impl<T: AsRef<Path>> PathExt for T {
     fn compact(&self) -> PathBuf {
+        #[cfg(target_family = "wasm")]
+        {
+            self.as_ref().to_path_buf()
+        }
+        #[cfg(not(target_family = "wasm"))]
         if cfg!(any(target_os = "linux", target_os = "freebsd")) || cfg!(target_os = "macos") {
             match self.as_ref().strip_prefix(home_dir().as_path()) {
                 Ok(relative_path) => {
@@ -164,7 +176,9 @@ impl<T: AsRef<Path>> PathExt for T {
         Some(parts.into_iter().join("."))
     }
 
-    fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result<String> {
+    #[cfg(not(target_family = "wasm"))]
+    fn try_shell_safe(&self, shell_kind: crate::shell::ShellKind) -> anyhow::Result<String> {
+        use anyhow::Context;
         let path_str = self
             .as_ref()
             .to_str()

crates/util/src/process.rs 🔗

@@ -36,7 +36,12 @@ impl Child {
             .stdout(stdout)
             .stderr(stderr)
             .spawn()
-            .with_context(|| format!("failed to spawn command {command:?}"))?;
+            .with_context(|| {
+                format!(
+                    "failed to spawn command {}",
+                    crate::redact::redact_command(&format!("{command:?}"))
+                )
+            })?;
         Ok(Self { process })
     }
 
@@ -55,7 +60,12 @@ impl Child {
             .stdout(stdout)
             .stderr(stderr)
             .spawn()
-            .with_context(|| format!("failed to spawn command {command:?}"))?;
+            .with_context(|| {
+                format!(
+                    "failed to spawn command {}",
+                    crate::redact::redact_command(&format!("{command:?}"))
+                )
+            })?;
 
         Ok(Self { process })
     }

crates/util/src/shell.rs 🔗

@@ -1012,4 +1012,40 @@ mod tests {
             "uname".to_string()
         );
     }
+
+    #[test]
+    fn test_try_quote_single_quote_paths() {
+        let path_with_quote = r"C:\Temp\O'Brien\repo";
+        let shlex_shells = [
+            ShellKind::Posix,
+            ShellKind::Fish,
+            ShellKind::Csh,
+            ShellKind::Tcsh,
+            ShellKind::Rc,
+            ShellKind::Xonsh,
+            ShellKind::Elvish,
+            ShellKind::Nushell,
+        ];
+
+        for shell_kind in shlex_shells {
+            let quoted = shell_kind.try_quote(path_with_quote).unwrap().into_owned();
+            assert_ne!(quoted, path_with_quote);
+            assert_eq!(
+                shlex::split(&quoted),
+                Some(vec![path_with_quote.to_string()])
+            );
+
+            if shell_kind == ShellKind::Nushell {
+                let prefixed = shell_kind.prepend_command_prefix(&quoted);
+                assert!(prefixed.starts_with('^'));
+            }
+        }
+
+        for shell_kind in [ShellKind::PowerShell, ShellKind::Pwsh] {
+            let quoted = shell_kind.try_quote(path_with_quote).unwrap().into_owned();
+            assert!(quoted.starts_with('\''));
+            assert!(quoted.ends_with('\''));
+            assert!(quoted.contains("O''Brien"));
+        }
+    }
 }

crates/util/src/shell_env.rs 🔗

@@ -141,6 +141,14 @@ async fn capture_windows(
         std::env::current_exe().context("Failed to determine current zed executable path.")?;
 
     let shell_kind = ShellKind::new(shell_path, true);
+    let directory_string = directory.display().to_string();
+    let zed_path_string = zed_path.display().to_string();
+    let quote_for_shell = |value: &str| {
+        shell_kind
+            .try_quote(value)
+            .map(|quoted| quoted.into_owned())
+            .unwrap_or_else(|| value.to_owned())
+    };
     let mut cmd = crate::command::new_command(shell_path);
     cmd.args(args);
     let cmd = match shell_kind {
@@ -149,52 +157,54 @@ async fn capture_windows(
         | ShellKind::Rc
         | ShellKind::Fish
         | ShellKind::Xonsh
-        | ShellKind::Posix => cmd.args([
-            "-l",
-            "-i",
-            "-c",
-            &format!(
-                "cd '{}'; '{}' --printenv",
-                directory.display(),
-                zed_path.display()
-            ),
-        ]),
-        ShellKind::PowerShell | ShellKind::Pwsh => cmd.args([
-            "-NonInteractive",
-            "-NoProfile",
-            "-Command",
-            &format!(
-                "Set-Location '{}'; & '{}' --printenv",
-                directory.display(),
-                zed_path.display()
-            ),
-        ]),
-        ShellKind::Elvish => cmd.args([
-            "-c",
-            &format!(
-                "cd '{}'; '{}' --printenv",
-                directory.display(),
-                zed_path.display()
-            ),
-        ]),
-        ShellKind::Nushell => cmd.args([
-            "-c",
-            &format!(
-                "cd '{}'; {}'{}' --printenv",
-                directory.display(),
-                shell_kind
-                    .command_prefix()
-                    .map(|prefix| prefix.to_string())
-                    .unwrap_or_default(),
-                zed_path.display()
-            ),
-        ]),
+        | ShellKind::Posix => {
+            let quoted_directory = quote_for_shell(&directory_string);
+            let quoted_zed_path = quote_for_shell(&zed_path_string);
+            cmd.args([
+                "-l",
+                "-i",
+                "-c",
+                &format!("cd {}; {} --printenv", quoted_directory, quoted_zed_path),
+            ])
+        }
+        ShellKind::PowerShell | ShellKind::Pwsh => {
+            let quoted_directory = ShellKind::quote_pwsh(&directory_string);
+            let quoted_zed_path = ShellKind::quote_pwsh(&zed_path_string);
+            cmd.args([
+                "-NonInteractive",
+                "-NoProfile",
+                "-Command",
+                &format!(
+                    "Set-Location {}; & {} --printenv",
+                    quoted_directory, quoted_zed_path
+                ),
+            ])
+        }
+        ShellKind::Elvish => {
+            let quoted_directory = quote_for_shell(&directory_string);
+            let quoted_zed_path = quote_for_shell(&zed_path_string);
+            cmd.args([
+                "-c",
+                &format!("cd {}; {} --printenv", quoted_directory, quoted_zed_path),
+            ])
+        }
+        ShellKind::Nushell => {
+            let quoted_directory = quote_for_shell(&directory_string);
+            let quoted_zed_path = quote_for_shell(&zed_path_string);
+            let zed_command = shell_kind
+                .prepend_command_prefix(&quoted_zed_path)
+                .into_owned();
+            cmd.args([
+                "-c",
+                &format!("cd {}; {} --printenv", quoted_directory, zed_command),
+            ])
+        }
         ShellKind::Cmd => cmd.args([
             "/c",
             "cd",
-            &directory.display().to_string(),
+            &directory_string,
             "&&",
-            &zed_path.display().to_string(),
+            &zed_path_string,
             "--printenv",
         ]),
     }

crates/util/src/test.rs 🔗

@@ -1,16 +1,14 @@
 mod assertions;
 mod marked_text;
 
-use git2;
-use std::{
-    ffi::OsStr,
-    path::{Path, PathBuf},
-};
-use tempfile::TempDir;
-
 pub use assertions::*;
 pub use marked_text::*;
 
+use git2;
+use std::ffi::OsStr;
+use std::path::{Path, PathBuf};
+use tempfile::TempDir;
+
 pub struct TempTree {
     _temp_dir: TempDir,
     path: PathBuf,
@@ -45,6 +43,7 @@ fn write_tree(path: &Path, tree: serde_json::Value) {
                 Value::Object(_) => {
                     fs::create_dir(&path).unwrap();
 
+                    #[cfg(not(target_family = "wasm"))]
                     if path.file_name() == Some(OsStr::new(".git")) {
                         git2::Repository::init(path.parent().unwrap()).unwrap();
                     }

crates/util/src/util.rs 🔗

@@ -1,8 +1,8 @@
-pub mod arc_cow;
 pub mod archive;
 pub mod command;
 pub mod fs;
 pub mod markdown;
+pub mod path_list;
 pub mod paths;
 pub mod process;
 pub mod redact;
@@ -17,40 +17,27 @@ pub mod size;
 pub mod test;
 pub mod time;
 
-use anyhow::{Context as _, Result};
-use futures::Future;
+use anyhow::Result;
 use itertools::Either;
-use paths::PathExt;
 use regex::Regex;
 use std::path::{Path, PathBuf};
-use std::sync::{LazyLock, OnceLock};
+use std::sync::LazyLock;
 use std::{
     borrow::Cow,
     cmp::{self, Ordering},
-    env,
-    ops::{AddAssign, Range, RangeInclusive},
-    panic::Location,
-    pin::Pin,
-    task::{Context, Poll},
-    time::Instant,
+    ops::{Range, RangeInclusive},
 };
 use unicase::UniCase;
 
+pub use gpui_util::*;
+
 pub use take_until::*;
 #[cfg(any(test, feature = "test-support"))]
 pub use util_macros::{line_endings, path, uri};
 
-#[macro_export]
-macro_rules! debug_panic {
-    ( $($fmt_arg:tt)* ) => {
-        if cfg!(debug_assertions) {
-            panic!( $($fmt_arg)* );
-        } else {
-            let backtrace = std::backtrace::Backtrace::capture();
-            log::error!("{}\n{:?}", format_args!($($fmt_arg)*), backtrace);
-        }
-    };
-}
+pub use self::shell::{
+    get_default_system_shell, get_default_system_shell_preferring_bash, get_system_shell,
+};
 
 #[inline]
 pub const fn is_utf8_char_boundary(u8: u8) -> bool {
@@ -174,12 +161,6 @@ fn test_truncate_lines_to_byte_limit() {
     );
 }
 
-pub fn post_inc<T: From<u8> + AddAssign<T> + Copy>(value: &mut T) -> T {
-    let prev = *value;
-    *value += T::from(1);
-    prev
-}
-
 /// Extend a sorted vector with a sorted sequence of items, maintaining the vector's sort order and
 /// enforcing a maximum length. This also de-duplicates items. Sort the items according to the given callback. Before calling this,
 /// both `vec` and `new_items` should already be sorted according to the `cmp` comparator.
@@ -287,7 +268,7 @@ fn load_shell_from_passwd() -> Result<()> {
     );
 
     let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() };
-    let should_set_shell = env::var("SHELL").map_or(true, |shell_env| {
+    let should_set_shell = std::env::var("SHELL").map_or(true, |shell_env| {
         shell_env != shell && !std::path::Path::new(&shell_env).exists()
     });
 
@@ -296,7 +277,7 @@ fn load_shell_from_passwd() -> Result<()> {
             "updating SHELL environment variable to value from passwd entry: {:?}",
             shell,
         );
-        unsafe { env::set_var("SHELL", shell) };
+        unsafe { std::env::set_var("SHELL", shell) };
     }
 
     Ok(())
@@ -304,6 +285,8 @@ fn load_shell_from_passwd() -> Result<()> {
 
 /// Returns a shell escaped path for the current zed executable
 pub fn get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result<String> {
+    use anyhow::Context as _;
+    use paths::PathExt;
     let mut zed_path =
         std::env::current_exe().context("Failed to determine current zed executable path.")?;
     if cfg!(target_os = "linux")
@@ -326,6 +309,7 @@ pub fn get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result<S
 /// Returns a path for the zed cli executable, this function
 /// should be called from the zed executable, not zed-cli.
 pub fn get_zed_cli_path() -> Result<PathBuf> {
+    use anyhow::Context as _;
     let zed_path =
         std::env::current_exe().context("Failed to determine current zed executable path.")?;
     let parent = zed_path
@@ -365,6 +349,8 @@ pub fn get_zed_cli_path() -> Result<PathBuf> {
 
 #[cfg(unix)]
 pub async fn load_login_shell_environment() -> Result<()> {
+    use anyhow::Context as _;
+
     load_shell_from_passwd().log_err();
 
     // If possible, we want to `cd` in the user's `$HOME` to trigger programs
@@ -383,7 +369,7 @@ pub async fn load_login_shell_environment() -> Result<()> {
         if name == "SHLVL" {
             continue;
         }
-        unsafe { env::set_var(&name, &value) };
+        unsafe { std::env::set_var(&name, &value) };
     }
 
     log::info!(
@@ -404,7 +390,7 @@ pub fn set_pre_exec_to_start_new_session(
 ) -> &mut std::process::Command {
     // safety: code in pre_exec should be signal safe.
     // https://man7.org/linux/man-pages/man7/signal-safety.7.html
-    #[cfg(not(target_os = "windows"))]
+    #[cfg(unix)]
     unsafe {
         use std::os::unix::process::CommandExt;
         command.pre_exec(|| {
@@ -485,25 +471,6 @@ pub fn merge_non_null_json_value_into(source: serde_json::Value, target: &mut se
     }
 }
 
-pub fn measure<R>(label: &str, f: impl FnOnce() -> R) -> R {
-    static ZED_MEASUREMENTS: OnceLock<bool> = OnceLock::new();
-    let zed_measurements = ZED_MEASUREMENTS.get_or_init(|| {
-        env::var("ZED_MEASUREMENTS")
-            .map(|measurements| measurements == "1" || measurements == "true")
-            .unwrap_or(false)
-    });
-
-    if *zed_measurements {
-        let start = Instant::now();
-        let result = f();
-        let elapsed = start.elapsed();
-        eprintln!("{}: {:?}", label, elapsed);
-        result
-    } else {
-        f()
-    }
-}
-
 pub fn expanded_and_wrapped_usize_range(
     range: Range<usize>,
     additional_before: usize,
@@ -570,222 +537,6 @@ pub fn wrapped_usize_outward_from(
     })
 }
 
-pub trait ResultExt<E> {
-    type Ok;
-
-    fn log_err(self) -> Option<Self::Ok>;
-    /// Assert that this result should never be an error in development or tests.
-    fn debug_assert_ok(self, reason: &str) -> Self;
-    fn warn_on_err(self) -> Option<Self::Ok>;
-    fn log_with_level(self, level: log::Level) -> Option<Self::Ok>;
-    fn anyhow(self) -> anyhow::Result<Self::Ok>
-    where
-        E: Into<anyhow::Error>;
-}
-
-impl<T, E> ResultExt<E> for Result<T, E>
-where
-    E: std::fmt::Debug,
-{
-    type Ok = T;
-
-    #[track_caller]
-    fn log_err(self) -> Option<T> {
-        self.log_with_level(log::Level::Error)
-    }
-
-    #[track_caller]
-    fn debug_assert_ok(self, reason: &str) -> Self {
-        if let Err(error) = &self {
-            debug_panic!("{reason} - {error:?}");
-        }
-        self
-    }
-
-    #[track_caller]
-    fn warn_on_err(self) -> Option<T> {
-        self.log_with_level(log::Level::Warn)
-    }
-
-    #[track_caller]
-    fn log_with_level(self, level: log::Level) -> Option<T> {
-        match self {
-            Ok(value) => Some(value),
-            Err(error) => {
-                log_error_with_caller(*Location::caller(), error, level);
-                None
-            }
-        }
-    }
-
-    fn anyhow(self) -> anyhow::Result<T>
-    where
-        E: Into<anyhow::Error>,
-    {
-        self.map_err(Into::into)
-    }
-}
-
-fn log_error_with_caller<E>(caller: core::panic::Location<'_>, error: E, level: log::Level)
-where
-    E: std::fmt::Debug,
-{
-    #[cfg(not(target_os = "windows"))]
-    let file = caller.file();
-    #[cfg(target_os = "windows")]
-    let file = caller.file().replace('\\', "/");
-    // In this codebase all crates reside in a `crates` directory,
-    // so discard the prefix up to that segment to find the crate name
-    let file = file.split_once("crates/");
-    let target = file.as_ref().and_then(|(_, s)| s.split_once("/src/"));
-
-    let module_path = target.map(|(krate, module)| {
-        if module.starts_with(krate) {
-            module.trim_end_matches(".rs").replace('/', "::")
-        } else {
-            krate.to_owned() + "::" + &module.trim_end_matches(".rs").replace('/', "::")
-        }
-    });
-    let file = file.map(|(_, file)| format!("crates/{file}"));
-    log::logger().log(
-        &log::Record::builder()
-            .target(module_path.as_deref().unwrap_or(""))
-            .module_path(file.as_deref())
-            .args(format_args!("{:?}", error))
-            .file(Some(caller.file()))
-            .line(Some(caller.line()))
-            .level(level)
-            .build(),
-    );
-}
-
-pub fn log_err<E: std::fmt::Debug>(error: &E) {
-    log_error_with_caller(*Location::caller(), error, log::Level::Error);
-}
-
-pub trait TryFutureExt {
-    fn log_err(self) -> LogErrorFuture<Self>
-    where
-        Self: Sized;
-
-    fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture<Self>
-    where
-        Self: Sized;
-
-    fn warn_on_err(self) -> LogErrorFuture<Self>
-    where
-        Self: Sized;
-    fn unwrap(self) -> UnwrapFuture<Self>
-    where
-        Self: Sized;
-}
-
-impl<F, T, E> TryFutureExt for F
-where
-    F: Future<Output = Result<T, E>>,
-    E: std::fmt::Debug,
-{
-    #[track_caller]
-    fn log_err(self) -> LogErrorFuture<Self>
-    where
-        Self: Sized,
-    {
-        let location = Location::caller();
-        LogErrorFuture(self, log::Level::Error, *location)
-    }
-
-    fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture<Self>
-    where
-        Self: Sized,
-    {
-        LogErrorFuture(self, log::Level::Error, location)
-    }
-
-    #[track_caller]
-    fn warn_on_err(self) -> LogErrorFuture<Self>
-    where
-        Self: Sized,
-    {
-        let location = Location::caller();
-        LogErrorFuture(self, log::Level::Warn, *location)
-    }
-
-    fn unwrap(self) -> UnwrapFuture<Self>
-    where
-        Self: Sized,
-    {
-        UnwrapFuture(self)
-    }
-}
-
-#[must_use]
-pub struct LogErrorFuture<F>(F, log::Level, core::panic::Location<'static>);
-
-impl<F, T, E> Future for LogErrorFuture<F>
-where
-    F: Future<Output = Result<T, E>>,
-    E: std::fmt::Debug,
-{
-    type Output = Option<T>;
-
-    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
-        let level = self.1;
-        let location = self.2;
-        let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) };
-        match inner.poll(cx) {
-            Poll::Ready(output) => Poll::Ready(match output {
-                Ok(output) => Some(output),
-                Err(error) => {
-                    log_error_with_caller(location, error, level);
-                    None
-                }
-            }),
-            Poll::Pending => Poll::Pending,
-        }
-    }
-}
-
-pub struct UnwrapFuture<F>(F);
-
-impl<F, T, E> Future for UnwrapFuture<F>
-where
-    F: Future<Output = Result<T, E>>,
-    E: std::fmt::Debug,
-{
-    type Output = T;
-
-    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
-        let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) };
-        match inner.poll(cx) {
-            Poll::Ready(result) => Poll::Ready(result.unwrap()),
-            Poll::Pending => Poll::Pending,
-        }
-    }
-}
-
-pub struct Deferred<F: FnOnce()>(Option<F>);
-
-impl<F: FnOnce()> Deferred<F> {
-    /// Drop without running the deferred function.
-    pub fn abort(mut self) {
-        self.0.take();
-    }
-}
-
-impl<F: FnOnce()> Drop for Deferred<F> {
-    fn drop(&mut self) {
-        if let Some(f) = self.0.take() {
-            f()
-        }
-    }
-}
-
-/// Run the given function when the returned value is dropped (unless it's cancelled).
-#[must_use]
-pub fn defer<F: FnOnce()>(f: F) -> Deferred<F> {
-    Deferred(Some(f))
-}
-
 #[cfg(any(test, feature = "test-support"))]
 mod rng {
     use rand::prelude::*;
@@ -849,23 +600,6 @@ pub fn asset_str<A: rust_embed::RustEmbed>(path: &str) -> Cow<'static, str> {
     }
 }
 
-/// Expands to an immediately-invoked function expression. Good for using the ? operator
-/// in functions which do not return an Option or Result.
-///
-/// Accepts a normal block, an async block, or an async move block.
-#[macro_export]
-macro_rules! maybe {
-    ($block:block) => {
-        (|| $block)()
-    };
-    (async $block:block) => {
-        (async || $block)()
-    };
-    (async move $block:block) => {
-        (async move || $block)()
-    };
-}
-
 pub trait RangeExt<T> {
     fn sorted(&self) -> Self;
     fn to_inclusive(&self) -> RangeInclusive<T>;
@@ -1022,10 +756,6 @@ pub fn default<D: Default>() -> D {
     Default::default()
 }
 
-pub use self::shell::{
-    get_default_system_shell, get_default_system_shell_preferring_bash, get_system_shell,
-};
-
 #[derive(Debug)]
 pub enum ConnectionResult<O> {
     Timeout,
@@ -1049,15 +779,6 @@ impl<O> From<anyhow::Result<O>> for ConnectionResult<O> {
     }
 }
 
-#[track_caller]
-pub fn some_or_debug_panic<T>(option: Option<T>) -> Option<T> {
-    #[cfg(debug_assertions)]
-    if option.is_none() {
-        panic!("Unexpected None");
-    }
-    option
-}
-
 /// Normalizes a path by resolving `.` and `..` components without
 /// requiring the path to exist on disk (unlike `canonicalize`).
 pub fn normalize_path(path: &Path) -> PathBuf {

crates/vim/src/normal/increment.rs 🔗

@@ -203,20 +203,25 @@ fn find_target(
     let start_offset = start.to_offset(snapshot);
     let end_offset = end.to_offset(snapshot);
 
-    let mut offset = start_offset;
     let mut first_char_is_num = snapshot
-        .chars_at(offset)
+        .chars_at(start_offset)
         .next()
         .map_or(false, |ch| ch.is_ascii_hexdigit());
     let mut pre_char = String::new();
 
-    let next_offset = offset
+    let next_offset = start_offset
         + snapshot
             .chars_at(start_offset)
             .next()
             .map_or(0, |ch| ch.len_utf8());
-    // Backward scan to find the start of the number, but stop at start_offset
+    // Backward scan to find the start of the number, but stop at start_offset.
+    // We track `offset` as the start position of the current character. Initialize
+    // to `next_offset` and decrement at the start of each iteration so that `offset`
+    // always lands on a valid character boundary (not in the middle of a multibyte char).
+    let mut offset = next_offset;
     for ch in snapshot.reversed_chars_at(next_offset) {
+        offset -= ch.len_utf8();
+
         // Search boundaries
         if offset.0 == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) {
             break;
@@ -238,7 +243,6 @@ fn find_target(
         }
 
         pre_char.insert(0, ch);
-        offset -= ch.len_utf8();
     }
 
     // The backward scan breaks on whitespace, including newlines. Without this
@@ -895,4 +899,15 @@ mod test {
             .await
             .assert_eq("# Title\n2. item\nˇ2. item\n3. item");
     }
+
+    #[gpui::test]
+    async fn test_increment_with_multibyte_characters(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+
+        // Test cursor after a multibyte character - this would panic before the fix
+        // because the backward scan would land in the middle of the Korean character
+        cx.set_state("지ˇ1", Mode::Normal);
+        cx.simulate_keystrokes("ctrl-a");
+        cx.assert_state("지ˇ2", Mode::Normal);
+    }
 }

crates/vim/src/normal/yank.rs 🔗

@@ -228,7 +228,7 @@ impl Vim {
         editor.highlight_background(
             HighlightKey::HighlightOnYank,
             &ranges_to_highlight,
-            |_, colors| colors.colors().editor_document_highlight_read_background,
+            |_, colors| colors.colors().vim_yank_background,
             cx,
         );
         cx.spawn(async move |this, cx| {

crates/web_search_providers/Cargo.toml 🔗

@@ -14,6 +14,7 @@ path = "src/web_search_providers.rs"
 [dependencies]
 anyhow.workspace = true
 client.workspace = true
+cloud_api_types.workspace = true
 cloud_llm_client.workspace = true
 futures.workspace = true
 gpui.workspace = true

crates/web_search_providers/src/cloud.rs 🔗

@@ -1,7 +1,8 @@
 use std::sync::Arc;
 
 use anyhow::{Context as _, Result};
-use client::Client;
+use client::{Client, UserStore};
+use cloud_api_types::OrganizationId;
 use cloud_llm_client::{WebSearchBody, WebSearchResponse};
 use futures::AsyncReadExt as _;
 use gpui::{App, AppContext, Context, Entity, Subscription, Task};
@@ -14,8 +15,8 @@ pub struct CloudWebSearchProvider {
 }
 
 impl CloudWebSearchProvider {
-    pub fn new(client: Arc<Client>, cx: &mut App) -> Self {
-        let state = cx.new(|cx| State::new(client, cx));
+    pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) -> Self {
+        let state = cx.new(|cx| State::new(client, user_store, cx));
 
         Self { state }
     }
@@ -23,24 +24,31 @@ impl CloudWebSearchProvider {
 
 pub struct State {
     client: Arc<Client>,
+    user_store: Entity<UserStore>,
     llm_api_token: LlmApiToken,
     _llm_token_subscription: Subscription,
 }
 
 impl State {
-    pub fn new(client: Arc<Client>, cx: &mut Context<Self>) -> Self {
+    pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
         let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);
 
         Self {
             client,
+            user_store,
             llm_api_token: LlmApiToken::default(),
             _llm_token_subscription: cx.subscribe(
                 &refresh_llm_token_listener,
                 |this, _, _event, cx| {
                     let client = this.client.clone();
                     let llm_api_token = this.llm_api_token.clone();
+                    let organization_id = this
+                        .user_store
+                        .read(cx)
+                        .current_organization()
+                        .map(|o| o.id.clone());
                     cx.spawn(async move |_this, _cx| {
-                        llm_api_token.refresh(&client).await?;
+                        llm_api_token.refresh(&client, organization_id).await?;
                         anyhow::Ok(())
                     })
                     .detach_and_log_err(cx);
@@ -61,21 +69,31 @@ impl WebSearchProvider for CloudWebSearchProvider {
         let state = self.state.read(cx);
         let client = state.client.clone();
         let llm_api_token = state.llm_api_token.clone();
+        let organization_id = state
+            .user_store
+            .read(cx)
+            .current_organization()
+            .map(|o| o.id.clone());
         let body = WebSearchBody { query };
-        cx.background_spawn(async move { perform_web_search(client, llm_api_token, body).await })
+        cx.background_spawn(async move {
+            perform_web_search(client, llm_api_token, organization_id, body).await
+        })
     }
 }
 
 async fn perform_web_search(
     client: Arc<Client>,
     llm_api_token: LlmApiToken,
+    organization_id: Option<OrganizationId>,
     body: WebSearchBody,
 ) -> Result<WebSearchResponse> {
     const MAX_RETRIES: usize = 3;
 
     let http_client = &client.http_client();
     let mut retries_remaining = MAX_RETRIES;
-    let mut token = llm_api_token.acquire(&client).await?;
+    let mut token = llm_api_token
+        .acquire(&client, organization_id.clone())
+        .await?;
 
     loop {
         if retries_remaining == 0 {
@@ -100,7 +118,9 @@ async fn perform_web_search(
             response.body_mut().read_to_string(&mut body).await?;
             return Ok(serde_json::from_str(&body)?);
         } else if response.needs_llm_token_refresh() {
-            token = llm_api_token.refresh(&client).await?;
+            token = llm_api_token
+                .refresh(&client, organization_id.clone())
+                .await?;
             retries_remaining -= 1;
         } else {
             // For now we will only retry if the LLM token is expired,

crates/web_search_providers/src/web_search_providers.rs 🔗

@@ -1,26 +1,28 @@
 mod cloud;
 
-use client::Client;
+use client::{Client, UserStore};
 use gpui::{App, Context, Entity};
 use language_model::LanguageModelRegistry;
 use std::sync::Arc;
 use web_search::{WebSearchProviderId, WebSearchRegistry};
 
-pub fn init(client: Arc<Client>, cx: &mut App) {
+pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
     let registry = WebSearchRegistry::global(cx);
     registry.update(cx, |registry, cx| {
-        register_web_search_providers(registry, client, cx);
+        register_web_search_providers(registry, client, user_store, cx);
     });
 }
 
 fn register_web_search_providers(
     registry: &mut WebSearchRegistry,
     client: Arc<Client>,
+    user_store: Entity<UserStore>,
     cx: &mut Context<WebSearchRegistry>,
 ) {
     register_zed_web_search_provider(
         registry,
         client.clone(),
+        user_store.clone(),
         &LanguageModelRegistry::global(cx),
         cx,
     );
@@ -29,7 +31,13 @@ fn register_web_search_providers(
         &LanguageModelRegistry::global(cx),
         move |this, registry, event, cx| {
             if let language_model::Event::DefaultModelChanged = event {
-                register_zed_web_search_provider(this, client.clone(), &registry, cx)
+                register_zed_web_search_provider(
+                    this,
+                    client.clone(),
+                    user_store.clone(),
+                    &registry,
+                    cx,
+                )
             }
         },
     )
@@ -39,6 +47,7 @@ fn register_web_search_providers(
 fn register_zed_web_search_provider(
     registry: &mut WebSearchRegistry,
     client: Arc<Client>,
+    user_store: Entity<UserStore>,
     language_model_registry: &Entity<LanguageModelRegistry>,
     cx: &mut Context<WebSearchRegistry>,
 ) {
@@ -47,7 +56,10 @@ fn register_zed_web_search_provider(
         .default_model()
         .is_some_and(|default| default.is_provided_by_zed());
     if using_zed_provider {
-        registry.register_provider(cloud::CloudWebSearchProvider::new(client, cx), cx)
+        registry.register_provider(
+            cloud::CloudWebSearchProvider::new(client, user_store, cx),
+            cx,
+        )
     } else {
         registry.unregister_provider(WebSearchProviderId(
             cloud::ZED_WEB_SEARCH_PROVIDER_ID.into(),

crates/workspace/Cargo.toml 🔗

@@ -14,7 +14,6 @@ doctest = false
 
 [features]
 test-support = [
-    "call/test-support",
     "client/test-support",
     "http_client/test-support",
     "db/test-support",
@@ -72,7 +71,6 @@ zed_actions.workspace = true
 windows.workspace = true
 
 [dev-dependencies]
-call = { workspace = true, features = ["test-support"] }
 client = { workspace = true, features = ["test-support"] }
 dap = { workspace = true, features = ["test-support"] }
 db = { workspace = true, features = ["test-support"] }

crates/workspace/src/item.rs 🔗

@@ -925,10 +925,10 @@ impl<T: Item> ItemHandle for Entity<T> {
                 },
             ));
 
-            cx.on_blur(
+            cx.on_focus_out(
                 &self.read(cx).focus_handle(cx),
                 window,
-                move |workspace, window, cx| {
+                move |workspace, _event, window, cx| {
                     if let Some(item) = weak_item.upgrade()
                         && item.workspace_settings(cx).autosave == AutosaveSetting::OnFocusChange
                     {
@@ -1371,7 +1371,8 @@ pub mod test {
     };
     use gpui::{
         AnyElement, App, AppContext as _, Context, Entity, EntityId, EventEmitter, Focusable,
-        InteractiveElement, IntoElement, Render, SharedString, Task, WeakEntity, Window,
+        InteractiveElement, IntoElement, ParentElement, Render, SharedString, Task, WeakEntity,
+        Window,
     };
     use project::{Project, ProjectEntryId, ProjectPath, WorktreeId};
     use std::{any::Any, cell::Cell, sync::Arc};
@@ -1400,6 +1401,7 @@ pub mod test {
         pub tab_detail: Cell<Option<usize>>,
         serialize: Option<Box<dyn Fn() -> Option<Task<anyhow::Result<()>>>>>,
         focus_handle: gpui::FocusHandle,
+        pub child_focus_handles: Vec<gpui::FocusHandle>,
     }
 
     impl project::ProjectItem for TestProjectItem {
@@ -1482,6 +1484,7 @@ pub mod test {
                 workspace_id: Default::default(),
                 focus_handle: cx.focus_handle(),
                 serialize: None,
+                child_focus_handles: Vec::new(),
             }
         }
 
@@ -1529,6 +1532,11 @@ pub mod test {
             self
         }
 
+        pub fn with_child_focus_handles(mut self, count: usize, cx: &mut Context<Self>) -> Self {
+            self.child_focus_handles = (0..count).map(|_| cx.focus_handle()).collect();
+            self
+        }
+
         pub fn set_state(&mut self, state: String, cx: &mut Context<Self>) {
             self.push_to_nav_history(cx);
             self.state = state;
@@ -1543,7 +1551,12 @@ pub mod test {
 
     impl Render for TestItem {
         fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-            gpui::div().track_focus(&self.focus_handle(cx))
+            let parent = gpui::div().track_focus(&self.focus_handle(cx));
+            self.child_focus_handles
+                .iter()
+                .fold(parent, |parent, child_handle| {
+                    parent.child(gpui::div().track_focus(child_handle))
+                })
         }
     }
 
@@ -1641,23 +1654,30 @@ pub mod test {
         where
             Self: Sized,
         {
-            Task::ready(Some(cx.new(|cx| Self {
-                state: self.state.clone(),
-                label: self.label.clone(),
-                save_count: self.save_count,
-                save_as_count: self.save_as_count,
-                reload_count: self.reload_count,
-                is_dirty: self.is_dirty,
-                buffer_kind: self.buffer_kind,
-                has_conflict: self.has_conflict,
-                has_deleted_file: self.has_deleted_file,
-                project_items: self.project_items.clone(),
-                nav_history: None,
-                tab_descriptions: None,
-                tab_detail: Default::default(),
-                workspace_id: self.workspace_id,
-                focus_handle: cx.focus_handle(),
-                serialize: None,
+            Task::ready(Some(cx.new(|cx| {
+                Self {
+                    state: self.state.clone(),
+                    label: self.label.clone(),
+                    save_count: self.save_count,
+                    save_as_count: self.save_as_count,
+                    reload_count: self.reload_count,
+                    is_dirty: self.is_dirty,
+                    buffer_kind: self.buffer_kind,
+                    has_conflict: self.has_conflict,
+                    has_deleted_file: self.has_deleted_file,
+                    project_items: self.project_items.clone(),
+                    nav_history: None,
+                    tab_descriptions: None,
+                    tab_detail: Default::default(),
+                    workspace_id: self.workspace_id,
+                    focus_handle: cx.focus_handle(),
+                    serialize: None,
+                    child_focus_handles: self
+                        .child_focus_handles
+                        .iter()
+                        .map(|_| cx.focus_handle())
+                        .collect(),
+                }
             })))
         }
 

crates/workspace/src/multi_workspace.rs 🔗

@@ -5,7 +5,8 @@ use gpui::{
     ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId,
     actions, deferred, px,
 };
-use project::Project;
+use project::{DisableAiSettings, Project};
+use settings::Settings;
 use std::future::Future;
 use std::path::PathBuf;
 use ui::prelude::*;
@@ -14,8 +15,8 @@ use util::ResultExt;
 const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0);
 
 use crate::{
-    DockPosition, Item, ModalView, Panel, Toast, Workspace, WorkspaceId, client_side_decorations,
-    notifications::NotificationId,
+    CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, Panel, Toast,
+    Workspace, WorkspaceId, client_side_decorations, notifications::NotificationId,
 };
 
 actions!(
@@ -122,6 +123,13 @@ impl MultiWorkspace {
             }
         });
         let quit_subscription = cx.on_app_quit(Self::app_will_quit);
+        let settings_subscription =
+            cx.observe_global_in::<settings::SettingsStore>(window, |this, window, cx| {
+                if DisableAiSettings::get_global(cx).disable_ai && this.sidebar_open {
+                    this.close_sidebar(window, cx);
+                }
+            });
+        Self::subscribe_to_workspace(&workspace, cx);
         Self {
             window_id: window.window_handle().window_id(),
             workspaces: vec![workspace],
@@ -132,7 +140,11 @@ impl MultiWorkspace {
             pending_removal_tasks: Vec::new(),
             _serialize_task: None,
             _create_task: None,
-            _subscriptions: vec![release_subscription, quit_subscription],
+            _subscriptions: vec![
+                release_subscription,
+                quit_subscription,
+                settings_subscription,
+            ],
         }
     }
 
@@ -168,7 +180,7 @@ impl MultiWorkspace {
     }
 
     pub fn multi_workspace_enabled(&self, cx: &App) -> bool {
-        cx.has_flag::<AgentV2FeatureFlag>()
+        cx.has_flag::<AgentV2FeatureFlag>() && !DisableAiSettings::get_global(cx).disable_ai
     }
 
     pub fn toggle_sidebar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -237,6 +249,41 @@ impl MultiWorkspace {
         cx.notify();
     }
 
+    pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context<Self>) {
+        cx.spawn_in(window, async move |this, cx| {
+            let workspaces = this.update(cx, |multi_workspace, _cx| {
+                multi_workspace.workspaces().to_vec()
+            })?;
+
+            for workspace in workspaces {
+                let should_continue = workspace
+                    .update_in(cx, |workspace, window, cx| {
+                        workspace.prepare_to_close(CloseIntent::CloseWindow, window, cx)
+                    })?
+                    .await?;
+                if !should_continue {
+                    return anyhow::Ok(());
+                }
+            }
+
+            cx.update(|window, _cx| {
+                window.remove_window();
+            })?;
+
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+    }
+
+    fn subscribe_to_workspace(workspace: &Entity<Workspace>, cx: &mut Context<Self>) {
+        cx.subscribe(workspace, |this, workspace, event, cx| {
+            if let WorkspaceEvent::Activate = event {
+                this.activate(workspace, cx);
+            }
+        })
+        .detach();
+    }
+
     pub fn is_sidebar_open(&self) -> bool {
         self.sidebar_open
     }
@@ -290,6 +337,7 @@ impl MultiWorkspace {
                     workspace.set_workspace_sidebar_open(true, cx);
                 });
             }
+            Self::subscribe_to_workspace(&workspace, cx);
             self.workspaces.push(workspace);
             cx.notify();
             self.workspaces.len() - 1
@@ -412,6 +460,7 @@ impl MultiWorkspace {
             .update(cx, |workspace, cx| workspace.focus_panel::<T>(window, cx))
     }
 
+    // used in a test
     pub fn toggle_modal<V: ModalView, B>(
         &mut self,
         window: &mut Window,
@@ -673,10 +722,20 @@ impl Render for MultiWorkspace {
             None
         };
 
+        let ui_font = theme::setup_ui_font(window, cx);
+        let text_color = cx.theme().colors().text;
+
+        let workspace = self.workspace().clone();
+        let workspace_key_context = workspace.update(cx, |workspace, cx| workspace.key_context(cx));
+        let root = workspace.update(cx, |workspace, cx| workspace.actions(h_flex(), window, cx));
+
         client_side_decorations(
-            h_flex()
-                .key_context("Workspace")
+            root.key_context(workspace_key_context)
+                .relative()
                 .size_full()
+                .font(ui_font)
+                .text_color(text_color)
+                .on_action(cx.listener(Self::close_window))
                 .on_action(
                     cx.listener(|this: &mut Self, _: &NewWorkspaceInWindow, window, cx| {
                         this.create_workspace(window, cx);
@@ -692,16 +751,18 @@ impl Render for MultiWorkspace {
                         this.activate_previous_workspace(window, cx);
                     },
                 ))
-                .on_action(cx.listener(
-                    |this: &mut Self, _: &ToggleWorkspaceSidebar, window, cx| {
-                        this.toggle_sidebar(window, cx);
-                    },
-                ))
-                .on_action(
-                    cx.listener(|this: &mut Self, _: &FocusWorkspaceSidebar, window, cx| {
-                        this.focus_sidebar(window, cx);
-                    }),
-                )
+                .when(self.multi_workspace_enabled(cx), |this| {
+                    this.on_action(cx.listener(
+                        |this: &mut Self, _: &ToggleWorkspaceSidebar, window, cx| {
+                            this.toggle_sidebar(window, cx);
+                        },
+                    ))
+                    .on_action(cx.listener(
+                        |this: &mut Self, _: &FocusWorkspaceSidebar, window, cx| {
+                            this.focus_sidebar(window, cx);
+                        },
+                    ))
+                })
                 .when(
                     self.sidebar_open() && self.multi_workspace_enabled(cx),
                     |this| {
@@ -723,7 +784,8 @@ impl Render for MultiWorkspace {
                         .size_full()
                         .overflow_hidden()
                         .child(self.workspace().clone()),
-                ),
+                )
+                .child(self.workspace().read(cx).modal_layer.clone()),
             window,
             cx,
             Tiling {
@@ -733,3 +795,92 @@ impl Render for MultiWorkspace {
         )
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use fs::FakeFs;
+    use gpui::TestAppContext;
+    use settings::SettingsStore;
+
+    fn init_test(cx: &mut TestAppContext) {
+        cx.update(|cx| {
+            let settings_store = SettingsStore::test(cx);
+            cx.set_global(settings_store);
+            theme::init(theme::LoadThemes::JustBase, cx);
+            DisableAiSettings::register(cx);
+            cx.update_flags(false, vec!["agent-v2".into()]);
+        });
+    }
+
+    #[gpui::test]
+    async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.executor());
+        let project = Project::test(fs, [], cx).await;
+
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+
+        multi_workspace.read_with(cx, |mw, cx| {
+            assert!(mw.multi_workspace_enabled(cx));
+        });
+
+        multi_workspace.update_in(cx, |mw, _window, cx| {
+            mw.open_sidebar(cx);
+            assert!(mw.is_sidebar_open());
+        });
+
+        cx.update(|_window, cx| {
+            DisableAiSettings::override_global(DisableAiSettings { disable_ai: true }, cx);
+        });
+        cx.run_until_parked();
+
+        multi_workspace.read_with(cx, |mw, cx| {
+            assert!(
+                !mw.is_sidebar_open(),
+                "Sidebar should be closed when disable_ai is true"
+            );
+            assert!(
+                !mw.multi_workspace_enabled(cx),
+                "Multi-workspace should be disabled when disable_ai is true"
+            );
+        });
+
+        multi_workspace.update_in(cx, |mw, window, cx| {
+            mw.toggle_sidebar(window, cx);
+        });
+        multi_workspace.read_with(cx, |mw, _cx| {
+            assert!(
+                !mw.is_sidebar_open(),
+                "Sidebar should remain closed when toggled with disable_ai true"
+            );
+        });
+
+        cx.update(|_window, cx| {
+            DisableAiSettings::override_global(DisableAiSettings { disable_ai: false }, cx);
+        });
+        cx.run_until_parked();
+
+        multi_workspace.read_with(cx, |mw, cx| {
+            assert!(
+                mw.multi_workspace_enabled(cx),
+                "Multi-workspace should be enabled after re-enabling AI"
+            );
+            assert!(
+                !mw.is_sidebar_open(),
+                "Sidebar should still be closed after re-enabling AI (not auto-opened)"
+            );
+        });
+
+        multi_workspace.update_in(cx, |mw, window, cx| {
+            mw.toggle_sidebar(window, cx);
+        });
+        multi_workspace.read_with(cx, |mw, _cx| {
+            assert!(
+                mw.is_sidebar_open(),
+                "Sidebar should open when toggled after re-enabling AI"
+            );
+        });
+    }
+}

crates/workspace/src/pane.rs 🔗

@@ -1468,7 +1468,8 @@ impl Pane {
     fn update_active_tab(&mut self, index: usize) {
         if !self.is_tab_pinned(index) {
             self.suppress_scroll = false;
-            self.tab_bar_scroll_handle.scroll_to_item(index);
+            self.tab_bar_scroll_handle
+                .scroll_to_item(index - self.pinned_tab_count);
         }
     }
 
@@ -3449,7 +3450,7 @@ impl Pane {
                 cx,
             )
             .children(pinned_tabs.len().ne(&0).then(|| {
-                let max_scroll = self.tab_bar_scroll_handle.max_offset().width;
+                let max_scroll = self.tab_bar_scroll_handle.max_offset().x;
                 // We need to check both because offset returns delta values even when the scroll handle is not scrollable
                 let is_scrolled = self.tab_bar_scroll_handle.offset().x < px(0.);
                 // Avoid flickering when max_offset is very small (< 2px).
@@ -7935,6 +7936,71 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_pinned_tabs_scroll_to_item_uses_correct_index(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.executor());
+
+        let project = Project::test(fs, None, cx).await;
+        let (workspace, cx) =
+            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+        let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
+
+        cx.simulate_resize(size(px(400.), px(300.)));
+
+        for label in ["A", "B", "C"] {
+            add_labeled_item(&pane, label, false, cx);
+        }
+
+        pane.update_in(cx, |pane, window, cx| {
+            pane.pin_tab_at(0, window, cx);
+            pane.pin_tab_at(1, window, cx);
+            pane.pin_tab_at(2, window, cx);
+        });
+
+        for label in ["D", "E", "F", "G", "H", "I", "J", "K"] {
+            add_labeled_item(&pane, label, false, cx);
+        }
+
+        assert_item_labels(
+            &pane,
+            ["A!", "B!", "C!", "D", "E", "F", "G", "H", "I", "J", "K*"],
+            cx,
+        );
+
+        cx.run_until_parked();
+
+        // Verify overflow exists (precondition for scroll test)
+        let scroll_handle =
+            pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone());
+        assert!(
+            scroll_handle.max_offset().x > px(0.),
+            "Test requires tab overflow to verify scrolling. Increase tab count or reduce window width."
+        );
+
+        // Activate a different tab first, then activate K
+        // This ensures we're not just re-activating an already-active tab
+        pane.update_in(cx, |pane, window, cx| {
+            pane.activate_item(3, true, true, window, cx);
+        });
+        cx.run_until_parked();
+
+        pane.update_in(cx, |pane, window, cx| {
+            pane.activate_item(10, true, true, window, cx);
+        });
+        cx.run_until_parked();
+
+        let scroll_handle =
+            pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone());
+        let k_tab_bounds = cx.debug_bounds("TAB-10").unwrap();
+        let scroll_bounds = scroll_handle.bounds();
+
+        assert!(
+            k_tab_bounds.left() >= scroll_bounds.left(),
+            "Active tab K should be scrolled into view"
+        );
+    }
+
     #[gpui::test]
     async fn test_close_all_items_including_pinned(cx: &mut TestAppContext) {
         init_test(cx);

crates/workspace/src/pane_group.rs 🔗

@@ -61,22 +61,33 @@ impl PaneGroup {
         new_pane: &Entity<Pane>,
         direction: SplitDirection,
         cx: &mut App,
-    ) -> Result<()> {
-        let result = match &mut self.root {
+    ) {
+        let found = match &mut self.root {
             Member::Pane(pane) => {
                 if pane == old_pane {
                     self.root = Member::new_axis(old_pane.clone(), new_pane.clone(), direction);
-                    Ok(())
+                    true
                 } else {
-                    anyhow::bail!("Pane not found");
+                    false
                 }
             }
             Member::Axis(axis) => axis.split(old_pane, new_pane, direction),
         };
-        if result.is_ok() {
-            self.mark_positions(cx);
+
+        // If the pane wasn't found, fall back to splitting the first pane in the tree.
+        if !found {
+            let first_pane = self.root.first_pane();
+            match &mut self.root {
+                Member::Pane(_) => {
+                    self.root = Member::new_axis(first_pane, new_pane.clone(), direction);
+                }
+                Member::Axis(axis) => {
+                    let _ = axis.split(&first_pane, new_pane, direction);
+                }
+            }
         }
-        result
+
+        self.mark_positions(cx);
     }
 
     pub fn bounding_box_for_pane(&self, pane: &Entity<Pane>) -> Option<Bounds<Pixels>> {
@@ -612,12 +623,12 @@ impl PaneAxis {
         old_pane: &Entity<Pane>,
         new_pane: &Entity<Pane>,
         direction: SplitDirection,
-    ) -> Result<()> {
+    ) -> bool {
         for (mut idx, member) in self.members.iter_mut().enumerate() {
             match member {
                 Member::Axis(axis) => {
-                    if axis.split(old_pane, new_pane, direction).is_ok() {
-                        return Ok(());
+                    if axis.split(old_pane, new_pane, direction) {
+                        return true;
                     }
                 }
                 Member::Pane(pane) => {
@@ -631,12 +642,12 @@ impl PaneAxis {
                             *member =
                                 Member::new_axis(old_pane.clone(), new_pane.clone(), direction);
                         }
-                        return Ok(());
+                        return true;
                     }
                 }
             }
         }
-        anyhow::bail!("Pane not found");
+        false
     }
 
     fn insert_pane(&mut self, idx: usize, new_pane: &Entity<Pane>) {

crates/workspace/src/persistence.rs 🔗

@@ -4359,4 +4359,114 @@ mod tests {
             "Pending removal task should have deleted the workspace row when awaited"
         );
     }
+
+    #[gpui::test]
+    async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) {
+        use crate::multi_workspace::MultiWorkspace;
+        use feature_flags::FeatureFlagAppExt;
+        use project::Project;
+
+        crate::tests::init_test(cx);
+
+        cx.update(|cx| {
+            cx.set_staff(true);
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+        });
+
+        let fs = fs::FakeFs::new(cx.executor());
+        let project = Project::test(fs.clone(), [], cx).await;
+
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        multi_workspace.update_in(cx, |mw, _, cx| {
+            mw.set_random_database_id(cx);
+        });
+
+        multi_workspace.update_in(cx, |mw, window, cx| {
+            mw.create_workspace(window, cx);
+        });
+
+        cx.run_until_parked();
+
+        let new_workspace_db_id =
+            multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id());
+        assert!(
+            new_workspace_db_id.is_some(),
+            "After run_until_parked, the workspace should have a database_id"
+        );
+
+        let workspace_id = new_workspace_db_id.unwrap();
+
+        assert!(
+            DB.workspace_for_id(workspace_id).is_some(),
+            "The workspace row should exist in the DB"
+        );
+
+        cx.simulate_resize(gpui::size(px(1024.0), px(768.0)));
+
+        // Advance the clock past the 100ms debounce timer so the bounds
+        // observer task fires
+        cx.executor().advance_clock(Duration::from_millis(200));
+        cx.run_until_parked();
+
+        let serialized = DB
+            .workspace_for_id(workspace_id)
+            .expect("workspace row should still exist");
+        assert!(
+            serialized.window_bounds.is_some(),
+            "The bounds observer should write bounds for the workspace's real DB ID, \
+             even when the workspace was created via create_workspace (where the ID \
+             is assigned asynchronously after construction)."
+        );
+    }
+
+    #[gpui::test]
+    async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) {
+        use crate::multi_workspace::MultiWorkspace;
+        use feature_flags::FeatureFlagAppExt;
+        use project::Project;
+
+        crate::tests::init_test(cx);
+
+        cx.update(|cx| {
+            cx.set_staff(true);
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+        });
+
+        let fs = fs::FakeFs::new(cx.executor());
+        let dir = tempfile::TempDir::with_prefix("flush_bounds_test").unwrap();
+        fs.insert_tree(dir.path(), json!({})).await;
+
+        let project = Project::test(fs.clone(), [dir.path()], cx).await;
+
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        let workspace_id = DB.next_id().await.unwrap();
+        multi_workspace.update_in(cx, |mw, _, cx| {
+            mw.workspace().update(cx, |ws, _cx| {
+                ws.set_database_id(workspace_id);
+            });
+        });
+
+        let task = multi_workspace.update_in(cx, |mw, window, cx| {
+            mw.workspace()
+                .update(cx, |ws, cx| ws.flush_serialization(window, cx))
+        });
+        task.await;
+
+        let after = DB
+            .workspace_for_id(workspace_id)
+            .expect("workspace row should exist after flush_serialization");
+        assert!(
+            !after.paths.is_empty(),
+            "flush_serialization should have written paths via save_workspace"
+        );
+        assert!(
+            after.window_bounds.is_some(),
+            "flush_serialization should ensure window bounds are persisted to the DB \
+             before the process exits."
+        );
+    }
 }

crates/workspace/src/persistence/model.rs 🔗

@@ -93,9 +93,9 @@ pub(crate) struct SerializedWorkspace {
 
 #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)]
 pub struct DockStructure {
-    pub(crate) left: DockData,
-    pub(crate) right: DockData,
-    pub(crate) bottom: DockData,
+    pub left: DockData,
+    pub right: DockData,
+    pub bottom: DockData,
 }
 
 impl RemoteConnectionKind {
@@ -143,9 +143,9 @@ impl Bind for DockStructure {
 
 #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)]
 pub struct DockData {
-    pub(crate) visible: bool,
-    pub(crate) active_panel: Option<String>,
-    pub(crate) zoom: bool,
+    pub visible: bool,
+    pub active_panel: Option<String>,
+    pub zoom: bool,
 }
 
 impl Column for DockData {

crates/workspace/src/welcome.rs 🔗

@@ -151,7 +151,7 @@ const CONTENT: (Section<4>, Section<3>) = (
             SectionEntry {
                 icon: IconName::FolderOpen,
                 title: "Open Project",
-                action: &Open,
+                action: &Open::DEFAULT,
             },
             SectionEntry {
                 icon: IconName::CloudDownload,

crates/workspace/src/workspace.rs 🔗

@@ -7,11 +7,14 @@ mod multi_workspace;
 pub mod notifications;
 pub mod pane;
 pub mod pane_group;
-mod path_list;
+pub mod path_list {
+    pub use util::path_list::{PathList, SerializedPathList};
+}
 mod persistence;
 pub mod searchable;
 mod security_modal;
 pub mod shared_screen;
+use db::smol::future::yield_now;
 pub use shared_screen::SharedScreen;
 mod status_bar;
 pub mod tasks;
@@ -28,7 +31,7 @@ pub use multi_workspace::{
     NextWorkspaceInWindow, PreviousWorkspaceInWindow, Sidebar, SidebarEvent, SidebarHandle,
     ToggleWorkspaceSidebar,
 };
-pub use path_list::PathList;
+pub use path_list::{PathList, SerializedPathList};
 pub use toast_layer::{ToastAction, ToastLayer, ToastView};
 
 use anyhow::{Context as _, Result, anyhow};
@@ -76,7 +79,10 @@ pub use pane_group::{
 use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace};
 pub use persistence::{
     DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items,
-    model::{ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, SessionWorkspace},
+    model::{
+        DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation,
+        SessionWorkspace,
+    },
     read_serialized_multi_workspaces,
 };
 use postage::stream::Stream;
@@ -146,7 +152,7 @@ use crate::{item::ItemBufferKind, notifications::NotificationId};
 use crate::{
     persistence::{
         SerializedAxis,
-        model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup},
+        model::{DockData, SerializedItem, SerializedPane, SerializedPaneGroup},
     },
     security_modal::SecurityModal,
 };
@@ -203,6 +209,34 @@ pub trait DebuggerProvider {
     fn active_thread_state(&self, cx: &App) -> Option<ThreadStatus>;
 }
 
+/// Opens a file or directory.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = workspace)]
+pub struct Open {
+    /// When true, opens in a new window. When false, adds to the current
+    /// window as a new workspace (multi-workspace).
+    #[serde(default = "Open::default_create_new_window")]
+    pub create_new_window: bool,
+}
+
+impl Open {
+    pub const DEFAULT: Self = Self {
+        create_new_window: true,
+    };
+
+    /// Used by `#[serde(default)]` on the `create_new_window` field so that
+    /// the serde default and `Open::DEFAULT` stay in sync.
+    fn default_create_new_window() -> bool {
+        Self::DEFAULT.create_new_window
+    }
+}
+
+impl Default for Open {
+    fn default() -> Self {
+        Self::DEFAULT
+    }
+}
+
 actions!(
     workspace,
     [
@@ -248,8 +282,6 @@ actions!(
         NewSearch,
         /// Opens a new window.
         NewWindow,
-        /// Opens a file or directory.
-        Open,
         /// Opens multiple files.
         OpenFiles,
         /// Opens the current location in terminal.
@@ -620,19 +652,19 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
             .update(cx, |multi_workspace, window, cx| {
                 let workspace = multi_workspace.workspace().clone();
                 workspace.update(cx, |workspace, cx| {
-                    prompt_for_open_path_and_open(workspace, app_state, options, window, cx);
+                    prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx);
                 });
             })
             .ok();
     } else {
-        let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, cx);
+        let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, true, cx);
         cx.spawn(async move |cx| {
             let (window, _) = task.await?;
             window.update(cx, |multi_workspace, window, cx| {
                 window.activate_window();
                 let workspace = multi_workspace.workspace().clone();
                 workspace.update(cx, |workspace, cx| {
-                    prompt_for_open_path_and_open(workspace, app_state, options, window, cx);
+                    prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx);
                 });
             })?;
             anyhow::Ok(())
@@ -645,6 +677,7 @@ pub fn prompt_for_open_path_and_open(
     workspace: &mut Workspace,
     app_state: Arc<AppState>,
     options: PathPromptOptions,
+    create_new_window: bool,
     window: &mut Window,
     cx: &mut Context<Workspace>,
 ) {
@@ -654,10 +687,24 @@ pub fn prompt_for_open_path_and_open(
         window,
         cx,
     );
+    let multi_workspace_handle = window.window_handle().downcast::<MultiWorkspace>();
     cx.spawn_in(window, async move |this, cx| {
         let Some(paths) = paths.await.log_err().flatten() else {
             return;
         };
+        if !create_new_window {
+            if let Some(handle) = multi_workspace_handle {
+                if let Some(task) = handle
+                    .update(cx, |multi_workspace, window, cx| {
+                        multi_workspace.open_project(paths, window, cx)
+                    })
+                    .log_err()
+                {
+                    task.await.log_err();
+                }
+                return;
+            }
+        }
         if let Some(task) = this
             .update_in(cx, |this, window, cx| {
                 this.open_workspace_for_paths(false, paths, window, cx)
@@ -1182,6 +1229,7 @@ pub enum Event {
     },
     ZoomChanged,
     ModalOpened,
+    Activate,
 }
 
 #[derive(Debug, Clone)]
@@ -1250,7 +1298,7 @@ pub struct Workspace {
     last_active_center_pane: Option<WeakEntity<Pane>>,
     last_active_view_id: Option<proto::ViewId>,
     status_bar: Entity<StatusBar>,
-    modal_layer: Entity<ModalLayer>,
+    pub(crate) modal_layer: Entity<ModalLayer>,
     toast_layer: Entity<ToastLayer>,
     titlebar_item: Option<AnyView>,
     notifications: Notifications,
@@ -1286,6 +1334,7 @@ pub struct Workspace {
     scheduled_tasks: Vec<Task<()>>,
     last_open_dock_positions: Vec<DockPosition>,
     removing: bool,
+    _panels_task: Option<Task<Result<()>>>,
 }
 
 impl EventEmitter<Event> for Workspace {}
@@ -1601,36 +1650,7 @@ impl Workspace {
                         .timer(Duration::from_millis(100))
                         .await;
                     this.update_in(cx, |this, window, cx| {
-                        if let Some(display) = window.display(cx)
-                            && let Ok(display_uuid) = display.uuid()
-                        {
-                            let window_bounds = window.inner_window_bounds();
-                            let has_paths = !this.root_paths(cx).is_empty();
-                            if !has_paths {
-                                cx.background_executor()
-                                    .spawn(persistence::write_default_window_bounds(
-                                        window_bounds,
-                                        display_uuid,
-                                    ))
-                                    .detach_and_log_err(cx);
-                            }
-                            if let Some(database_id) = workspace_id {
-                                cx.background_executor()
-                                    .spawn(DB.set_window_open_status(
-                                        database_id,
-                                        SerializedWindowBounds(window_bounds),
-                                        display_uuid,
-                                    ))
-                                    .detach_and_log_err(cx);
-                            } else {
-                                cx.background_executor()
-                                    .spawn(persistence::write_default_window_bounds(
-                                        window_bounds,
-                                        display_uuid,
-                                    ))
-                                    .detach_and_log_err(cx);
-                            }
-                        }
+                        this.save_window_bounds(window, cx).detach();
                         this.bounds_save_task_queued.take();
                     })
                     .ok();
@@ -1685,6 +1705,7 @@ impl Workspace {
             left_dock,
             bottom_dock,
             right_dock,
+            _panels_task: None,
             project: project.clone(),
             follower_states: Default::default(),
             last_leaders_by_pane: Default::default(),
@@ -1728,6 +1749,7 @@ impl Workspace {
         requesting_window: Option<WindowHandle<MultiWorkspace>>,
         env: Option<HashMap<String, String>>,
         init: Option<Box<dyn FnOnce(&mut Workspace, &mut Window, &mut Context<Workspace>) + Send>>,
+        activate: bool,
         cx: &mut App,
     ) -> Task<
         anyhow::Result<(
@@ -1855,7 +1877,11 @@ impl Workspace {
 
                             workspace
                         });
-                        multi_workspace.activate(workspace.clone(), cx);
+                        if activate {
+                            multi_workspace.activate(workspace.clone(), cx);
+                        } else {
+                            multi_workspace.add_workspace(workspace.clone(), cx);
+                        }
                         workspace
                     })?;
                     (window, workspace)
@@ -2009,6 +2035,76 @@ impl Workspace {
         [&self.left_dock, &self.bottom_dock, &self.right_dock]
     }
 
+    pub fn capture_dock_state(&self, _window: &Window, cx: &App) -> DockStructure {
+        let left_dock = self.left_dock.read(cx);
+        let left_visible = left_dock.is_open();
+        let left_active_panel = left_dock
+            .active_panel()
+            .map(|panel| panel.persistent_name().to_string());
+        // `zoomed_position` is kept in sync with individual panel zoom state
+        // by the dock code in `Dock::new` and `Dock::add_panel`.
+        let left_dock_zoom = self.zoomed_position == Some(DockPosition::Left);
+
+        let right_dock = self.right_dock.read(cx);
+        let right_visible = right_dock.is_open();
+        let right_active_panel = right_dock
+            .active_panel()
+            .map(|panel| panel.persistent_name().to_string());
+        let right_dock_zoom = self.zoomed_position == Some(DockPosition::Right);
+
+        let bottom_dock = self.bottom_dock.read(cx);
+        let bottom_visible = bottom_dock.is_open();
+        let bottom_active_panel = bottom_dock
+            .active_panel()
+            .map(|panel| panel.persistent_name().to_string());
+        let bottom_dock_zoom = self.zoomed_position == Some(DockPosition::Bottom);
+
+        DockStructure {
+            left: DockData {
+                visible: left_visible,
+                active_panel: left_active_panel,
+                zoom: left_dock_zoom,
+            },
+            right: DockData {
+                visible: right_visible,
+                active_panel: right_active_panel,
+                zoom: right_dock_zoom,
+            },
+            bottom: DockData {
+                visible: bottom_visible,
+                active_panel: bottom_active_panel,
+                zoom: bottom_dock_zoom,
+            },
+        }
+    }
+
+    pub fn set_dock_structure(
+        &self,
+        docks: DockStructure,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        for (dock, data) in [
+            (&self.left_dock, docks.left),
+            (&self.bottom_dock, docks.bottom),
+            (&self.right_dock, docks.right),
+        ] {
+            dock.update(cx, |dock, cx| {
+                dock.serialized_dock = Some(data);
+                dock.restore_state(window, cx);
+            });
+        }
+    }
+
+    pub fn open_item_abs_paths(&self, cx: &App) -> Vec<PathBuf> {
+        self.items(cx)
+            .filter_map(|item| {
+                let project_path = item.project_path(cx)?;
+                self.project.read(cx).absolute_path(&project_path, cx)
+            })
+            .collect()
+    }
+
     pub fn dock_at_position(&self, position: DockPosition) -> &Entity<Dock> {
         match position {
             DockPosition::Left => &self.left_dock,
@@ -2068,6 +2164,14 @@ impl Workspace {
         &self.app_state
     }
 
+    pub fn set_panels_task(&mut self, task: Task<Result<()>>) {
+        self._panels_task = Some(task);
+    }
+
+    pub fn take_panels_task(&mut self) -> Option<Task<Result<()>>> {
+        self._panels_task.take()
+    }
+
     pub fn user_store(&self) -> &Entity<UserStore> {
         &self.app_state.user_store
     }
@@ -2573,7 +2677,15 @@ impl Workspace {
             Task::ready(Ok(callback(self, window, cx)))
         } else {
             let env = self.project.read(cx).cli_environment(cx);
-            let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx);
+            let task = Self::new_local(
+                Vec::new(),
+                self.app_state.clone(),
+                None,
+                env,
+                None,
+                true,
+                cx,
+            );
             cx.spawn_in(window, async move |_vh, cx| {
                 let (multi_workspace_window, _) = task.await?;
                 multi_workspace_window.update(cx, |multi_workspace, window, cx| {
@@ -2603,7 +2715,15 @@ impl Workspace {
             Task::ready(Ok(callback(self, window, cx)))
         } else {
             let env = self.project.read(cx).cli_environment(cx);
-            let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx);
+            let task = Self::new_local(
+                Vec::new(),
+                self.app_state.clone(),
+                None,
+                env,
+                None,
+                true,
+                cx,
+            );
             cx.spawn_in(window, async move |_vh, cx| {
                 let (multi_workspace_window, _) = task.await?;
                 multi_workspace_window.update(cx, |multi_workspace, window, cx| {
@@ -2658,17 +2778,6 @@ impl Workspace {
         });
     }
 
-    pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context<Self>) {
-        let prepare = self.prepare_to_close(CloseIntent::CloseWindow, window, cx);
-        cx.spawn_in(window, async move |_, cx| {
-            if prepare.await? {
-                cx.update(|window, _cx| window.remove_window())?;
-            }
-            anyhow::Ok(())
-        })
-        .detach_and_log_err(cx)
-    }
-
     pub fn move_focused_panel_to_next_position(
         &mut self,
         _: &MoveFocusedPanelToNextPosition,
@@ -2746,6 +2855,7 @@ impl Workspace {
                     .unwrap_or(false)
             {
                 if close_intent == CloseIntent::CloseWindow {
+                    this.update(cx, |_, cx| cx.emit(Event::Activate))?;
                     let answer = cx.update(|window, cx| {
                         window.prompt(
                             PromptLevel::Warning,
@@ -2856,13 +2966,15 @@ impl Workspace {
                     .spawn(cx, async move |cx| {
                         // limit to 100 keystrokes to avoid infinite recursion.
                         for _ in 0..100 {
-                            let mut state = keystrokes.borrow_mut();
-                            let Some(keystroke) = state.queue.pop_front() else {
-                                state.dispatched.clear();
-                                state.task.take();
-                                return;
+                            let keystroke = {
+                                let mut state = keystrokes.borrow_mut();
+                                let Some(keystroke) = state.queue.pop_front() else {
+                                    state.dispatched.clear();
+                                    state.task.take();
+                                    return;
+                                };
+                                keystroke
                             };
-                            drop(state);
                             cx.update(|window, cx| {
                                 let focused = window.focused(cx);
                                 window.dispatch_keystroke(keystroke.clone(), cx);
@@ -2877,6 +2989,10 @@ impl Workspace {
                                 }
                             })
                             .ok();
+
+                            // Yield between synthetic keystrokes so deferred focus and
+                            // other effects can settle before dispatching the next key.
+                            yield_now().await;
                         }
 
                         *keystrokes.borrow_mut() = Default::default();
@@ -2934,6 +3050,10 @@ impl Workspace {
 
                 futures::future::try_join_all(serialize_tasks).await?;
 
+                if !remaining_dirty_items.is_empty() {
+                    workspace.update(cx, |_, cx| cx.emit(Event::Activate))?;
+                }
+
                 if remaining_dirty_items.len() > 1 {
                     let answer = workspace.update_in(cx, |_, window, cx| {
                         let detail = Pane::file_names_for_prompt(
@@ -4275,14 +4395,7 @@ impl Workspace {
                     .find_pane_in_direction(direction, cx)
                     .unwrap_or_else(|| self.active_pane.clone());
                 let new_pane = self.add_pane(window, cx);
-                if self
-                    .center
-                    .split(&split_off_pane, &new_pane, direction, cx)
-                    .log_err()
-                    .is_none()
-                {
-                    return;
-                };
+                self.center.split(&split_off_pane, &new_pane, direction, cx);
                 new_pane
             }
         };
@@ -4465,14 +4578,8 @@ impl Workspace {
                     return;
                 }
                 let new_pane = self.add_pane(window, cx);
-                if self
-                    .center
-                    .split(&self.active_pane, &new_pane, action.direction, cx)
-                    .log_err()
-                    .is_none()
-                {
-                    return;
-                };
+                self.center
+                    .split(&self.active_pane, &new_pane, action.direction, cx);
                 new_pane
             }
         };
@@ -4770,8 +4877,7 @@ impl Workspace {
     ) -> Entity<Pane> {
         let new_pane = self.add_pane(window, cx);
         self.center
-            .split(&pane_to_split, &new_pane, split_direction, cx)
-            .unwrap();
+            .split(&pane_to_split, &new_pane, split_direction, cx);
         cx.notify();
         new_pane
     }
@@ -4790,7 +4896,7 @@ impl Workspace {
         new_pane.update(cx, |pane, cx| {
             pane.add_item(item, true, true, None, window, cx)
         });
-        self.center.split(&pane, &new_pane, direction, cx).unwrap();
+        self.center.split(&pane, &new_pane, direction, cx);
         cx.notify();
     }
 
@@ -4817,7 +4923,7 @@ impl Workspace {
                         pane.set_nav_history(nav_history, cx);
                         pane.add_item(clone, true, true, None, window, cx)
                     });
-                    this.center.split(&pane, &new_pane, direction, cx).unwrap();
+                    this.center.split(&pane, &new_pane, direction, cx);
                     cx.notify();
                     new_pane
                 })
@@ -5871,6 +5977,40 @@ impl Workspace {
         self.session_id.clone()
     }
 
+    fn save_window_bounds(&self, window: &mut Window, cx: &mut App) -> Task<()> {
+        let Some(display) = window.display(cx) else {
+            return Task::ready(());
+        };
+        let Ok(display_uuid) = display.uuid() else {
+            return Task::ready(());
+        };
+
+        let window_bounds = window.inner_window_bounds();
+        let database_id = self.database_id;
+        let has_paths = !self.root_paths(cx).is_empty();
+
+        cx.background_executor().spawn(async move {
+            if !has_paths {
+                persistence::write_default_window_bounds(window_bounds, display_uuid)
+                    .await
+                    .log_err();
+            }
+            if let Some(database_id) = database_id {
+                DB.set_window_open_status(
+                    database_id,
+                    SerializedWindowBounds(window_bounds),
+                    display_uuid,
+                )
+                .await
+                .log_err();
+            } else {
+                persistence::write_default_window_bounds(window_bounds, display_uuid)
+                    .await
+                    .log_err();
+            }
+        })
+    }
+
     /// Bypass the 200ms serialization throttle and write workspace state to
     /// the DB immediately. Returns a task the caller can await to ensure the
     /// write completes. Used by the quit handler so the most recent state
@@ -5878,7 +6018,14 @@ impl Workspace {
     pub fn flush_serialization(&mut self, window: &mut Window, cx: &mut App) -> Task<()> {
         self._schedule_serialize_workspace.take();
         self._serialize_workspace_task.take();
-        self.serialize_workspace_internal(window, cx)
+        self.bounds_save_task_queued.take();
+
+        let bounds_task = self.save_window_bounds(window, cx);
+        let serialize_task = self.serialize_workspace_internal(window, cx);
+        cx.spawn(async move |_| {
+            bounds_task.await;
+            serialize_task.await;
+        })
     }
 
     pub fn root_paths(&self, cx: &App) -> Vec<Arc<Path>> {
@@ -6010,53 +6157,7 @@ impl Workspace {
             window: &mut Window,
             cx: &mut App,
         ) -> DockStructure {
-            let left_dock = this.left_dock.read(cx);
-            let left_visible = left_dock.is_open();
-            let left_active_panel = left_dock
-                .active_panel()
-                .map(|panel| panel.persistent_name().to_string());
-            let left_dock_zoom = left_dock
-                .active_panel()
-                .map(|panel| panel.is_zoomed(window, cx))
-                .unwrap_or(false);
-
-            let right_dock = this.right_dock.read(cx);
-            let right_visible = right_dock.is_open();
-            let right_active_panel = right_dock
-                .active_panel()
-                .map(|panel| panel.persistent_name().to_string());
-            let right_dock_zoom = right_dock
-                .active_panel()
-                .map(|panel| panel.is_zoomed(window, cx))
-                .unwrap_or(false);
-
-            let bottom_dock = this.bottom_dock.read(cx);
-            let bottom_visible = bottom_dock.is_open();
-            let bottom_active_panel = bottom_dock
-                .active_panel()
-                .map(|panel| panel.persistent_name().to_string());
-            let bottom_dock_zoom = bottom_dock
-                .active_panel()
-                .map(|panel| panel.is_zoomed(window, cx))
-                .unwrap_or(false);
-
-            DockStructure {
-                left: DockData {
-                    visible: left_visible,
-                    active_panel: left_active_panel,
-                    zoom: left_dock_zoom,
-                },
-                right: DockData {
-                    visible: right_visible,
-                    active_panel: right_active_panel,
-                    zoom: right_dock_zoom,
-                },
-                bottom: DockData {
-                    visible: bottom_visible,
-                    active_panel: bottom_active_panel,
-                    zoom: bottom_dock_zoom,
-                },
-            }
+            this.capture_dock_state(window, cx)
         }
 
         match self.workspace_location(cx) {
@@ -6340,7 +6441,47 @@ impl Workspace {
         })
     }
 
-    fn actions(&self, div: Div, window: &mut Window, cx: &mut Context<Self>) -> Div {
+    pub fn key_context(&self, cx: &App) -> KeyContext {
+        let mut context = KeyContext::new_with_defaults();
+        context.add("Workspace");
+        context.set("keyboard_layout", cx.keyboard_layout().name().to_string());
+        if let Some(status) = self
+            .debugger_provider
+            .as_ref()
+            .and_then(|provider| provider.active_thread_state(cx))
+        {
+            match status {
+                ThreadStatus::Running | ThreadStatus::Stepping => {
+                    context.add("debugger_running");
+                }
+                ThreadStatus::Stopped => context.add("debugger_stopped"),
+                ThreadStatus::Exited | ThreadStatus::Ended => {}
+            }
+        }
+
+        if self.left_dock.read(cx).is_open() {
+            if let Some(active_panel) = self.left_dock.read(cx).active_panel() {
+                context.set("left_dock", active_panel.panel_key());
+            }
+        }
+
+        if self.right_dock.read(cx).is_open() {
+            if let Some(active_panel) = self.right_dock.read(cx).active_panel() {
+                context.set("right_dock", active_panel.panel_key());
+            }
+        }
+
+        if self.bottom_dock.read(cx).is_open() {
+            if let Some(active_panel) = self.bottom_dock.read(cx).active_panel() {
+                context.set("bottom_dock", active_panel.panel_key());
+            }
+        }
+
+        context
+    }
+
+    /// `MultiWorkspace` uses this to add workspace action handling to itself.
+    pub fn actions(&self, div: Div, window: &mut Window, cx: &mut Context<Self>) -> Div {
         self.add_workspace_actions_listeners(div, window, cx)
             .on_action(cx.listener(
                 |_workspace, action_sequence: &settings::ActionSequence, window, cx| {
@@ -6356,7 +6497,6 @@ impl Workspace {
             .on_action(cx.listener(Self::send_keystrokes))
             .on_action(cx.listener(Self::add_folder_to_project))
             .on_action(cx.listener(Self::follow_next_collaborator))
-            .on_action(cx.listener(Self::close_window))
             .on_action(cx.listener(Self::activate_pane_at_index))
             .on_action(cx.listener(Self::move_item_to_pane_at_index))
             .on_action(cx.listener(Self::move_focused_panel_to_next_position))
@@ -7398,40 +7538,6 @@ impl Render for Workspace {
         if FIRST_PAINT.swap(false, std::sync::atomic::Ordering::Relaxed) {
             log::info!("Rendered first frame");
         }
-        let mut context = KeyContext::new_with_defaults();
-        context.add("Workspace");
-        context.set("keyboard_layout", cx.keyboard_layout().name().to_string());
-        if let Some(status) = self
-            .debugger_provider
-            .as_ref()
-            .and_then(|provider| provider.active_thread_state(cx))
-        {
-            match status {
-                ThreadStatus::Running | ThreadStatus::Stepping => {
-                    context.add("debugger_running");
-                }
-                ThreadStatus::Stopped => context.add("debugger_stopped"),
-                ThreadStatus::Exited | ThreadStatus::Ended => {}
-            }
-        }
-
-        if self.left_dock.read(cx).is_open() {
-            if let Some(active_panel) = self.left_dock.read(cx).active_panel() {
-                context.set("left_dock", active_panel.panel_key());
-            }
-        }
-
-        if self.right_dock.read(cx).is_open() {
-            if let Some(active_panel) = self.right_dock.read(cx).active_panel() {
-                context.set("right_dock", active_panel.panel_key());
-            }
-        }
-
-        if self.bottom_dock.read(cx).is_open() {
-            if let Some(active_panel) = self.bottom_dock.read(cx).active_panel() {
-                context.set("bottom_dock", active_panel.panel_key());
-            }
-        }
 
         let centered_layout = self.centered_layout
             && self.center.panes().len() == 1
@@ -7469,8 +7575,7 @@ impl Render for Workspace {
             .collect::<Vec<_>>();
         let bottom_dock_layout = WorkspaceSettings::get_global(cx).bottom_dock_layout;
 
-        self.actions(div(), window, cx)
-            .key_context(context)
+        div()
             .relative()
             .size_full()
             .flex()
@@ -7870,7 +7975,6 @@ impl Render for Workspace {
                         .when(self.status_bar_visible(cx), |parent| {
                             parent.child(self.status_bar.clone())
                         })
-                        .child(self.modal_layer.clone())
                         .child(self.toast_layer.clone()),
                 )
     }
@@ -8082,6 +8186,7 @@ pub async fn restore_multiworkspace(
                     None,
                     None,
                     None,
+                    true,
                     cx,
                 )
             })
@@ -8111,6 +8216,7 @@ pub async fn restore_multiworkspace(
                     Some(window_handle),
                     None,
                     None,
+                    true,
                     cx,
                 )
             })
@@ -8380,6 +8486,7 @@ pub fn join_channel(
                         requesting_window,
                         None,
                         None,
+                        true,
                         cx,
                     )
                 })
@@ -8452,7 +8559,7 @@ pub async fn get_any_active_multi_workspace(
     // find an existing workspace to focus and show call controls
     let active_window = activate_any_workspace_window(&mut cx);
     if active_window.is_none() {
-        cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx))
+        cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, true, cx))
             .await?;
     }
     activate_any_workspace_window(&mut cx).context("could not open zed")
@@ -8840,6 +8947,7 @@ pub fn open_paths(
                         open_options.replace_window,
                         open_options.env,
                         None,
+                        true,
                         cx,
                     )
                 })
@@ -8903,6 +9011,7 @@ pub fn open_new(
         open_options.replace_window,
         open_options.env,
         Some(Box::new(init)),
+        true,
         cx,
     );
     cx.spawn(async move |cx| {
@@ -10054,6 +10163,87 @@ mod tests {
         assert!(!task.await.unwrap());
     }
 
+    #[gpui::test]
+    async fn test_multi_workspace_close_window_multiple_workspaces_cancel(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree("/root", json!({ "one": "" })).await;
+
+        let project_a = Project::test(fs.clone(), ["root".as_ref()], cx).await;
+        let project_b = Project::test(fs, ["root".as_ref()], cx).await;
+        let multi_workspace_handle =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+        cx.run_until_parked();
+
+        let workspace_a = multi_workspace_handle
+            .read_with(cx, |mw, _| mw.workspace().clone())
+            .unwrap();
+
+        let workspace_b = multi_workspace_handle
+            .update(cx, |mw, window, cx| {
+                mw.test_add_workspace(project_b, window, cx)
+            })
+            .unwrap();
+
+        // Activate workspace A
+        multi_workspace_handle
+            .update(cx, |mw, window, cx| {
+                mw.activate_index(0, window, cx);
+            })
+            .unwrap();
+
+        let cx = &mut VisualTestContext::from_window(multi_workspace_handle.into(), cx);
+
+        // Workspace A has a clean item
+        let item_a = cx.new(TestItem::new);
+        workspace_a.update_in(cx, |w, window, cx| {
+            w.add_item_to_active_pane(Box::new(item_a.clone()), None, true, window, cx)
+        });
+
+        // Workspace B has a dirty item
+        let item_b = cx.new(|cx| TestItem::new(cx).with_dirty(true));
+        workspace_b.update_in(cx, |w, window, cx| {
+            w.add_item_to_active_pane(Box::new(item_b.clone()), None, true, window, cx)
+        });
+
+        // Verify workspace A is active
+        multi_workspace_handle
+            .read_with(cx, |mw, _| {
+                assert_eq!(mw.active_workspace_index(), 0);
+            })
+            .unwrap();
+
+        // Dispatch CloseWindow — workspace A will pass, workspace B will prompt
+        multi_workspace_handle
+            .update(cx, |mw, window, cx| {
+                mw.close_window(&CloseWindow, window, cx);
+            })
+            .unwrap();
+        cx.run_until_parked();
+
+        // Workspace B should now be active since it has dirty items that need attention
+        multi_workspace_handle
+            .read_with(cx, |mw, _| {
+                assert_eq!(
+                    mw.active_workspace_index(),
+                    1,
+                    "workspace B should be activated when it prompts"
+                );
+            })
+            .unwrap();
+
+        // User cancels the save prompt from workspace B
+        cx.simulate_prompt_answer("Cancel");
+        cx.run_until_parked();
+
+        // Window should still exist because workspace B's close was cancelled
+        assert!(
+            multi_workspace_handle.update(cx, |_, _, _| ()).is_ok(),
+            "window should still exist after cancelling one workspace's close"
+        );
+    }
+
     #[gpui::test]
     async fn test_close_window_with_serializable_items(cx: &mut TestAppContext) {
         init_test(cx);
@@ -10502,6 +10692,85 @@ mod tests {
         item.read_with(cx, |item, _| assert_eq!(item.save_count, 6));
     }
 
+    #[gpui::test]
+    async fn test_autosave_on_focus_change_in_multibuffer(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        let project = Project::test(fs, [], cx).await;
+        let (workspace, cx) =
+            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+
+        // Create a multibuffer-like item with two child focus handles,
+        // simulating individual buffer editors within a multibuffer.
+        let item = cx.new(|cx| {
+            TestItem::new(cx)
+                .with_project_items(&[TestProjectItem::new(1, "1.txt", cx)])
+                .with_child_focus_handles(2, cx)
+        });
+        workspace.update_in(cx, |workspace, window, cx| {
+            workspace.add_item_to_active_pane(Box::new(item.clone()), None, true, window, cx);
+        });
+
+        // Set autosave to OnFocusChange and focus the first child handle,
+        // simulating the user's cursor being inside one of the multibuffer's excerpts.
+        item.update_in(cx, |item, window, cx| {
+            SettingsStore::update_global(cx, |settings, cx| {
+                settings.update_user_settings(cx, |settings| {
+                    settings.workspace.autosave = Some(AutosaveSetting::OnFocusChange);
+                })
+            });
+            item.is_dirty = true;
+            window.focus(&item.child_focus_handles[0], cx);
+        });
+        cx.executor().run_until_parked();
+        item.read_with(cx, |item, _| assert_eq!(item.save_count, 0));
+
+        // Moving focus from one child to another within the same item should
+        // NOT trigger autosave — focus is still within the item's focus hierarchy.
+        item.update_in(cx, |item, window, cx| {
+            window.focus(&item.child_focus_handles[1], cx);
+        });
+        cx.executor().run_until_parked();
+        item.read_with(cx, |item, _| {
+            assert_eq!(
+                item.save_count, 0,
+                "Switching focus between children within the same item should not autosave"
+            );
+        });
+
+        // Blurring the item saves the file. This is the core regression scenario:
+        // with `on_blur`, this would NOT trigger because `on_blur` only fires when
+        // the item's own focus handle is the leaf that lost focus. In a multibuffer,
+        // the leaf is always a child focus handle, so `on_blur` never detected
+        // focus leaving the item.
+        item.update_in(cx, |_, window, _| window.blur());
+        cx.executor().run_until_parked();
+        item.read_with(cx, |item, _| {
+            assert_eq!(
+                item.save_count, 1,
+                "Blurring should trigger autosave when focus was on a child of the item"
+            );
+        });
+
+        // Deactivating the window should also trigger autosave when a child of
+        // the multibuffer item currently owns focus.
+        item.update_in(cx, |item, window, cx| {
+            item.is_dirty = true;
+            window.focus(&item.child_focus_handles[0], cx);
+        });
+        cx.executor().run_until_parked();
+        item.read_with(cx, |item, _| assert_eq!(item.save_count, 1));
+
+        cx.deactivate_window();
+        item.read_with(cx, |item, _| {
+            assert_eq!(
+                item.save_count, 2,
+                "Deactivating window should trigger autosave when focus was on a child"
+            );
+        });
+    }
+
     #[gpui::test]
     async fn test_pane_navigation(cx: &mut gpui::TestAppContext) {
         init_test(cx);
@@ -10564,8 +10833,9 @@ mod tests {
         init_test(cx);
         let fs = FakeFs::new(cx.executor());
         let project = Project::test(fs, [], cx).await;
-        let (workspace, cx) =
-            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         workspace.update_in(cx, |workspace, window, cx| {
             let first_item = cx.new(|cx| {
@@ -11059,8 +11329,9 @@ mod tests {
         init_test(cx);
         let fs = FakeFs::new(cx.executor());
         let project = Project::test(fs, [], cx).await;
-        let (workspace, cx) =
-            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         // Open two docks (left and right) with one panel each
         let (left_panel, right_panel) = workspace.update_in(cx, |workspace, window, cx| {
@@ -11491,8 +11762,9 @@ mod tests {
         let fs = FakeFs::new(cx.executor());
 
         let project = Project::test(fs, [], cx).await;
-        let (workspace, cx) =
-            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         let (panel_1, panel_2) = workspace.update_in(cx, |workspace, window, cx| {
             let panel_1 = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx));
@@ -12399,8 +12671,9 @@ mod tests {
         init_test(cx);
         let fs = FakeFs::new(cx.executor());
         let project = Project::test(fs, [], cx).await;
-        let (workspace, cx) =
-            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
 
         // Add a new panel to the right dock, opening the dock and setting the
         // focus to the new panel.
@@ -13089,8 +13362,9 @@ mod tests {
 
         let fs = FakeFs::new(cx.executor());
         let project = Project::test(fs, [], cx).await;
-        let (workspace, cx) =
-            cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+        let (multi_workspace, cx) =
+            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
         let panel = workspace.update_in(cx, |workspace, window, cx| {
             let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, 100, cx));
             workspace.add_panel(panel.clone(), window, cx);

crates/worktree/src/worktree.rs 🔗

@@ -2945,7 +2945,7 @@ impl BackgroundScannerState {
         self.snapshot.check_invariants(false);
     }
 
-    fn remove_path(&mut self, path: &RelPath) {
+    fn remove_path(&mut self, path: &RelPath, watcher: &dyn Watcher) {
         log::trace!("background scanner removing path {path:?}");
         let mut new_entries;
         let removed_entries;
@@ -2961,7 +2961,12 @@ impl BackgroundScannerState {
         self.snapshot.entries_by_path = new_entries;
 
         let mut removed_ids = Vec::with_capacity(removed_entries.summary().count);
+        let mut removed_dir_abs_paths = Vec::new();
         for entry in removed_entries.cursor::<()>(()) {
+            if entry.is_dir() {
+                removed_dir_abs_paths.push(self.snapshot.absolutize(&entry.path));
+            }
+
             match self.removed_entries.entry(entry.inode) {
                 hash_map::Entry::Occupied(mut e) => {
                     let prev_removed_entry = e.get_mut();
@@ -2997,6 +3002,10 @@ impl BackgroundScannerState {
             .git_repositories
             .retain(|id, _| removed_ids.binary_search(id).is_err());
 
+        for removed_dir_abs_path in removed_dir_abs_paths {
+            watcher.remove(&removed_dir_abs_path).log_err();
+        }
+
         #[cfg(feature = "test-support")]
         self.snapshot.check_invariants(false);
     }
@@ -4461,7 +4470,10 @@ impl BackgroundScanner {
 
             if self.settings.is_path_excluded(&child_path) {
                 log::debug!("skipping excluded child entry {child_path:?}");
-                self.state.lock().await.remove_path(&child_path);
+                self.state
+                    .lock()
+                    .await
+                    .remove_path(&child_path, self.watcher.as_ref());
                 continue;
             }
 
@@ -4651,7 +4663,7 @@ impl BackgroundScanner {
         // detected regardless of the order of the paths.
         for (path, metadata) in relative_paths.iter().zip(metadata.iter()) {
             if matches!(metadata, Ok(None)) || doing_recursive_update {
-                state.remove_path(path);
+                state.remove_path(path, self.watcher.as_ref());
             }
         }
 

crates/x_ai/src/x_ai.rs 🔗

@@ -165,6 +165,18 @@ impl Model {
         }
     }
 
+    pub fn requires_json_schema_subset(&self) -> bool {
+        match self {
+            Self::Grok4
+            | Self::Grok4FastReasoning
+            | Self::Grok4FastNonReasoning
+            | Self::Grok41FastNonReasoning
+            | Self::Grok41FastReasoning
+            | Self::GrokCodeFast1 => true,
+            _ => false,
+        }
+    }
+
     pub fn supports_prompt_cache_key(&self) -> bool {
         false
     }

crates/zed/Cargo.toml 🔗

@@ -2,7 +2,7 @@
 description = "The fast, collaborative code editor."
 edition.workspace = true
 name = "zed"
-version = "0.226.0"
+version = "0.228.0"
 publish.workspace = true
 license = "GPL-3.0-or-later"
 authors = ["Zed Team <hi@zed.dev>"]
@@ -17,7 +17,6 @@ test-support = [
     "gpui/test-support",
     "gpui_platform/screen-capture",
     "dep:image",
-    "dep:semver",
     "workspace/test-support",
     "project/test-support",
     "editor/test-support",
@@ -32,7 +31,6 @@ visual-tests = [
     "gpui_platform/screen-capture",
     "gpui_platform/test-support",
     "dep:image",
-    "dep:semver",
     "dep:tempfile",
     "dep:action_log",
     "dep:agent_servers",
@@ -50,7 +48,6 @@ visual-tests = [
     "language_model/test-support",
     "fs/test-support",
     "recent_projects/test-support",
-    "sidebar/test-support",
     "title_bar/test-support",
 ]
 
@@ -76,7 +73,6 @@ assets.workspace = true
 audio.workspace = true
 auto_update.workspace = true
 auto_update_ui.workspace = true
-bincode.workspace = true
 breadcrumbs.workspace = true
 call.workspace = true
 chrono.workspace = true
@@ -94,6 +90,7 @@ copilot.workspace = true
 copilot_chat.workspace = true
 copilot_ui.workspace = true
 crashes.workspace = true
+csv_preview.workspace = true
 dap_adapters.workspace = true
 db.workspace = true
 debug_adapter_extension.workspace = true
@@ -118,14 +115,10 @@ git_hosting_providers.workspace = true
 git_ui.workspace = true
 go_to_line.workspace = true
 system_specs.workspace = true
-gpui = { workspace = true, features = [
-    "wayland",
-    "windows-manifest",
-    "x11",
-] }
+gpui.workspace = true
 gpui_platform = {workspace = true, features=["screen-capture", "font-kit", "wayland", "x11"]}
 image = { workspace = true, optional = true }
-semver = { workspace = true, optional = true }
+semver.workspace = true
 tempfile = { workspace = true, optional = true }
 clock = { workspace = true, optional = true }
 acp_thread.workspace = true
@@ -193,7 +186,6 @@ sidebar.workspace = true
 smol.workspace = true
 snippet_provider.workspace = true
 snippets_ui.workspace = true
-supermaven.workspace = true
 svg_preview.workspace = true
 sysinfo.workspace = true
 tab_switcher.workspace = true
@@ -232,13 +224,23 @@ zlog_settings.workspace = true
 [target.'cfg(target_os = "windows")'.dependencies]
 etw_tracing.workspace = true
 windows.workspace = true
+gpui = { workspace = true, features = [
+    "windows-manifest",
+] }
 
 [target.'cfg(target_os = "windows")'.build-dependencies]
 winresource = "0.1"
 
 [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies]
+gpui = { workspace = true, features = [
+    "wayland",
+    "x11",
+] }
 ashpd.workspace = true
 
+[target.'cfg(target_os = "linux")'.build-dependencies]
+pkg-config = "0.3.22"
+
 [dev-dependencies]
 call = { workspace = true, features = ["test-support"] }
 dap = { workspace = true, features = ["test-support"] }

crates/zed/build.rs 🔗

@@ -2,6 +2,25 @@
 use std::process::Command;
 
 fn main() {
+    #[cfg(target_os = "linux")]
+    {
+        // Add rpaths for libraries that webrtc-sys dlopens at runtime.
+        // This is mostly required for hosts with non-standard SO installation
+        // locations such as NixOS.
+        let dlopened_libs = ["libva", "libva-drm"];
+
+        let mut rpath_dirs = std::collections::BTreeSet::new();
+        for lib in &dlopened_libs {
+            if let Some(libdir) = pkg_config::get_variable(lib, "libdir").ok() {
+                rpath_dirs.insert(libdir);
+            }
+        }
+
+        for dir in &rpath_dirs {
+            println!("cargo:rustc-link-arg=-Wl,-rpath,{dir}");
+        }
+    }
+
     if cfg!(target_os = "macos") {
         println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");
 

crates/zed/src/main.rs 🔗

@@ -335,7 +335,13 @@ fn main() {
     crashes::init(
         InitCrashHandler {
             session_id,
-            zed_version: app_version.to_string(),
+            // strip the build and channel information from the version string, we send them separately
+            zed_version: semver::Version::new(
+                app_version.major,
+                app_version.minor,
+                app_version.patch,
+            )
+            .to_string(),
             binary: "zed".to_string(),
             release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(),
             commit_sha: app_commit_sha
@@ -573,6 +579,19 @@ fn main() {
             session.id().to_owned(),
             cx,
         );
+        cx.subscribe(&user_store, {
+            let telemetry = telemetry.clone();
+            move |_, evt: &client::user::Event, _| match evt {
+                client::user::Event::PrivateUserInfoUpdated => {
+                    crashes::set_user_info(crashes::UserInfo {
+                        metrics_id: telemetry.metrics_id().map(|s| s.to_string()),
+                        is_staff: telemetry.is_staff(),
+                    });
+                }
+                _ => {}
+            }
+        })
+        .detach();
 
         // We should rename these in the future to `first app open`, `first app open for release channel`, and `app open`
         if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) {
@@ -638,7 +657,6 @@ fn main() {
         );
 
         copilot_ui::init(&app_state, cx);
-        supermaven::init(app_state.client.clone(), cx);
         language_model::init(app_state.client.clone(), cx);
         language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
         acp_tools::init(cx);
@@ -646,7 +664,7 @@ fn main() {
         zed::remote_debug::init(cx);
         edit_prediction_ui::init(cx);
         web_search::init(cx);
-        web_search_providers::init(app_state.client.clone(), cx);
+        web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx);
         snippet_provider::init(cx);
         edit_prediction_registry::init(app_state.client.clone(), app_state.user_store.clone(), cx);
         let prompt_builder = PromptBuilder::load(app_state.fs.clone(), stdout_is_a_pty(), cx);
@@ -716,6 +734,7 @@ fn main() {
         git_graph::init(cx);
         feedback::init(cx);
         markdown_preview::init(cx);
+        csv_preview::init(cx);
         svg_preview::init(cx);
         onboarding::init(cx);
         settings_ui::init(cx);
@@ -951,7 +970,12 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
 
                     thread_store
                         .update(&mut cx.clone(), |store, cx| {
-                            store.save_thread(save_session_id.clone(), db_thread, cx)
+                            store.save_thread(
+                                save_session_id.clone(),
+                                db_thread,
+                                Default::default(),
+                                cx,
+                            )
                         })
                         .await?;
 
@@ -1393,7 +1417,7 @@ pub(crate) async fn restore_or_create_workspace(
                         .update(cx, |multi_workspace, _, cx| {
                             multi_workspace.workspace().update(cx, |workspace, cx| {
                                 workspace.show_toast(
-                                    Toast::new(NotificationId::unique::<()>(), message),
+                                    Toast::new(NotificationId::unique::<()>(), message.clone()),
                                     cx,
                                 )
                             });
@@ -1405,11 +1429,23 @@ pub(crate) async fn restore_or_create_workspace(
             });
 
             // If we couldn't show a toast (no windows opened successfully),
-            // we've already logged the errors above, so the user can check logs
+            // open a fallback empty workspace and show the error there
             if !toast_shown {
-                log::error!(
-                    "Failed to show notification for window restoration errors, because no workspace windows were available."
-                );
+                log::error!("All workspace restorations failed. Opening fallback empty workspace.");
+                cx.update(|cx| {
+                    workspace::open_new(
+                        Default::default(),
+                        app_state.clone(),
+                        cx,
+                        |workspace, _window, cx| {
+                            workspace.show_toast(
+                                Toast::new(NotificationId::unique::<()>(), message),
+                                cx,
+                            );
+                        },
+                    )
+                })
+                .await?;
             }
         }
     } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {

crates/zed/src/reliability.rs 🔗

@@ -144,7 +144,7 @@ fn cleanup_old_hang_traces() {
                 entry
                     .path()
                     .extension()
-                    .is_some_and(|ext| ext == "miniprof")
+                    .is_some_and(|ext| ext == "json" || ext == "miniprof")
             })
             .collect();
 
@@ -175,7 +175,7 @@ fn save_hang_trace(
         .collect::<Vec<_>>();
 
     let trace_path = paths::hang_traces_dir().join(&format!(
-        "hang-{}.miniprof",
+        "hang-{}.miniprof.json",
         hang_time.format("%Y-%m-%d_%H-%M-%S")
     ));
 
@@ -193,7 +193,7 @@ fn save_hang_trace(
                 entry
                     .path()
                     .extension()
-                    .is_some_and(|ext| ext == "miniprof")
+                    .is_some_and(|ext| ext == "json" || ext == "miniprof")
             })
             .collect();
 
@@ -288,16 +288,23 @@ async fn upload_minidump(
         form = form.text("minidump_error", minidump_error);
     }
 
-    if let Some(id) = client.telemetry().metrics_id() {
-        form = form.text("sentry[user][id]", id.to_string());
+    if let Some(is_staff) = &metadata
+        .user_info
+        .as_ref()
+        .and_then(|user_info| user_info.is_staff)
+    {
         form = form.text(
             "sentry[user][is_staff]",
-            if client.telemetry().is_staff().unwrap_or_default() {
-                "true"
-            } else {
-                "false"
-            },
+            if *is_staff { "true" } else { "false" },
         );
+    }
+
+    if let Some(metrics_id) = metadata
+        .user_info
+        .as_ref()
+        .and_then(|user_info| user_info.metrics_id.as_ref())
+    {
+        form = form.text("sentry[user][id]", metrics_id.clone());
     } else if let Some(id) = client.telemetry().installation_id() {
         form = form.text("sentry[user][id]", format!("installation-{}", id))
     }
@@ -397,7 +404,7 @@ struct BuildTiming {
     duration_ms: f32,
     first_crate: String,
     target: String,
-    lock_wait_ms: f32,
+    blocked_ms: f32,
     command: String,
 }
 
@@ -452,7 +459,7 @@ async fn upload_build_timings(_client: Arc<Client>) -> Result<()> {
             duration_ms = timing.duration_ms,
             first_crate = timing.first_crate,
             target = timing.target,
-            lock_wait_ms = timing.lock_wait_ms,
+            blocked_ms = timing.blocked_ms,
             command = timing.command,
             cpu_count = cpu_count,
             ram_size_gb = ram_size_gb

crates/zed/src/visual_test_runner.rs 🔗

@@ -42,6 +42,55 @@ fn main() {
     std::process::exit(1);
 }
 
+#[cfg(target_os = "macos")]
+fn main() {
+    // Set ZED_STATELESS early to prevent file system access to real config directories
+    // This must be done before any code accesses zed_env_vars::ZED_STATELESS
+    // SAFETY: We're at the start of main(), before any threads are spawned
+    unsafe {
+        std::env::set_var("ZED_STATELESS", "1");
+    }
+
+    env_logger::builder()
+        .filter_level(log::LevelFilter::Info)
+        .init();
+
+    let update_baseline = std::env::var("UPDATE_BASELINE").is_ok();
+
+    // Create a temporary directory for test files
+    // Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var)
+    // which prevents "path does not exist" errors during worktree scanning
+    // Use keep() to prevent auto-cleanup - background worktree tasks may still be running
+    // when tests complete, so we let the OS clean up temp directories on process exit
+    let temp_dir = tempfile::tempdir().expect("Failed to create temp directory");
+    let temp_path = temp_dir.keep();
+    let canonical_temp = temp_path
+        .canonicalize()
+        .expect("Failed to canonicalize temp directory");
+    let project_path = canonical_temp.join("project");
+    std::fs::create_dir_all(&project_path).expect("Failed to create project directory");
+
+    // Create test files in the real filesystem
+    create_test_files(&project_path);
+
+    let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline));
+
+    // Note: We don't delete temp_path here because background worktree tasks may still
+    // be running. The directory will be cleaned up when the process exits or by the OS.
+
+    match test_result {
+        Ok(Ok(())) => {}
+        Ok(Err(e)) => {
+            eprintln!("Visual tests failed: {}", e);
+            std::process::exit(1);
+        }
+        Err(_) => {
+            eprintln!("Visual tests panicked");
+            std::process::exit(1);
+        }
+    }
+}
+
 // All macOS-specific imports grouped together
 #[cfg(target_os = "macos")]
 use {
@@ -50,7 +99,6 @@ use {
     agent_servers::{AgentServer, AgentServerDelegate},
     anyhow::{Context as _, Result},
     assets::Assets,
-    chrono::{Duration as ChronoDuration, Utc},
     editor::display_map::DisplayRow,
     feature_flags::FeatureFlagAppExt as _,
     git_ui::project_diff::ProjectDiff,
@@ -60,7 +108,6 @@ use {
     },
     image::RgbaImage,
     project_panel::ProjectPanel,
-    recent_projects::RecentProjectEntry,
     settings::{NotifyWhenAgentWaiting, Settings as _},
     settings_ui::SettingsWindow,
     std::{
@@ -71,7 +118,7 @@ use {
         time::Duration,
     },
     util::ResultExt as _,
-    workspace::{AppState, MultiWorkspace, Workspace, WorkspaceId},
+    workspace::{AppState, MultiWorkspace, Panel as _, Workspace},
     zed_actions::OpenSettingsAt,
 };
 
@@ -97,55 +144,6 @@ mod constants {
 #[cfg(target_os = "macos")]
 use constants::*;
 
-#[cfg(target_os = "macos")]
-fn main() {
-    // Set ZED_STATELESS early to prevent file system access to real config directories
-    // This must be done before any code accesses zed_env_vars::ZED_STATELESS
-    // SAFETY: We're at the start of main(), before any threads are spawned
-    unsafe {
-        std::env::set_var("ZED_STATELESS", "1");
-    }
-
-    env_logger::builder()
-        .filter_level(log::LevelFilter::Info)
-        .init();
-
-    let update_baseline = std::env::var("UPDATE_BASELINE").is_ok();
-
-    // Create a temporary directory for test files
-    // Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var)
-    // which prevents "path does not exist" errors during worktree scanning
-    // Use keep() to prevent auto-cleanup - background worktree tasks may still be running
-    // when tests complete, so we let the OS clean up temp directories on process exit
-    let temp_dir = tempfile::tempdir().expect("Failed to create temp directory");
-    let temp_path = temp_dir.keep();
-    let canonical_temp = temp_path
-        .canonicalize()
-        .expect("Failed to canonicalize temp directory");
-    let project_path = canonical_temp.join("project");
-    std::fs::create_dir_all(&project_path).expect("Failed to create project directory");
-
-    // Create test files in the real filesystem
-    create_test_files(&project_path);
-
-    let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline));
-
-    // Note: We don't delete temp_path here because background worktree tasks may still
-    // be running. The directory will be cleaned up when the process exits or by the OS.
-
-    match test_result {
-        Ok(Ok(())) => {}
-        Ok(Err(e)) => {
-            eprintln!("Visual tests failed: {}", e);
-            std::process::exit(1);
-        }
-        Err(_) => {
-            eprintln!("Visual tests panicked");
-            std::process::exit(1);
-        }
-    }
-}
-
 #[cfg(target_os = "macos")]
 fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> {
     // Create the visual test context with deterministic task scheduling
@@ -548,6 +546,27 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()>
         }
     }
 
+    // Run Test 11: Thread target selector visual tests
+    #[cfg(feature = "visual-tests")]
+    {
+        println!("\n--- Test 11: start_thread_in_selector (6 variants) ---");
+        match run_start_thread_in_selector_visual_tests(app_state.clone(), &mut cx, update_baseline)
+        {
+            Ok(TestResult::Passed) => {
+                println!("✓ start_thread_in_selector: PASSED");
+                passed += 1;
+            }
+            Ok(TestResult::BaselineUpdated(_)) => {
+                println!("✓ start_thread_in_selector: Baselines updated");
+                updated += 1;
+            }
+            Err(e) => {
+                eprintln!("✗ start_thread_in_selector: FAILED - {}", e);
+                failed += 1;
+            }
+        }
+    }
+
     // Run Test 9: Tool Permissions Settings UI visual test
     println!("\n--- Test 9: tool_permissions_settings ---");
     match run_tool_permissions_visual_tests(app_state.clone(), &mut cx, update_baseline) {
@@ -1945,11 +1964,10 @@ impl AgentServer for StubAgentServer {
 
     fn connect(
         &self,
-        _root_dir: Option<&Path>,
         _delegate: AgentServerDelegate,
         _cx: &mut App,
-    ) -> gpui::Task<gpui::Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
-        gpui::Task::ready(Ok((Rc::new(self.connection.clone()), None)))
+    ) -> gpui::Task<gpui::Result<Rc<dyn AgentConnection>>> {
+        gpui::Task::ready(Ok(Rc::new(self.connection.clone())))
     }
 
     fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
@@ -1963,7 +1981,7 @@ fn run_agent_thread_view_test(
     cx: &mut VisualTestAppContext,
     update_baseline: bool,
 ) -> Result<TestResult> {
-    use agent::AgentTool;
+    use agent::{AgentTool, ToolInput};
     use agent_ui::AgentPanel;
 
     // Create a temporary directory with the test image
@@ -2012,32 +2030,9 @@ fn run_agent_thread_view_test(
 
     // Create the necessary entities for the ReadFileTool
     let action_log = cx.update(|cx| cx.new(|_| action_log::ActionLog::new(project.clone())));
-    let context_server_registry = cx.update(|cx| {
-        cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx))
-    });
-    let fake_model = Arc::new(language_model::fake_provider::FakeLanguageModel::default());
-    let project_context = cx.update(|cx| cx.new(|_| prompt_store::ProjectContext::default()));
-
-    // Create the agent Thread
-    let thread = cx.update(|cx| {
-        cx.new(|cx| {
-            agent::Thread::new(
-                project.clone(),
-                project_context,
-                context_server_registry,
-                agent::Templates::new(),
-                Some(fake_model),
-                cx,
-            )
-        })
-    });
 
     // Create the ReadFileTool
-    let tool = Arc::new(agent::ReadFileTool::new(
-        thread.downgrade(),
-        project.clone(),
-        action_log,
-    ));
+    let tool = Arc::new(agent::ReadFileTool::new(project.clone(), action_log, true));
 
     // Create a test event stream to capture tool output
     let (event_stream, mut event_receiver) = agent::ToolCallEventStream::test();
@@ -2048,7 +2043,10 @@ fn run_agent_thread_view_test(
         start_line: None,
         end_line: None,
     };
-    let run_task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
+    let run_task = cx.update(|cx| {
+        tool.clone()
+            .run(ToolInput::resolved(input), event_stream, cx)
+    });
 
     cx.background_executor.allow_parking();
     let run_result = cx.foreground_executor.block_test(run_task);
@@ -2528,16 +2526,6 @@ fn run_multi_workspace_sidebar_visual_tests(
     std::fs::create_dir_all(&workspace1_dir)?;
     std::fs::create_dir_all(&workspace2_dir)?;
 
-    // Create directories for recent projects (they must exist on disk for display)
-    let recent1_dir = canonical_temp.join("tiny-project");
-    let recent2_dir = canonical_temp.join("font-kit");
-    let recent3_dir = canonical_temp.join("ideas");
-    let recent4_dir = canonical_temp.join("tmp");
-    std::fs::create_dir_all(&recent1_dir)?;
-    std::fs::create_dir_all(&recent2_dir)?;
-    std::fs::create_dir_all(&recent3_dir)?;
-    std::fs::create_dir_all(&recent4_dir)?;
-
     // Enable the agent-v2 feature flag so multi-workspace is active
     cx.update(|cx| {
         cx.update_flags(true, vec!["agent-v2".to_string()]);
@@ -2677,83 +2665,78 @@ fn run_multi_workspace_sidebar_visual_tests(
 
     cx.run_until_parked();
 
-    // Inject recent project entries into the sidebar.
-    // We update the sidebar entity directly (not through the MultiWorkspace window update)
-    // to avoid a re-entrant read panic: rebuild_entries reads MultiWorkspace, so we can't
-    // be inside a MultiWorkspace update when that happens.
-    cx.update(|cx| {
-        sidebar.update(cx, |sidebar, cx| {
-            let now = Utc::now();
-            let today_timestamp = now;
-            let yesterday_timestamp = now - ChronoDuration::days(1);
-            let past_week_timestamp = now - ChronoDuration::days(10);
-            let all_timestamp = now - ChronoDuration::days(60);
-
-            let recent_projects = vec![
-                RecentProjectEntry {
-                    name: "tiny-project".into(),
-                    full_path: recent1_dir.to_string_lossy().to_string().into(),
-                    paths: vec![recent1_dir.clone()],
-                    workspace_id: WorkspaceId::default(),
-                    timestamp: today_timestamp,
-                },
-                RecentProjectEntry {
-                    name: "font-kit".into(),
-                    full_path: recent2_dir.to_string_lossy().to_string().into(),
-                    paths: vec![recent2_dir.clone()],
-                    workspace_id: WorkspaceId::default(),
-                    timestamp: yesterday_timestamp,
-                },
-                RecentProjectEntry {
-                    name: "ideas".into(),
-                    full_path: recent3_dir.to_string_lossy().to_string().into(),
-                    paths: vec![recent3_dir.clone()],
-                    workspace_id: WorkspaceId::default(),
-                    timestamp: past_week_timestamp,
-                },
-                RecentProjectEntry {
-                    name: "tmp".into(),
-                    full_path: recent4_dir.to_string_lossy().to_string().into(),
-                    paths: vec![recent4_dir.clone()],
-                    workspace_id: WorkspaceId::default(),
-                    timestamp: all_timestamp,
-                },
-            ];
-            sidebar.set_test_recent_projects(recent_projects, cx);
-        });
-    });
-
-    // Set thread info directly on the sidebar for visual testing
-    cx.update(|cx| {
-        sidebar.update(cx, |sidebar, _cx| {
-            sidebar.set_test_thread_info(
-                0,
-                "Refine thread view scrolling behavior".into(),
-                ui::AgentThreadStatus::Completed,
-            );
-            sidebar.set_test_thread_info(
-                1,
-                "Add line numbers option to FileEditBlock".into(),
-                ui::AgentThreadStatus::Running,
-            );
-        });
-    });
+    // Save test threads to the ThreadStore for each workspace
+    let save_tasks = multi_workspace_window
+        .update(cx, |multi_workspace, _window, cx| {
+            let thread_store = agent::ThreadStore::global(cx);
+            let workspaces = multi_workspace.workspaces().to_vec();
+            let mut tasks = Vec::new();
+
+            for (index, workspace) in workspaces.iter().enumerate() {
+                let workspace_ref = workspace.read(cx);
+                let mut paths = Vec::new();
+                for worktree in workspace_ref.worktrees(cx) {
+                    let worktree_ref = worktree.read(cx);
+                    if worktree_ref.is_visible() {
+                        paths.push(worktree_ref.abs_path().to_path_buf());
+                    }
+                }
+                let path_list = util::path_list::PathList::new(&paths);
+
+                let (session_id, title, updated_at) = match index {
+                    0 => (
+                        "visual-test-thread-0",
+                        "Refine thread view scrolling behavior",
+                        chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 10, 30, 0)
+                            .unwrap(),
+                    ),
+                    1 => (
+                        "visual-test-thread-1",
+                        "Add line numbers option to FileEditBlock",
+                        chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 11, 0, 0)
+                            .unwrap(),
+                    ),
+                    _ => continue,
+                };
+
+                let task = thread_store.update(cx, |store, cx| {
+                    store.save_thread(
+                        acp::SessionId::new(Arc::from(session_id)),
+                        agent::DbThread {
+                            title: title.to_string().into(),
+                            messages: Vec::new(),
+                            updated_at,
+                            detailed_summary: None,
+                            initial_project_snapshot: None,
+                            cumulative_token_usage: Default::default(),
+                            request_token_usage: Default::default(),
+                            model: None,
+                            profile: None,
+                            imported: false,
+                            subagent_context: None,
+                            speed: None,
+                            thinking_enabled: false,
+                            thinking_effort: None,
+                            ui_scroll_position: None,
+                            draft_prompt: None,
+                        },
+                        path_list,
+                        cx,
+                    )
+                });
+                tasks.push(task);
+            }
+            tasks
+        })
+        .context("Failed to create test threads")?;
 
-    // Set last-worked-on thread titles on some recent projects for visual testing
-    cx.update(|cx| {
-        sidebar.update(cx, |sidebar, cx| {
-            sidebar.set_test_recent_project_thread_title(
-                recent1_dir.to_string_lossy().to_string().into(),
-                "Fix flaky test in CI pipeline".into(),
-                cx,
-            );
-            sidebar.set_test_recent_project_thread_title(
-                recent2_dir.to_string_lossy().to_string().into(),
-                "Upgrade font rendering engine".into(),
-                cx,
-            );
-        });
-    });
+    cx.background_executor.allow_parking();
+    for task in save_tasks {
+        cx.foreground_executor
+            .block_test(task)
+            .context("Failed to save test thread")?;
+    }
+    cx.background_executor.forbid_parking();
 
     cx.run_until_parked();
 
@@ -2909,12 +2892,12 @@ impl gpui::Render for ThreadItemIconDecorationsTestView {
                 container()
                     .child(ThreadItem::new("ti-none", "Default idle thread").timestamp("1:00 AM")),
             )
-            .child(section_label("Blue dot (generation done)"))
+            .child(section_label("Blue dot (notified)"))
             .child(
                 container().child(
                     ThreadItem::new("ti-done", "Generation completed successfully")
                         .timestamp("1:05 AM")
-                        .generation_done(true),
+                        .notified(true),
                 ),
             )
             .child(section_label("Yellow triangle (waiting for confirmation)"))
@@ -2939,18 +2922,17 @@ impl gpui::Render for ThreadItemIconDecorationsTestView {
                     ThreadItem::new("ti-running", "Generating response...")
                         .icon(IconName::AiClaude)
                         .timestamp("1:20 AM")
-                        .running(true),
+                        .status(ui::AgentThreadStatus::Running),
                 ),
             )
             .child(section_label(
-                "Spinner + yellow triangle (running + waiting)",
+                "Spinner + yellow triangle (waiting for confirmation)",
             ))
             .child(
                 container().child(
                     ThreadItem::new("ti-running-waiting", "Running but needs confirmation")
                         .icon(IconName::AiClaude)
                         .timestamp("1:25 AM")
-                        .running(true)
                         .status(ui::AgentThreadStatus::WaitingForConfirmation),
                 ),
             )
@@ -3064,3 +3046,626 @@ fn run_error_wrapping_visual_tests(
 
     Ok(test_result)
 }
+
+#[cfg(all(target_os = "macos", feature = "visual-tests"))]
+/// Runs a git command in the given directory and returns an error with
+/// stderr/stdout context if the command fails (non-zero exit status).
+fn run_git_command(args: &[&str], dir: &std::path::Path) -> Result<()> {
+    let output = std::process::Command::new("git")
+        .args(args)
+        .current_dir(dir)
+        .output()
+        .with_context(|| format!("failed to spawn `git {}`", args.join(" ")))?;
+
+    if !output.status.success() {
+        let stdout = String::from_utf8_lossy(&output.stdout);
+        let stderr = String::from_utf8_lossy(&output.stderr);
+        anyhow::bail!(
+            "`git {}` failed (exit {})\nstdout: {}\nstderr: {}",
+            args.join(" "),
+            output.status,
+            stdout.trim(),
+            stderr.trim(),
+        );
+    }
+    Ok(())
+}
+
+#[cfg(all(target_os = "macos", feature = "visual-tests"))]
+fn run_start_thread_in_selector_visual_tests(
+    app_state: Arc<AppState>,
+    cx: &mut VisualTestAppContext,
+    update_baseline: bool,
+) -> Result<TestResult> {
+    use agent_ui::{AgentPanel, StartThreadIn, WorktreeCreationStatus};
+
+    // Enable feature flags so the thread target selector renders
+    cx.update(|cx| {
+        cx.update_flags(true, vec!["agent-v2".to_string()]);
+    });
+
+    // Create a temp directory with a real git repo so "New Worktree" is enabled
+    let temp_dir = tempfile::tempdir()?;
+    let temp_path = temp_dir.keep();
+    let canonical_temp = temp_path.canonicalize()?;
+    let project_path = canonical_temp.join("project");
+    std::fs::create_dir_all(&project_path)?;
+
+    // Initialize git repo
+    run_git_command(&["init"], &project_path)?;
+    run_git_command(&["config", "user.email", "test@test.com"], &project_path)?;
+    run_git_command(&["config", "user.name", "Test User"], &project_path)?;
+
+    // Create source files
+    let src_dir = project_path.join("src");
+    std::fs::create_dir_all(&src_dir)?;
+    std::fs::write(
+        src_dir.join("main.rs"),
+        r#"fn main() {
+    println!("Hello, world!");
+
+    let x = 42;
+    let y = x * 2;
+
+    if y > 50 {
+        println!("y is greater than 50");
+    } else {
+        println!("y is not greater than 50");
+    }
+
+    for i in 0..10 {
+        println!("i = {}", i);
+    }
+}
+
+fn helper_function(a: i32, b: i32) -> i32 {
+    a + b
+}
+"#,
+    )?;
+
+    std::fs::write(
+        project_path.join("Cargo.toml"),
+        r#"[package]
+name = "test_project"
+version = "0.1.0"
+edition = "2021"
+"#,
+    )?;
+
+    // Commit so git status is clean
+    run_git_command(&["add", "."], &project_path)?;
+    run_git_command(&["commit", "-m", "Initial commit"], &project_path)?;
+
+    let project = cx.update(|cx| {
+        project::Project::local(
+            app_state.client.clone(),
+            app_state.node_runtime.clone(),
+            app_state.user_store.clone(),
+            app_state.languages.clone(),
+            app_state.fs.clone(),
+            None,
+            project::LocalProjectFlags {
+                init_worktree_trust: false,
+                ..Default::default()
+            },
+            cx,
+        )
+    });
+
+    // Use a wide window so we see project panel + editor + agent panel
+    let window_size = size(px(1280.0), px(800.0));
+    let bounds = Bounds {
+        origin: point(px(0.0), px(0.0)),
+        size: window_size,
+    };
+
+    let workspace_window: WindowHandle<MultiWorkspace> = cx
+        .update(|cx| {
+            cx.open_window(
+                WindowOptions {
+                    window_bounds: Some(WindowBounds::Windowed(bounds)),
+                    focus: false,
+                    show: false,
+                    ..Default::default()
+                },
+                |window, cx| {
+                    let workspace = cx.new(|cx| {
+                        Workspace::new(None, project.clone(), app_state.clone(), window, cx)
+                    });
+                    cx.new(|cx| MultiWorkspace::new(workspace, window, cx))
+                },
+            )
+        })
+        .context("Failed to open thread target selector test window")?;
+
+    cx.run_until_parked();
+
+    // Create and register the workspace sidebar
+    let sidebar = workspace_window
+        .update(cx, |_multi_workspace, window, cx| {
+            let multi_workspace_handle = cx.entity();
+            cx.new(|cx| sidebar::Sidebar::new(multi_workspace_handle, window, cx))
+        })
+        .context("Failed to create sidebar")?;
+
+    workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            multi_workspace.register_sidebar(sidebar.clone(), window, cx);
+        })
+        .context("Failed to register sidebar")?;
+
+    // Open the sidebar
+    workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            multi_workspace.toggle_sidebar(window, cx);
+        })
+        .context("Failed to toggle sidebar")?;
+
+    cx.run_until_parked();
+
+    // Add the git project as a worktree
+    let add_worktree_task = workspace_window
+        .update(cx, |multi_workspace, _window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            let project = workspace.read(cx).project().clone();
+            project.update(cx, |project, cx| {
+                project.find_or_create_worktree(&project_path, true, cx)
+            })
+        })
+        .context("Failed to start adding worktree")?;
+
+    cx.background_executor.allow_parking();
+    cx.foreground_executor
+        .block_test(add_worktree_task)
+        .context("Failed to add worktree")?;
+    cx.background_executor.forbid_parking();
+
+    cx.run_until_parked();
+
+    // Wait for worktree scan and git status
+    for _ in 0..5 {
+        cx.advance_clock(Duration::from_millis(100));
+        cx.run_until_parked();
+    }
+
+    // Open the project panel
+    let (weak_workspace, async_window_cx) = workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            (workspace.read(cx).weak_handle(), window.to_async(cx))
+        })
+        .context("Failed to get workspace handle")?;
+
+    cx.background_executor.allow_parking();
+    let project_panel = cx
+        .foreground_executor
+        .block_test(ProjectPanel::load(weak_workspace, async_window_cx))
+        .context("Failed to load project panel")?;
+    cx.background_executor.forbid_parking();
+
+    workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            workspace.update(cx, |workspace, cx| {
+                workspace.add_panel(project_panel, window, cx);
+                workspace.open_panel::<ProjectPanel>(window, cx);
+            });
+        })
+        .context("Failed to add project panel")?;
+
+    cx.run_until_parked();
+
+    // Open main.rs in the editor
+    let open_file_task = workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            workspace.update(cx, |workspace, cx| {
+                let worktree = workspace.project().read(cx).worktrees(cx).next();
+                if let Some(worktree) = worktree {
+                    let worktree_id = worktree.read(cx).id();
+                    let rel_path: std::sync::Arc<util::rel_path::RelPath> =
+                        util::rel_path::rel_path("src/main.rs").into();
+                    let project_path: project::ProjectPath = (worktree_id, rel_path).into();
+                    Some(workspace.open_path(project_path, None, true, window, cx))
+                } else {
+                    None
+                }
+            })
+        })
+        .log_err()
+        .flatten();
+
+    if let Some(task) = open_file_task {
+        cx.background_executor.allow_parking();
+        cx.foreground_executor.block_test(task).log_err();
+        cx.background_executor.forbid_parking();
+    }
+
+    cx.run_until_parked();
+
+    // Load the AgentPanel
+    let (weak_workspace, async_window_cx) = workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            (workspace.read(cx).weak_handle(), window.to_async(cx))
+        })
+        .context("Failed to get workspace handle for agent panel")?;
+
+    let prompt_builder =
+        cx.update(|cx| prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx));
+
+    // Register an observer so that workspaces created by the worktree creation
+    // flow get AgentPanel and ProjectPanel loaded automatically. Without this,
+    // `workspace.panel::<AgentPanel>(cx)` returns None in the new workspace and
+    // the creation flow's `focus_panel::<AgentPanel>` call is a no-op.
+    let _workspace_observer = cx.update({
+        let prompt_builder = prompt_builder.clone();
+        |cx| {
+            cx.observe_new(move |workspace: &mut Workspace, window, cx| {
+                let Some(window) = window else { return };
+                let prompt_builder = prompt_builder.clone();
+                let panels_task = cx.spawn_in(window, async move |workspace_handle, cx| {
+                    let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone());
+                    let agent_panel =
+                        AgentPanel::load(workspace_handle.clone(), prompt_builder, cx.clone());
+                    if let Ok(panel) = project_panel.await {
+                        workspace_handle
+                            .update_in(cx, |workspace, window, cx| {
+                                workspace.add_panel(panel, window, cx);
+                            })
+                            .log_err();
+                    }
+                    if let Ok(panel) = agent_panel.await {
+                        workspace_handle
+                            .update_in(cx, |workspace, window, cx| {
+                                workspace.add_panel(panel, window, cx);
+                            })
+                            .log_err();
+                    }
+                    anyhow::Ok(())
+                });
+                workspace.set_panels_task(panels_task);
+            })
+        }
+    });
+
+    cx.background_executor.allow_parking();
+    let panel = cx
+        .foreground_executor
+        .block_test(AgentPanel::load(
+            weak_workspace,
+            prompt_builder,
+            async_window_cx,
+        ))
+        .context("Failed to load AgentPanel")?;
+    cx.background_executor.forbid_parking();
+
+    workspace_window
+        .update(cx, |multi_workspace, window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            workspace.update(cx, |workspace, cx| {
+                workspace.add_panel(panel.clone(), window, cx);
+                workspace.open_panel::<AgentPanel>(window, cx);
+            });
+        })
+        .context("Failed to add and open AgentPanel")?;
+
+    cx.run_until_parked();
+
+    // Inject the stub server and open a thread so the toolbar is visible
+    let connection = StubAgentConnection::new();
+    let stub_agent: Rc<dyn AgentServer> = Rc::new(StubAgentServer::new(connection));
+
+    cx.update_window(workspace_window.into(), |_, window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel.open_external_thread_with_server(stub_agent.clone(), window, cx);
+        });
+    })?;
+
+    cx.run_until_parked();
+
+    // ---- Screenshot 1: Default "Local Project" selector (dropdown closed) ----
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+    cx.run_until_parked();
+
+    let result_default = run_visual_test(
+        "start_thread_in_selector_default",
+        workspace_window.into(),
+        cx,
+        update_baseline,
+    );
+
+    // ---- Screenshot 2: Dropdown open showing menu entries ----
+    cx.update_window(workspace_window.into(), |_, window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel.open_start_thread_in_menu_for_tests(window, cx);
+        });
+    })?;
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+    cx.run_until_parked();
+
+    let result_open_dropdown = run_visual_test(
+        "start_thread_in_selector_open",
+        workspace_window.into(),
+        cx,
+        update_baseline,
+    );
+
+    // ---- Screenshot 3: "New Worktree" selected (dropdown closed, label changed) ----
+    // First dismiss the dropdown, then change the target so the toolbar label is visible
+    cx.update_window(workspace_window.into(), |_, _window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel.close_start_thread_in_menu_for_tests(cx);
+        });
+    })?;
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, _window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel.set_start_thread_in_for_tests(StartThreadIn::NewWorktree, cx);
+        });
+    })?;
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+    cx.run_until_parked();
+
+    let result_new_worktree = run_visual_test(
+        "start_thread_in_selector_new_worktree",
+        workspace_window.into(),
+        cx,
+        update_baseline,
+    );
+
+    // ---- Screenshot 4: "Creating worktree…" status banner ----
+    cx.update_window(workspace_window.into(), |_, _window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel
+                .set_worktree_creation_status_for_tests(Some(WorktreeCreationStatus::Creating), cx);
+        });
+    })?;
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+    cx.run_until_parked();
+
+    let result_creating = run_visual_test(
+        "worktree_creation_status_creating",
+        workspace_window.into(),
+        cx,
+        update_baseline,
+    );
+
+    // ---- Screenshot 5: Error status banner ----
+    cx.update_window(workspace_window.into(), |_, _window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel.set_worktree_creation_status_for_tests(
+                Some(WorktreeCreationStatus::Error(
+                    "Failed to create worktree: branch already exists".into(),
+                )),
+                cx,
+            );
+        });
+    })?;
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+    cx.run_until_parked();
+
+    let result_error = run_visual_test(
+        "worktree_creation_status_error",
+        workspace_window.into(),
+        cx,
+        update_baseline,
+    );
+
+    // ---- Screenshot 6: Worktree creation succeeded ----
+    // Clear the error status and re-select New Worktree to ensure a clean state.
+    cx.update_window(workspace_window.into(), |_, _window, cx| {
+        panel.update(cx, |panel, cx| {
+            panel.set_worktree_creation_status_for_tests(None, cx);
+        });
+    })?;
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, window, cx| {
+        window.dispatch_action(Box::new(StartThreadIn::NewWorktree), cx);
+    })?;
+    cx.run_until_parked();
+
+    // Insert a message into the active thread's message editor and submit.
+    let thread_view = cx
+        .read(|cx| panel.read(cx).as_active_thread_view(cx))
+        .ok_or_else(|| anyhow::anyhow!("No active thread view"))?;
+
+    cx.update_window(workspace_window.into(), |_, window, cx| {
+        let message_editor = thread_view.read(cx).message_editor.clone();
+        message_editor.update(cx, |message_editor, cx| {
+            message_editor.set_message(
+                vec![acp::ContentBlock::Text(acp::TextContent::new(
+                    "Add a CLI flag to set the log level".to_string(),
+                ))],
+                window,
+                cx,
+            );
+            message_editor.send(cx);
+        });
+    })?;
+    cx.run_until_parked();
+
+    // Wait for the full worktree creation flow to complete. The creation status
+    // is cleared to `None` at the very end of the async task, after panels are
+    // loaded, the agent panel is focused, and the new workspace is activated.
+    cx.background_executor.allow_parking();
+    let mut creation_complete = false;
+    for _ in 0..120 {
+        cx.run_until_parked();
+        let status_cleared = cx.read(|cx| {
+            panel
+                .read(cx)
+                .worktree_creation_status_for_tests()
+                .is_none()
+        });
+        let workspace_count = workspace_window.update(cx, |multi_workspace, _window, _cx| {
+            multi_workspace.workspaces().len()
+        })?;
+        if workspace_count == 2 && status_cleared {
+            creation_complete = true;
+            break;
+        }
+        cx.advance_clock(Duration::from_millis(100));
+    }
+    cx.background_executor.forbid_parking();
+
+    if !creation_complete {
+        return Err(anyhow::anyhow!("Worktree creation did not complete"));
+    }
+
+    // The creation flow called `external_thread` on the new workspace's agent
+    // panel, which tried to launch a real agent binary and failed. Replace the
+    // error state by injecting the stub server, and shrink the panel so the
+    // editor content is visible.
+    workspace_window.update(cx, |multi_workspace, window, cx| {
+        let new_workspace = &multi_workspace.workspaces()[1];
+        new_workspace.update(cx, |workspace, cx| {
+            if let Some(new_panel) = workspace.panel::<AgentPanel>(cx) {
+                new_panel.update(cx, |panel, cx| {
+                    panel.set_size(Some(px(480.0)), window, cx);
+                    panel.open_external_thread_with_server(stub_agent.clone(), window, cx);
+                });
+            }
+        });
+    })?;
+    cx.run_until_parked();
+
+    // Programmatically set and send a message so the thread target dropdown disappears.
+    let new_panel = workspace_window.update(cx, |multi_workspace, _window, cx| {
+        let new_workspace = &multi_workspace.workspaces()[1];
+        new_workspace.read(cx).panel::<AgentPanel>(cx)
+    })?;
+    if let Some(new_panel) = new_panel {
+        let new_thread_view = cx.read(|cx| new_panel.read(cx).as_active_thread_view(cx));
+        if let Some(new_thread_view) = new_thread_view {
+            cx.update_window(workspace_window.into(), |_, window, cx| {
+                let message_editor = new_thread_view.read(cx).message_editor.clone();
+                message_editor.update(cx, |editor, cx| {
+                    editor.set_message(
+                        vec![acp::ContentBlock::Text(acp::TextContent::new(
+                            "Add a CLI flag to set the log level".to_string(),
+                        ))],
+                        window,
+                        cx,
+                    );
+                    editor.send(cx);
+                });
+            })?;
+            cx.run_until_parked();
+        }
+    }
+
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.refresh();
+    })?;
+    cx.run_until_parked();
+
+    let result_succeeded = run_visual_test(
+        "worktree_creation_succeeded",
+        workspace_window.into(),
+        cx,
+        update_baseline,
+    );
+
+    // Clean up — drop the workspace observer first so no new panels are
+    // registered on workspaces created during teardown.
+    drop(_workspace_observer);
+
+    workspace_window
+        .update(cx, |multi_workspace, _window, cx| {
+            let workspace = &multi_workspace.workspaces()[0];
+            let project = workspace.read(cx).project().clone();
+            project.update(cx, |project, cx| {
+                let worktree_ids: Vec<_> =
+                    project.worktrees(cx).map(|wt| wt.read(cx).id()).collect();
+                for id in worktree_ids {
+                    project.remove_worktree(id, cx);
+                }
+            });
+        })
+        .log_err();
+
+    cx.run_until_parked();
+
+    cx.update_window(workspace_window.into(), |_, window, _cx| {
+        window.remove_window();
+    })
+    .log_err();
+
+    cx.run_until_parked();
+
+    for _ in 0..15 {
+        cx.advance_clock(Duration::from_millis(100));
+        cx.run_until_parked();
+    }
+
+    // Delete the preserved temp directory so visual-test runs don't
+    // accumulate filesystem artifacts.
+    if let Err(err) = std::fs::remove_dir_all(&temp_path) {
+        log::warn!(
+            "failed to clean up visual-test temp dir {}: {err}",
+            temp_path.display()
+        );
+    }
+
+    // Reset feature flags
+    cx.update(|cx| {
+        cx.update_flags(false, vec![]);
+    });
+
+    let results = [
+        ("default", result_default),
+        ("open_dropdown", result_open_dropdown),
+        ("new_worktree", result_new_worktree),
+        ("creating", result_creating),
+        ("error", result_error),
+        ("succeeded", result_succeeded),
+    ];
+
+    let mut has_baseline_update = None;
+    let mut failures = Vec::new();
+
+    for (name, result) in &results {
+        match result {
+            Ok(TestResult::Passed) => {}
+            Ok(TestResult::BaselineUpdated(p)) => {
+                has_baseline_update = Some(p.clone());
+            }
+            Err(e) => {
+                failures.push(format!("{}: {}", name, e));
+            }
+        }
+    }
+
+    if !failures.is_empty() {
+        Err(anyhow::anyhow!(
+            "start_thread_in_selector failures: {}",
+            failures.join("; ")
+        ))
+    } else if let Some(p) = has_baseline_update {
+        Ok(TestResult::BaselineUpdated(p))
+    } else {
+        Ok(TestResult::Passed)
+    }
+}

crates/zed/src/zed.rs 🔗

@@ -376,8 +376,19 @@ pub fn initialize_workspace(
             return;
         };
         let multi_workspace_handle = cx.entity();
-        let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle, window, cx));
+        let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx));
         multi_workspace.register_sidebar(sidebar, window, cx);
+
+        let multi_workspace_handle = multi_workspace_handle.downgrade();
+        window.on_window_should_close(cx, move |window, cx| {
+            multi_workspace_handle
+                .update(cx, |multi_workspace, cx| {
+                    // We'll handle closing asynchronously
+                    multi_workspace.close_window(&CloseWindow, window, cx);
+                    false
+                })
+                .unwrap_or(true)
+        });
     })
     .detach();
 
@@ -411,16 +422,7 @@ pub fn initialize_workspace(
         if let Some(specs) = window.gpu_specs() {
             log::info!("Using GPU: {:?}", specs);
             show_software_emulation_warning_if_needed(specs.clone(), window, cx);
-            if let Some((crash_server, message)) = crashes::CRASH_HANDLER
-                .get()
-                .zip(bincode::serialize(&specs).ok())
-                && let Err(err) = crash_server.send_message(3, message)
-            {
-                log::warn!(
-                    "Failed to store active gpu info for crash reporting: {}",
-                    err
-                );
-            }
+            crashes::set_gpu_info(specs);
         }
 
         let edit_prediction_menu_handle = PopoverMenuHandle::default();
@@ -485,18 +487,8 @@ pub fn initialize_workspace(
             status_bar.add_right_item(image_info, window, cx);
         });
 
-        let handle = cx.entity().downgrade();
-        window.on_window_should_close(cx, move |window, cx| {
-            handle
-                .update(cx, |workspace, cx| {
-                    // We'll handle closing asynchronously
-                    workspace.close_window(&CloseWindow, window, cx);
-                    false
-                })
-                .unwrap_or(true)
-        });
-
-        initialize_panels(prompt_builder.clone(), window, cx);
+        let panels_task = initialize_panels(prompt_builder.clone(), window, cx);
+        workspace.set_panels_task(panels_task);
         register_actions(app_state.clone(), workspace, window, cx);
 
         workspace.focus_handle(cx).focus(window, cx);
@@ -620,7 +612,7 @@ fn initialize_panels(
     prompt_builder: Arc<PromptBuilder>,
     window: &mut Window,
     cx: &mut Context<Workspace>,
-) {
+) -> Task<anyhow::Result<()>> {
     cx.spawn_in(window, async move |workspace_handle, cx| {
         let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone());
         let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone());
@@ -662,7 +654,6 @@ fn initialize_panels(
 
         anyhow::Ok(())
     })
-    .detach();
 }
 
 fn setup_or_teardown_ai_panel<P: Panel>(
@@ -794,7 +785,7 @@ fn register_actions(
                 }
             }
         })
-        .register_action(|workspace, _: &workspace::Open, window, cx| {
+        .register_action(|workspace, action: &workspace::Open, window, cx| {
             telemetry::event!("Project Opened");
             workspace::prompt_for_open_path_and_open(
                 workspace,
@@ -805,6 +796,7 @@ fn register_actions(
                     multiple: true,
                     prompt: None,
                 },
+                action.create_new_window,
                 window,
                 cx,
             );
@@ -820,6 +812,7 @@ fn register_actions(
                     multiple: true,
                     prompt: None,
                 },
+                true,
                 window,
                 cx,
             );
@@ -1103,7 +1096,7 @@ fn register_actions(
                             );
                         },
                     )
-                    .detach();
+                    .detach_and_log_err(cx);
                 }
             }
         })
@@ -2060,40 +2053,39 @@ fn open_settings_file(
     cx: &mut Context<Workspace>,
 ) {
     cx.spawn_in(window, async move |workspace, cx| {
-        let settings_open_task = workspace
+        let (worktree_creation_task, settings_open_task) = workspace
             .update_in(cx, |workspace, window, cx| {
-                workspace.with_local_workspace(window, cx, move |_workspace, window, cx| {
-                    cx.spawn_in(window, async move |workspace, cx| {
-                        let worktree_creation_task =
-                            workspace.update_in(cx, |workspace, _window, cx| {
-                                workspace.project().update(cx, |project, cx| {
-                                    // Set up a dedicated worktree for settings, since
-                                    // otherwise we're dropping and re-starting LSP servers
-                                    // for each file inside on every settings file
-                                    // close/open
-
-                                    // TODO: Do note that all other external files (e.g.
-                                    // drag and drop from OS) still have their worktrees
-                                    // released on file close, causing LSP servers'
-                                    // restarts.
-                                    project.find_or_create_worktree(
-                                        paths::config_dir().as_path(),
-                                        false,
-                                        cx,
-                                    )
-                                })
-                            })?;
-                        let _ = worktree_creation_task.await?;
-                        let settings_open_task =
-                            workspace.update_in(cx, |_workspace, window, cx| {
-                                create_and_open_local_file(abs_path, window, cx, default_content)
-                            })?;
-                        let _ = settings_open_task.await?;
-                        anyhow::Ok(())
-                    })
+                workspace.with_local_or_wsl_workspace(window, cx, move |workspace, window, cx| {
+                    let project = workspace.project().clone();
+
+                    let worktree_creation_task = cx.spawn_in(window, async move |_, cx| {
+                        let config_dir = project
+                            .update(cx, |project, cx| {
+                                project.try_windows_path_to_wsl(paths::config_dir().as_path(), cx)
+                            })
+                            .await?;
+                        // Set up a dedicated worktree for settings, since
+                        // otherwise we're dropping and re-starting LSP servers
+                        // for each file inside on every settings file
+                        // close/open
+
+                        // TODO: Do note that all other external files (e.g.
+                        // drag and drop from OS) still have their worktrees
+                        // released on file close, causing LSP servers'
+                        // restarts.
+                        project
+                            .update(cx, |project, cx| {
+                                project.find_or_create_worktree(&config_dir, false, cx)
+                            })
+                            .await
+                    });
+                    let settings_open_task =
+                        create_and_open_local_file(abs_path, window, cx, default_content);
+                    (worktree_creation_task, settings_open_task)
                 })
             })?
             .await?;
+        let _ = worktree_creation_task.await?;
         let _ = settings_open_task.await?;
         anyhow::Ok(())
     })
@@ -4793,6 +4785,7 @@ mod tests {
                 "action",
                 "activity_indicator",
                 "agent",
+                "agents_sidebar",
                 "app_menu",
                 "assistant",
                 "assistant2",
@@ -4810,6 +4803,7 @@ mod tests {
                 "console",
                 "context_server",
                 "copilot",
+                "csv",
                 "debug_panel",
                 "debugger",
                 "dev",
@@ -4861,7 +4855,6 @@ mod tests {
                 "settings_profile_selector",
                 "snippets",
                 "stash_picker",
-                "supermaven",
                 "svg",
                 "syntax_tree_view",
                 "tab_switcher",
@@ -5022,7 +5015,7 @@ mod tests {
             language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
             web_search::init(cx);
             git_graph::init(cx);
-            web_search_providers::init(app_state.client.clone(), cx);
+            web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx);
             let prompt_builder = PromptBuilder::load(app_state.fs.clone(), false, cx);
             project::AgentRegistryStore::init_global(
                 cx,
@@ -5809,7 +5802,15 @@ mod tests {
         //   Window B: workspace for dir3
         let (window_a, _) = cx
             .update(|cx| {
-                Workspace::new_local(vec![dir1.into()], app_state.clone(), None, None, None, cx)
+                Workspace::new_local(
+                    vec![dir1.into()],
+                    app_state.clone(),
+                    None,
+                    None,
+                    None,
+                    true,
+                    cx,
+                )
             })
             .await
             .expect("failed to open first workspace");
@@ -5825,7 +5826,15 @@ mod tests {
 
         let (window_b, _) = cx
             .update(|cx| {
-                Workspace::new_local(vec![dir3.into()], app_state.clone(), None, None, None, cx)
+                Workspace::new_local(
+                    vec![dir3.into()],
+                    app_state.clone(),
+                    None,
+                    None,
+                    None,
+                    true,
+                    cx,
+                )
             })
             .await
             .expect("failed to open third workspace");

crates/zed/src/zed/app_menus.rs 🔗

@@ -2,7 +2,7 @@ use collab_ui::collab_panel;
 use gpui::{App, Menu, MenuItem, OsAction};
 use release_channel::ReleaseChannel;
 use terminal_view::terminal_panel;
-use zed_actions::{ToggleFocus as ToggleDebugPanel, dev};
+use zed_actions::{debug_panel, dev};
 
 pub fn app_menus(cx: &mut App) -> Vec<Menu> {
     use zed_actions::Quit;
@@ -43,7 +43,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
         MenuItem::action("Outline Panel", outline_panel::ToggleFocus),
         MenuItem::action("Collab Panel", collab_panel::ToggleFocus),
         MenuItem::action("Terminal Panel", terminal_panel::ToggleFocus),
-        MenuItem::action("Debugger Panel", ToggleDebugPanel),
+        MenuItem::action("Debugger Panel", debug_panel::ToggleFocus),
         MenuItem::separator(),
         MenuItem::action("Diagnostics", diagnostics::Deploy),
         MenuItem::separator(),
@@ -125,7 +125,7 @@ pub fn app_menus(cx: &mut App) -> Vec<Menu> {
                     } else {
                         "Open…"
                     },
-                    workspace::Open,
+                    workspace::Open::default(),
                 ),
                 MenuItem::action(
                     "Open Recent...",

crates/zed/src/zed/edit_prediction_registry.rs 🔗

@@ -2,20 +2,18 @@ use client::{Client, UserStore};
 use codestral::{CodestralEditPredictionDelegate, load_codestral_api_key};
 use collections::HashMap;
 use copilot::CopilotEditPredictionDelegate;
-use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate, Zeta2FeatureFlag};
+use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate};
 use editor::Editor;
-use feature_flags::FeatureFlagAppExt;
 use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity};
 use language::language_settings::{EditPredictionProvider, all_language_settings};
 
-use settings::{
-    EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, EditPredictionPromptFormat, SettingsStore,
-};
+use settings::{EditPredictionPromptFormat, SettingsStore};
 use std::{cell::RefCell, rc::Rc, sync::Arc};
-use supermaven::{Supermaven, SupermavenEditPredictionDelegate};
 use ui::Window;
 
 pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
+    edit_prediction::EditPredictionStore::global(&client, &user_store, cx);
+
     let editors: Rc<RefCell<HashMap<WeakEntity<Editor>, AnyWindowHandle>>> = Rc::default();
     cx.observe_new({
         let editors = editors.clone();
@@ -60,13 +58,13 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
 
     cx.on_action(clear_edit_prediction_store_edit_history);
 
-    let mut provider_config = edit_prediction_provider_config_for_settings(cx);
     cx.subscribe(&user_store, {
         let editors = editors.clone();
         let client = client.clone();
 
         move |user_store, event, cx| {
             if let client::user::Event::PrivateUserInfoUpdated = event {
+                let provider_config = edit_prediction_provider_config_for_settings(cx);
                 assign_edit_prediction_providers(
                     &editors,
                     provider_config,
@@ -80,18 +78,18 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
     .detach();
 
     cx.observe_global::<SettingsStore>({
-        let user_store = user_store.clone();
+        let mut previous_config = edit_prediction_provider_config_for_settings(cx);
         move |cx| {
             let new_provider_config = edit_prediction_provider_config_for_settings(cx);
 
-            if new_provider_config != provider_config {
+            if new_provider_config != previous_config {
                 telemetry::event!(
                     "Edit Prediction Provider Changed",
-                    from = provider_config.map(|config| config.name()),
+                    from = previous_config.map(|config| config.name()),
                     to = new_provider_config.map(|config| config.name())
                 );
 
-                provider_config = new_provider_config;
+                previous_config = new_provider_config;
                 assign_edit_prediction_providers(
                     &editors,
                     new_provider_config,
@@ -111,10 +109,9 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option<EditPredicti
     match provider {
         EditPredictionProvider::None => None,
         EditPredictionProvider::Copilot => Some(EditPredictionProviderConfig::Copilot),
-        EditPredictionProvider::Supermaven => Some(EditPredictionProviderConfig::Supermaven),
-        EditPredictionProvider::Zed => Some(EditPredictionProviderConfig::Zed(
-            EditPredictionModel::Zeta1,
-        )),
+        EditPredictionProvider::Zed => {
+            Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta))
+        }
         EditPredictionProvider::Codestral => Some(EditPredictionProviderConfig::Codestral),
         EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi => {
             let custom_settings = if provider == EditPredictionProvider::Ollama {
@@ -133,10 +130,11 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option<EditPredicti
                 }
             }
 
-            if format == EditPredictionPromptFormat::Zeta {
-                Some(EditPredictionProviderConfig::Zed(
-                    EditPredictionModel::Zeta1,
-                ))
+            if matches!(
+                format,
+                EditPredictionPromptFormat::Zeta | EditPredictionPromptFormat::Zeta2
+            ) {
+                Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta))
             } else {
                 Some(EditPredictionProviderConfig::Zed(
                     EditPredictionModel::Fim { format },
@@ -149,17 +147,7 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option<EditPredicti
         EditPredictionProvider::Mercury => Some(EditPredictionProviderConfig::Zed(
             EditPredictionModel::Mercury,
         )),
-        EditPredictionProvider::Experimental(name) => {
-            if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME
-                && cx.has_flag::<Zeta2FeatureFlag>()
-            {
-                Some(EditPredictionProviderConfig::Zed(
-                    EditPredictionModel::Zeta2,
-                ))
-            } else {
-                None
-            }
-        }
+        EditPredictionProvider::Experimental(_) => None,
     }
 }
 
@@ -183,7 +171,6 @@ fn infer_prompt_format(model: &str) -> Option<EditPredictionPromptFormat> {
 #[derive(Copy, Clone, PartialEq, Eq)]
 enum EditPredictionProviderConfig {
     Copilot,
-    Supermaven,
     Codestral,
     Zed(EditPredictionModel),
 }
@@ -192,11 +179,9 @@ impl EditPredictionProviderConfig {
     fn name(&self) -> &'static str {
         match self {
             EditPredictionProviderConfig::Copilot => "Copilot",
-            EditPredictionProviderConfig::Supermaven => "Supermaven",
             EditPredictionProviderConfig::Codestral => "Codestral",
             EditPredictionProviderConfig::Zed(model) => match model {
-                EditPredictionModel::Zeta1 => "Zeta1",
-                EditPredictionModel::Zeta2 => "Zeta2",
+                EditPredictionModel::Zeta => "Zeta",
                 EditPredictionModel::Fim { .. } => "FIM",
                 EditPredictionModel::Sweep => "Sweep",
                 EditPredictionModel::Mercury => "Mercury",
@@ -285,12 +270,6 @@ fn assign_edit_prediction_provider(
                 editor.set_edit_prediction_provider(Some(provider), window, cx);
             }
         }
-        Some(EditPredictionProviderConfig::Supermaven) => {
-            if let Some(supermaven) = Supermaven::global(cx) {
-                let provider = cx.new(|_| SupermavenEditPredictionDelegate::new(supermaven));
-                editor.set_edit_prediction_provider(Some(provider), window, cx);
-            }
-        }
         Some(EditPredictionProviderConfig::Codestral) => {
             let http_client = client.http_client();
             let provider = cx.new(|_| CodestralEditPredictionDelegate::new(http_client));
@@ -300,27 +279,122 @@ fn assign_edit_prediction_provider(
             let ep_store = edit_prediction::EditPredictionStore::global(client, &user_store, cx);
 
             if let Some(project) = editor.project() {
-                let has_model = ep_store.update(cx, |ep_store, cx| {
+                ep_store.update(cx, |ep_store, cx| {
                     ep_store.set_edit_prediction_model(model);
                     if let Some(buffer) = &singleton_buffer {
                         ep_store.register_buffer(buffer, project, cx);
                     }
-                    true
                 });
 
-                if has_model {
-                    let provider = cx.new(|cx| {
-                        ZedEditPredictionDelegate::new(
-                            project.clone(),
-                            singleton_buffer,
-                            &client,
-                            &user_store,
-                            cx,
-                        )
-                    });
-                    editor.set_edit_prediction_provider(Some(provider), window, cx);
-                }
+                let provider = cx.new(|cx| {
+                    ZedEditPredictionDelegate::new(
+                        project.clone(),
+                        singleton_buffer,
+                        &client,
+                        &user_store,
+                        cx,
+                    )
+                });
+                editor.set_edit_prediction_provider(Some(provider), window, cx);
             }
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use editor::MultiBuffer;
+    use gpui::{BorrowAppContext, TestAppContext};
+    use settings::{EditPredictionProvider, SettingsStore};
+    use workspace::AppState;
+
+    #[gpui::test]
+    async fn test_subscribe_uses_stale_provider_config_after_settings_change(
+        cx: &mut TestAppContext,
+    ) {
+        let app_state = cx.update(|cx| {
+            let app_state = AppState::test(cx);
+            client::init(&app_state.client, cx);
+            language_model::init(app_state.client.clone(), cx);
+            editor::init(cx);
+            app_state
+        });
+
+        // Override the default provider to None so the subscribe closure
+        // captures None at init time. (The test default is Zed/Zeta1, which
+        // is a no-op on project-less editors and would mask the bug.)
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _>(|store: &mut SettingsStore, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.project.all_languages.edit_predictions =
+                        Some(settings::EditPredictionSettingsContent {
+                            provider: Some(EditPredictionProvider::None),
+                            ..Default::default()
+                        });
+                });
+            });
+        });
+
+        cx.update(|cx| {
+            init(app_state.client.clone(), app_state.user_store.clone(), cx);
+        });
+
+        // Create an editor in a window so observe_new registers it.
+        let editor = cx.add_window(|window, cx| {
+            let buffer = cx.new(|_cx| MultiBuffer::new(language::Capability::ReadWrite));
+            Editor::new(editor::EditorMode::full(), buffer, None, window, cx)
+        });
+
+        editor
+            .update(cx, |editor, _window, _cx| {
+                assert!(
+                    editor.edit_prediction_provider().is_none(),
+                    "editor should start with no provider when settings = None"
+                );
+            })
+            .unwrap();
+
+        // Change settings to Codestral. The observe_global closure updates its
+        // own copy of provider_config and assigns Codestral to all editors.
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _>(|store: &mut SettingsStore, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.project.all_languages.edit_predictions =
+                        Some(settings::EditPredictionSettingsContent {
+                            provider: Some(EditPredictionProvider::Codestral),
+                            ..Default::default()
+                        });
+                });
+            });
+        });
+
+        editor
+            .update(cx, |editor, _window, _cx| {
+                assert!(
+                    editor.edit_prediction_provider().is_some(),
+                    "editor should have a provider after changing settings to Codestral"
+                );
+            })
+            .unwrap();
+
+        // Emit PrivateUserInfoUpdated. The subscribe closure should use the
+        // CURRENT provider config (Codestral), but due to the bug it uses the
+        // stale init-time value (None) and clears the provider.
+        cx.update(|cx| {
+            app_state.user_store.update(cx, |_, cx| {
+                cx.emit(client::user::Event::PrivateUserInfoUpdated);
+            });
+        });
+        cx.run_until_parked();
+
+        editor
+            .update(cx, |editor, _window, _cx| {
+                assert!(
+                    editor.edit_prediction_provider().is_some(),
+                    "BUG: subscribe closure used stale provider_config (None) instead of current (Codestral)"
+                );
+            })
+            .unwrap();
+    }
+}

crates/zed/src/zed/quick_action_bar/preview.rs 🔗

@@ -1,3 +1,8 @@
+use csv_preview::{
+    CsvPreviewView, OpenPreview as CsvOpenPreview, OpenPreviewToTheSide as CsvOpenPreviewToTheSide,
+    TabularDataPreviewFeatureFlag,
+};
+use feature_flags::FeatureFlagAppExt as _;
 use gpui::{AnyElement, Modifiers, WeakEntity};
 use markdown_preview::{
     OpenPreview as MarkdownOpenPreview, OpenPreviewToTheSide as MarkdownOpenPreviewToTheSide,
@@ -16,6 +21,7 @@ use super::QuickActionBar;
 enum PreviewType {
     Markdown,
     Svg,
+    Csv,
 }
 
 impl QuickActionBar {
@@ -35,6 +41,10 @@ impl QuickActionBar {
                 } else if SvgPreviewView::resolve_active_item_as_svg_buffer(workspace, cx).is_some()
                 {
                     preview_type = Some(PreviewType::Svg);
+                } else if cx.has_flag::<TabularDataPreviewFeatureFlag>()
+                    && CsvPreviewView::resolve_active_item_as_csv_editor(workspace, cx).is_some()
+                {
+                    preview_type = Some(PreviewType::Csv);
                 }
             });
         }
@@ -57,6 +67,13 @@ impl QuickActionBar {
                     Box::new(SvgOpenPreviewToTheSide) as Box<dyn gpui::Action>,
                     &svg_preview::OpenPreview as &dyn gpui::Action,
                 ),
+                PreviewType::Csv => (
+                    "toggle-csv-preview",
+                    "Preview CSV",
+                    Box::new(CsvOpenPreview) as Box<dyn gpui::Action>,
+                    Box::new(CsvOpenPreviewToTheSide) as Box<dyn gpui::Action>,
+                    &csv_preview::OpenPreview as &dyn gpui::Action,
+                ),
             };
 
         let alt_click = gpui::Keystroke {

crates/zed_actions/src/lib.rs 🔗

@@ -639,15 +639,19 @@ actions!(
     ]
 );
 
-actions!(
-    debug_panel,
-    [
-        /// Toggles the debug panel.
-        Toggle,
-        /// Toggles focus on the debug panel.
-        ToggleFocus
-    ]
-);
+pub mod debug_panel {
+    use gpui::actions;
+    actions!(
+        debug_panel,
+        [
+            /// Toggles the debug panel.
+            Toggle,
+            /// Toggles focus on the debug panel.
+            ToggleFocus
+        ]
+    );
+}
+
 actions!(
     debugger,
     [
@@ -732,3 +736,17 @@ pub mod preview {
         );
     }
 }
+
+pub mod notebook {
+    use gpui::actions;
+
+    actions!(
+        notebook,
+        [
+            /// Moves down to the next cell in the notebook
+            NotebookMoveDown,
+            /// Moves up to the previous cell in the notebook
+            NotebookMoveUp,
+        ]
+    );
+}

crates/zeta_prompt/src/zeta_prompt.rs 🔗

@@ -18,17 +18,10 @@ fn estimate_tokens(bytes: usize) -> usize {
     bytes / 3
 }
 
-/// The client's preferred edit prediction model. The server may override this.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
-pub enum EditPredictionModelKind {
-    Zeta1,
-    Zeta2,
-}
-
 /// Pre-computed byte offset ranges within `cursor_excerpt` for different
 /// editable and context token budgets. Allows the server to select the
 /// appropriate ranges for whichever model it uses.
-#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
+#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)]
 pub struct ExcerptRanges {
     /// Editable region computed with a 150-token budget.
     pub editable_150: Range<usize>,
@@ -36,37 +29,40 @@ pub struct ExcerptRanges {
     pub editable_180: Range<usize>,
     /// Editable region computed with a 350-token budget.
     pub editable_350: Range<usize>,
+    /// Editable region computed with a 512-token budget.
+    pub editable_512: Option<Range<usize>>,
     /// Context boundary when using editable_150 with 350 tokens of additional context.
     pub editable_150_context_350: Range<usize>,
     /// Context boundary when using editable_180 with 350 tokens of additional context.
     pub editable_180_context_350: Range<usize>,
     /// Context boundary when using editable_350 with 150 tokens of additional context.
     pub editable_350_context_150: Range<usize>,
+    pub editable_350_context_512: Option<Range<usize>>,
+    pub editable_350_context_1024: Option<Range<usize>>,
+    pub context_4096: Option<Range<usize>>,
+    pub context_8192: Option<Range<usize>>,
 }
 
 #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
 pub struct ZetaPromptInput {
     pub cursor_path: Arc<Path>,
     pub cursor_excerpt: Arc<str>,
-    pub editable_range_in_excerpt: Range<usize>,
     pub cursor_offset_in_excerpt: usize,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub excerpt_start_row: Option<u32>,
     pub events: Vec<Arc<Event>>,
     pub related_files: Vec<RelatedFile>,
-    /// When set, the excerpt was computed with a larger budget (~512 tokens)
-    /// and these ranges let the server select model-appropriate subsets.
-    /// When absent, the excerpt IS the context region and
-    /// `editable_range_in_excerpt` is the only editable range.
+    /// These ranges let the server select model-appropriate subsets.
+    pub excerpt_ranges: ExcerptRanges,
+    /// The name of the edit prediction model experiment to use.
     #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub excerpt_ranges: Option<ExcerptRanges>,
-    /// Client's preferred model. The server may override.
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub preferred_model: Option<EditPredictionModelKind>,
+    pub experiment: Option<String>,
     #[serde(default)]
     pub in_open_source_repo: bool,
     #[serde(default)]
     pub can_collect_data: bool,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub repo_url: Option<String>,
 }
 
 #[derive(
@@ -92,6 +88,8 @@ pub enum ZetaFormat {
     V0131GitMergeMarkersPrefix,
     V0211Prefill,
     V0211SeedCoder,
+    v0226Hashline,
+    V0304SeedNoEdits,
 }
 
 impl std::fmt::Display for ZetaFormat {
@@ -128,25 +126,6 @@ impl ZetaFormat {
             .collect::<Vec<_>>()
             .concat()
     }
-
-    pub fn special_tokens(&self) -> &'static [&'static str] {
-        match self {
-            ZetaFormat::V0112MiddleAtEnd
-            | ZetaFormat::V0113Ordered
-            | ZetaFormat::V0114180EditableRegion => &[
-                "<|fim_prefix|>",
-                "<|fim_suffix|>",
-                "<|fim_middle|>",
-                "<|file_sep|>",
-                CURSOR_MARKER,
-            ],
-            ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(),
-            ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => {
-                v0131_git_merge_markers_prefix::special_tokens()
-            }
-            ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(),
-        }
-    }
 }
 
 #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
@@ -213,36 +192,35 @@ pub struct RelatedFile {
 pub struct RelatedExcerpt {
     pub row_range: Range<u32>,
     pub text: Arc<str>,
+    #[serde(default)]
+    pub order: usize,
 }
 
 pub fn prompt_input_contains_special_tokens(input: &ZetaPromptInput, format: ZetaFormat) -> bool {
-    format
-        .special_tokens()
+    special_tokens_for_format(format)
         .iter()
         .any(|token| input.cursor_excerpt.contains(token))
 }
 
 pub fn format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> String {
-    format_zeta_prompt_with_budget(input, format, MAX_PROMPT_TOKENS)
+    format_prompt_with_budget_for_format(input, format, MAX_PROMPT_TOKENS)
 }
 
-/// Post-processes model output for the given zeta format by stripping format-specific suffixes.
-pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str {
+pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] {
     match format {
-        ZetaFormat::V0120GitMergeMarkers => output
-            .strip_suffix(v0120_git_merge_markers::END_MARKER)
-            .unwrap_or(output),
-        ZetaFormat::V0131GitMergeMarkersPrefix => output
-            .strip_suffix(v0131_git_merge_markers_prefix::END_MARKER)
-            .unwrap_or(output),
-        ZetaFormat::V0211SeedCoder => output
-            .strip_suffix(seed_coder::END_MARKER)
-            .unwrap_or(output),
-        _ => output,
+        ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::special_tokens(),
+        ZetaFormat::V0113Ordered => v0113_ordered::special_tokens(),
+        ZetaFormat::V0114180EditableRegion => v0114180_editable_region::special_tokens(),
+        ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(),
+        ZetaFormat::V0131GitMergeMarkersPrefix => v0131_git_merge_markers_prefix::special_tokens(),
+        ZetaFormat::V0211Prefill => v0211_prefill::special_tokens(),
+        ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(),
+        ZetaFormat::v0226Hashline => hashline::special_tokens(),
+        ZetaFormat::V0304SeedNoEdits => seed_coder::special_tokens(),
     }
 }
 
-pub fn excerpt_range_for_format(
+pub fn excerpt_ranges_for_format(
     format: ZetaFormat,
     ranges: &ExcerptRanges,
 ) -> (Range<usize>, Range<usize>) {
@@ -251,85 +229,93 @@ pub fn excerpt_range_for_format(
             ranges.editable_150.clone(),
             ranges.editable_150_context_350.clone(),
         ),
-        ZetaFormat::V0114180EditableRegion
-        | ZetaFormat::V0120GitMergeMarkers
+        ZetaFormat::V0114180EditableRegion => (
+            ranges.editable_180.clone(),
+            ranges.editable_180_context_350.clone(),
+        ),
+        ZetaFormat::V0120GitMergeMarkers
         | ZetaFormat::V0131GitMergeMarkersPrefix
         | ZetaFormat::V0211Prefill
-        | ZetaFormat::V0211SeedCoder => (
+        | ZetaFormat::V0211SeedCoder
+        | ZetaFormat::v0226Hashline
+        | ZetaFormat::V0304SeedNoEdits => (
             ranges.editable_350.clone(),
             ranges.editable_350_context_150.clone(),
         ),
     }
 }
 
-pub fn resolve_cursor_region(
-    input: &ZetaPromptInput,
-    format: ZetaFormat,
-) -> (&str, Range<usize>, usize) {
-    let Some(ranges) = &input.excerpt_ranges else {
-        return (
-            &input.cursor_excerpt,
-            input.editable_range_in_excerpt.clone(),
-            input.cursor_offset_in_excerpt,
-        );
-    };
-
-    let (editable_range, context_range) = excerpt_range_for_format(format, ranges);
-    let context_start = context_range.start;
-    let context_text = &input.cursor_excerpt[context_range];
-    let adjusted_editable =
-        (editable_range.start - context_start)..(editable_range.end - context_start);
-    let adjusted_cursor = input.cursor_offset_in_excerpt - context_start;
-
-    (context_text, adjusted_editable, adjusted_cursor)
-}
-
-fn format_zeta_prompt_with_budget(
-    input: &ZetaPromptInput,
+pub fn write_cursor_excerpt_section_for_format(
     format: ZetaFormat,
-    max_tokens: usize,
-) -> String {
-    let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format);
-    let path = &*input.cursor_path;
-
-    let mut cursor_section = String::new();
+    prompt: &mut String,
+    path: &Path,
+    context: &str,
+    editable_range: &Range<usize>,
+    cursor_offset: usize,
+) {
     match format {
-        ZetaFormat::V0112MiddleAtEnd => {
-            v0112_middle_at_end::write_cursor_excerpt_section(
-                &mut cursor_section,
-                path,
-                context,
-                &editable_range,
-                cursor_offset,
-            );
-        }
+        ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::write_cursor_excerpt_section(
+            prompt,
+            path,
+            context,
+            editable_range,
+            cursor_offset,
+        ),
         ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion => {
             v0113_ordered::write_cursor_excerpt_section(
-                &mut cursor_section,
+                prompt,
                 path,
                 context,
-                &editable_range,
+                editable_range,
                 cursor_offset,
             )
         }
         ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::write_cursor_excerpt_section(
-            &mut cursor_section,
+            prompt,
             path,
             context,
-            &editable_range,
+            editable_range,
             cursor_offset,
         ),
         ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => {
             v0131_git_merge_markers_prefix::write_cursor_excerpt_section(
-                &mut cursor_section,
+                prompt,
                 path,
                 context,
-                &editable_range,
+                editable_range,
                 cursor_offset,
             )
         }
-        ZetaFormat::V0211SeedCoder => {
-            return seed_coder::format_prompt_with_budget(
+        ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => {
+            seed_coder::write_cursor_excerpt_section(
+                prompt,
+                path,
+                context,
+                editable_range,
+                cursor_offset,
+            )
+        }
+        ZetaFormat::v0226Hashline => hashline::write_cursor_excerpt_section(
+            prompt,
+            path,
+            context,
+            editable_range,
+            cursor_offset,
+        ),
+    }
+}
+
+pub fn format_prompt_with_budget_for_format(
+    input: &ZetaPromptInput,
+    format: ZetaFormat,
+    max_tokens: usize,
+) -> String {
+    let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format);
+    let path = &*input.cursor_path;
+
+    match format {
+        ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => {
+            seed_coder::format_prompt_with_budget(
                 path,
                 context,
                 &editable_range,
@@ -337,50 +323,179 @@ fn format_zeta_prompt_with_budget(
                 &input.events,
                 &input.related_files,
                 max_tokens,
-            );
+            )
         }
-    }
+        _ => {
+            let mut cursor_section = String::new();
+            write_cursor_excerpt_section_for_format(
+                format,
+                &mut cursor_section,
+                path,
+                context,
+                &editable_range,
+                cursor_offset,
+            );
 
-    let cursor_tokens = estimate_tokens(cursor_section.len());
-    let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens);
+            let cursor_tokens = estimate_tokens(cursor_section.len());
+            let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens);
 
-    let edit_history_section = format_edit_history_within_budget(
-        &input.events,
-        "<|file_sep|>",
-        "edit history",
-        budget_after_cursor,
-    );
-    let edit_history_tokens = estimate_tokens(edit_history_section.len());
-    let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens);
+            let edit_history_section = format_edit_history_within_budget(
+                &input.events,
+                "<|file_sep|>",
+                "edit history",
+                budget_after_cursor,
+            );
+            let edit_history_tokens = estimate_tokens(edit_history_section.len());
+            let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens);
 
-    let related_files_section = format_related_files_within_budget(
-        &input.related_files,
-        "<|file_sep|>",
-        budget_after_edit_history,
-    );
+            let related_files_section = format_related_files_within_budget(
+                &input.related_files,
+                "<|file_sep|>",
+                "",
+                budget_after_edit_history,
+            );
 
-    let mut prompt = String::new();
-    prompt.push_str(&related_files_section);
-    prompt.push_str(&edit_history_section);
-    prompt.push_str(&cursor_section);
-    prompt
+            let mut prompt = String::new();
+            prompt.push_str(&related_files_section);
+            prompt.push_str(&edit_history_section);
+            prompt.push_str(&cursor_section);
+            prompt
+        }
+    }
 }
 
-pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String {
+pub fn get_prefill_for_format(
+    format: ZetaFormat,
+    context: &str,
+    editable_range: &Range<usize>,
+) -> String {
     match format {
+        ZetaFormat::V0211Prefill => v0211_prefill::get_prefill(context, editable_range),
         ZetaFormat::V0112MiddleAtEnd
         | ZetaFormat::V0113Ordered
         | ZetaFormat::V0114180EditableRegion
         | ZetaFormat::V0120GitMergeMarkers
         | ZetaFormat::V0131GitMergeMarkersPrefix
-        | ZetaFormat::V0211SeedCoder => String::new(),
-        ZetaFormat::V0211Prefill => {
-            let (context, editable_range, _) = resolve_cursor_region(input, format);
-            v0211_prefill::get_prefill(context, &editable_range)
+        | ZetaFormat::V0211SeedCoder
+        | ZetaFormat::v0226Hashline
+        | ZetaFormat::V0304SeedNoEdits => String::new(),
+    }
+}
+
+pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str> {
+    match format {
+        ZetaFormat::V0120GitMergeMarkers => Some(v0120_git_merge_markers::END_MARKER),
+        ZetaFormat::V0131GitMergeMarkersPrefix => Some(v0131_git_merge_markers_prefix::END_MARKER),
+        ZetaFormat::V0211Prefill => Some(v0131_git_merge_markers_prefix::END_MARKER),
+        ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => Some(seed_coder::END_MARKER),
+        ZetaFormat::V0112MiddleAtEnd
+        | ZetaFormat::V0113Ordered
+        | ZetaFormat::V0114180EditableRegion
+        | ZetaFormat::v0226Hashline => None,
+    }
+}
+
+pub fn current_region_markers_for_format(format: ZetaFormat) -> (&'static str, &'static str) {
+    match format {
+        ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"),
+        ZetaFormat::V0113Ordered
+        | ZetaFormat::V0114180EditableRegion
+        | ZetaFormat::v0226Hashline => ("<|fim_middle|>current\n", "<|fim_suffix|>"),
+        ZetaFormat::V0120GitMergeMarkers
+        | ZetaFormat::V0131GitMergeMarkersPrefix
+        | ZetaFormat::V0211Prefill => (
+            v0120_git_merge_markers::START_MARKER,
+            v0120_git_merge_markers::SEPARATOR,
+        ),
+        ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => {
+            (seed_coder::START_MARKER, seed_coder::SEPARATOR)
         }
     }
 }
 
+pub fn clean_extracted_region_for_format(format: ZetaFormat, region: &str) -> String {
+    match format {
+        ZetaFormat::v0226Hashline => hashline::strip_hashline_prefixes(region),
+        _ => region.to_string(),
+    }
+}
+
+pub fn encode_patch_as_output_for_format(
+    format: ZetaFormat,
+    old_editable_region: &str,
+    patch: &str,
+    cursor_offset: Option<usize>,
+) -> Result<Option<String>> {
+    match format {
+        ZetaFormat::v0226Hashline => {
+            hashline::patch_to_edit_commands(old_editable_region, patch, cursor_offset).map(Some)
+        }
+        ZetaFormat::V0304SeedNoEdits => Ok(seed_coder::no_edits(patch)),
+        _ => Ok(None),
+    }
+}
+
+pub fn output_with_context_for_format(
+    format: ZetaFormat,
+    old_editable_region: &str,
+    output: &str,
+) -> Result<Option<String>> {
+    match format {
+        ZetaFormat::v0226Hashline => {
+            if hashline::output_has_edit_commands(output) {
+                Ok(Some(hashline::apply_edit_commands(
+                    old_editable_region,
+                    output,
+                )))
+            } else {
+                Ok(None)
+            }
+        }
+        ZetaFormat::V0304SeedNoEdits => {
+            if output.starts_with(seed_coder::NO_EDITS) {
+                Ok(Some(old_editable_region.to_owned()))
+            } else {
+                Ok(None)
+            }
+        }
+        _ => Ok(None),
+    }
+}
+
+/// Post-processes model output for the given zeta format by stripping format-specific suffixes.
+pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str {
+    match output_end_marker_for_format(format) {
+        Some(marker) => output.strip_suffix(marker).unwrap_or(output),
+        None => output,
+    }
+}
+
+pub fn excerpt_range_for_format(
+    format: ZetaFormat,
+    ranges: &ExcerptRanges,
+) -> (Range<usize>, Range<usize>) {
+    excerpt_ranges_for_format(format, ranges)
+}
+
+pub fn resolve_cursor_region(
+    input: &ZetaPromptInput,
+    format: ZetaFormat,
+) -> (&str, Range<usize>, usize) {
+    let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges);
+    let context_start = context_range.start;
+    let context_text = &input.cursor_excerpt[context_range];
+    let adjusted_editable =
+        (editable_range.start - context_start)..(editable_range.end - context_start);
+    let adjusted_cursor = input.cursor_offset_in_excerpt - context_start;
+
+    (context_text, adjusted_editable, adjusted_cursor)
+}
+
+pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String {
+    let (context, editable_range, _) = resolve_cursor_region(input, format);
+    get_prefill_for_format(format, context, &editable_range)
+}
+
 fn format_edit_history_within_budget(
     events: &[Arc<Event>],
     file_marker: &str,
@@ -419,53 +534,98 @@ fn format_edit_history_within_budget(
     result
 }
 
-fn format_related_files_within_budget(
+fn excerpt_rendered_tokens(excerpt: &RelatedExcerpt, file_max_row: u32) -> usize {
+    let needs_newline = !excerpt.text.ends_with('\n');
+    let needs_ellipsis = excerpt.row_range.end < file_max_row;
+    let len = excerpt.text.len()
+        + if needs_newline { "\n".len() } else { 0 }
+        + if needs_ellipsis { "...\n".len() } else { 0 };
+    estimate_tokens(len)
+}
+
+pub fn format_related_files_within_budget(
     related_files: &[RelatedFile],
-    file_marker: &str,
+    file_prefix: &str,
+    file_suffix: &str,
     max_tokens: usize,
 ) -> String {
-    let mut result = String::new();
-    let mut total_tokens = 0;
+    struct ExcerptCandidate {
+        file_ix: usize,
+        excerpt_ix: usize,
+        order: usize,
+    }
 
-    for file in related_files {
-        let path_str = file.path.to_string_lossy();
-        let header = format!("{}{}\n", file_marker, path_str);
-        let header_tokens = estimate_tokens(header.len());
+    let mut excerpt_candidates: Vec<ExcerptCandidate> = related_files
+        .iter()
+        .enumerate()
+        .flat_map(|(file_ix, file)| {
+            file.excerpts
+                .iter()
+                .enumerate()
+                .map(move |(excerpt_ix, e)| ExcerptCandidate {
+                    file_ix,
+                    excerpt_ix,
+                    order: e.order,
+                })
+        })
+        .collect();
+
+    // Pre-compute file header strings and their token costs.
+    let file_headers: Vec<String> = related_files
+        .iter()
+        .map(|file| {
+            let path_str = file.path.to_string_lossy();
+            format!("{}{}\n", file_prefix, path_str)
+        })
+        .collect();
 
-        if total_tokens + header_tokens > max_tokens {
+    // Sort the excerpts by their order and determine how many fit within the budget.
+    let mut total_tokens = 0;
+    let mut included_excerpt_count = 0_usize;
+    let mut included_file_indices = vec![false; related_files.len()];
+    excerpt_candidates.sort_by_key(|e| (e.order, e.file_ix, e.excerpt_ix));
+    for candidate in &excerpt_candidates {
+        let file = &related_files[candidate.file_ix];
+        let excerpt = &file.excerpts[candidate.excerpt_ix];
+        let file_already_included = included_file_indices[candidate.file_ix];
+        let header_cost = if file_already_included {
+            0
+        } else {
+            estimate_tokens(file_headers[candidate.file_ix].len() + file_suffix.len())
+        };
+        let excerpt_cost = excerpt_rendered_tokens(excerpt, file.max_row);
+        if total_tokens + header_cost + excerpt_cost > max_tokens {
             break;
         }
+        total_tokens += header_cost + excerpt_cost;
+        if !file_already_included {
+            included_file_indices[candidate.file_ix] = true;
+        }
+        included_excerpt_count += 1;
+    }
 
-        let mut file_tokens = header_tokens;
-        let mut excerpts_to_include = 0;
+    excerpt_candidates.truncate(included_excerpt_count);
+    excerpt_candidates.sort_unstable_by_key(|c| (c.file_ix, c.excerpt_ix));
 
-        for excerpt in &file.excerpts {
-            let needs_newline = !excerpt.text.ends_with('\n');
-            let needs_ellipsis = excerpt.row_range.end < file.max_row;
-            let excerpt_len = excerpt.text.len()
-                + if needs_newline { "\n".len() } else { 0 }
-                + if needs_ellipsis { "...\n".len() } else { 0 };
-
-            let excerpt_tokens = estimate_tokens(excerpt_len);
-            if total_tokens + file_tokens + excerpt_tokens > max_tokens {
-                break;
+    // Render all of the files that fit within the token budget, in the original order.
+    let mut result = String::new();
+    let mut last_file_ix = None;
+    for candidate in &excerpt_candidates {
+        if last_file_ix != Some(candidate.file_ix) {
+            if last_file_ix.is_some() {
+                result.push_str(file_suffix);
             }
-            file_tokens += excerpt_tokens;
-            excerpts_to_include += 1;
+            result.push_str(&file_headers[candidate.file_ix]);
+            last_file_ix = Some(candidate.file_ix);
         }
-
-        if excerpts_to_include > 0 {
-            total_tokens += file_tokens;
-            result.push_str(&header);
-            for excerpt in file.excerpts.iter().take(excerpts_to_include) {
-                result.push_str(&excerpt.text);
-                if !result.ends_with('\n') {
-                    result.push('\n');
-                }
-                if excerpt.row_range.end < file.max_row {
-                    result.push_str("...\n");
-                }
-            }
+        let file = &related_files[candidate.file_ix];
+        let excerpt = &file.excerpts[candidate.excerpt_ix];
+        result.push_str(&excerpt.text);
+        if !result.ends_with('\n') {
+            result.push('\n');
+        }
+        if excerpt.row_range.end < file.max_row {
+            result.push_str("...\n");
         }
     }
 
@@ -499,6 +659,16 @@ pub fn write_related_files(
 mod v0112_middle_at_end {
     use super::*;
 
+    pub fn special_tokens() -> &'static [&'static str] {
+        &[
+            "<|fim_prefix|>",
+            "<|fim_suffix|>",
+            "<|fim_middle|>",
+            "<|file_sep|>",
+            CURSOR_MARKER,
+        ]
+    }
+
     pub fn write_cursor_excerpt_section(
         prompt: &mut String,
         path: &Path,
@@ -533,6 +703,16 @@ mod v0112_middle_at_end {
 mod v0113_ordered {
     use super::*;
 
+    pub fn special_tokens() -> &'static [&'static str] {
+        &[
+            "<|fim_prefix|>",
+            "<|fim_suffix|>",
+            "<|fim_middle|>",
+            "<|file_sep|>",
+            CURSOR_MARKER,
+        ]
+    }
+
     pub fn write_cursor_excerpt_section(
         prompt: &mut String,
         path: &Path,
@@ -567,6 +747,14 @@ mod v0113_ordered {
     }
 }
 
+mod v0114180_editable_region {
+    use super::*;
+
+    pub fn special_tokens() -> &'static [&'static str] {
+        v0113_ordered::special_tokens()
+    }
+}
+
 pub mod v0120_git_merge_markers {
     //! A prompt that uses git-style merge conflict markers to represent the editable region.
     //!
@@ -665,86 +853,1497 @@ pub mod v0131_git_merge_markers_prefix {
     //! changes applied
     //! >>>>>>> UPDATED
 
-    use super::*;
+    use super::*;
+
+    pub const START_MARKER: &str = "<<<<<<< CURRENT\n";
+    pub const SEPARATOR: &str = "=======\n";
+    pub const END_MARKER: &str = ">>>>>>> UPDATED\n";
+
+    pub fn special_tokens() -> &'static [&'static str] {
+        &[
+            "<|fim_prefix|>",
+            "<|fim_suffix|>",
+            "<|fim_middle|>",
+            "<|file_sep|>",
+            START_MARKER,
+            SEPARATOR,
+            END_MARKER,
+            CURSOR_MARKER,
+        ]
+    }
+
+    pub fn write_cursor_excerpt_section(
+        prompt: &mut String,
+        path: &Path,
+        context: &str,
+        editable_range: &Range<usize>,
+        cursor_offset: usize,
+    ) {
+        let path_str = path.to_string_lossy();
+        write!(prompt, "<|file_sep|>{}\n", path_str).ok();
+
+        prompt.push_str("<|fim_prefix|>");
+        prompt.push_str(&context[..editable_range.start]);
+        prompt.push_str(START_MARKER);
+        prompt.push_str(&context[editable_range.start..cursor_offset]);
+        prompt.push_str(CURSOR_MARKER);
+        prompt.push_str(&context[cursor_offset..editable_range.end]);
+        if !prompt.ends_with('\n') {
+            prompt.push('\n');
+        }
+        prompt.push_str(SEPARATOR);
+
+        prompt.push_str("<|fim_suffix|>");
+        prompt.push_str(&context[editable_range.end..]);
+        if !prompt.ends_with('\n') {
+            prompt.push('\n');
+        }
+
+        prompt.push_str("<|fim_middle|>");
+    }
+}
+
+pub mod v0211_prefill {
+    use super::*;
+
+    pub fn special_tokens() -> &'static [&'static str] {
+        v0131_git_merge_markers_prefix::special_tokens()
+    }
+
+    pub fn get_prefill(context: &str, editable_range: &Range<usize>) -> String {
+        let editable_region = &context[editable_range.start..editable_range.end];
+
+        let prefill_len = (editable_region.len() as f64 * PREFILL_RATIO) as usize;
+        let prefill_len = editable_region.floor_char_boundary(prefill_len);
+
+        // Find a token boundary to avoid splitting tokens in the prefill.
+        // In Qwen2.5-Coder, \n is always the END of a token (e.g. `;\n`,
+        // ` {\n`), and \n\n / \n\n\n are single tokens, so we must include
+        // the \n and consume any consecutive \n characters after it.
+        let prefill = &editable_region[..prefill_len];
+        match prefill.rfind('\n') {
+            Some(pos) => {
+                let mut end = pos + 1;
+                while end < editable_region.len()
+                    && editable_region.as_bytes().get(end) == Some(&b'\n')
+                {
+                    end += 1;
+                }
+                editable_region[..end].to_string()
+            }
+            // No newline found. Fall back to splitting before the last space
+            // (word-level boundary)
+            None => match prefill.rfind(' ') {
+                Some(pos) => prefill[..pos].to_string(),
+                None => prefill.to_string(),
+            },
+        }
+    }
+}
+
+pub mod hashline {
+
+    use std::fmt::Display;
+
+    pub const END_MARKER: &str = "<|fim_middle|>updated";
+    pub const START_MARKER: &str = "<|fim_middle|>current";
+
+    use super::*;
+
+    const SET_COMMAND_MARKER: &str = "<|set|>";
+    const INSERT_COMMAND_MARKER: &str = "<|insert|>";
+
+    pub fn special_tokens() -> &'static [&'static str] {
+        return &[
+            SET_COMMAND_MARKER,
+            "<|set_range|>",
+            INSERT_COMMAND_MARKER,
+            CURSOR_MARKER,
+            "<|file_sep|>",
+            "<|fim_prefix|>",
+            "<|fim_suffix|>",
+            "<|fim_middle|>",
+        ];
+    }
+
+    /// A parsed line reference like `3:c3` (line index 3 with hash 0xc3).
+    #[derive(Debug, Clone, PartialEq, Eq)]
+    struct LineRef {
+        index: usize,
+        hash: u8,
+    }
+
+    impl Display for LineRef {
+        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+            write!(f, "{}:{:02x}", self.index, self.hash)
+        }
+    }
+
+    pub fn hash_line(line: &[u8]) -> u8 {
+        let mut h: u8 = 0;
+        for &byte in line {
+            h = h.wrapping_add(byte);
+        }
+        return h;
+    }
+
+    /// Write the hashline-encoded editable region into `out`. Each line of
+    /// `editable_text` is prefixed with `{line_index}:{hash}|` and the cursor
+    /// marker is inserted at `cursor_offset_in_editable` (byte offset relative
+    /// to the start of `editable_text`).
+    pub fn write_hashline_editable_region(
+        out: &mut String,
+        editable_text: &str,
+        cursor_offset_in_editable: usize,
+    ) {
+        let mut offset = 0;
+        for (i, line) in editable_text.lines().enumerate() {
+            let (head, cursor, tail) = if cursor_offset_in_editable > offset
+                && cursor_offset_in_editable < offset + line.len()
+            {
+                (
+                    &line[..cursor_offset_in_editable - offset],
+                    CURSOR_MARKER,
+                    &line[cursor_offset_in_editable - offset..],
+                )
+            } else {
+                (line, "", "")
+            };
+            write!(
+                out,
+                "\n{}|{head}{cursor}{tail}",
+                LineRef {
+                    index: i,
+                    hash: hash_line(line.as_bytes())
+                }
+            )
+            .unwrap();
+            offset += line.len() + 1;
+        }
+    }
+
+    pub fn write_cursor_excerpt_section(
+        prompt: &mut String,
+        path: &Path,
+        context: &str,
+        editable_range: &Range<usize>,
+        cursor_offset: usize,
+    ) {
+        let path_str = path.to_string_lossy();
+        write!(prompt, "<|file_sep|>{}\n", path_str).ok();
+
+        prompt.push_str("<|fim_prefix|>\n");
+        prompt.push_str(&context[..editable_range.start]);
+        prompt.push_str(START_MARKER);
+
+        let cursor_offset_in_editable = cursor_offset.saturating_sub(editable_range.start);
+        let editable_region = &context[editable_range.clone()];
+        write_hashline_editable_region(prompt, editable_region, cursor_offset_in_editable);
+
+        if !prompt.ends_with('\n') {
+            prompt.push('\n');
+        }
+
+        prompt.push_str("<|fim_suffix|>\n");
+        prompt.push_str(&context[editable_range.end..]);
+        if !prompt.ends_with('\n') {
+            prompt.push('\n');
+        }
+
+        prompt.push_str(END_MARKER);
+    }
+
+    /// A single edit command parsed from the model output.
+    #[derive(Debug)]
+    enum EditCommand<'a> {
+        /// Replace a range of lines (inclusive on both ends). Single-line set is
+        /// represented by `start == end`.
+        Set {
+            start: LineRef,
+            end: LineRef,
+            content: &'a str,
+        },
+        /// Insert new lines after the given line, or before the first line if
+        /// `after` is `None`.
+        Insert {
+            after: Option<LineRef>,
+            content: &'a str,
+        },
+    }
+
+    /// Parse a line reference like `3:c3` into a `LineRef`.
+    fn parse_line_ref(s: &str) -> Option<LineRef> {
+        let (idx_str, hash_str) = s.split_once(':')?;
+        let index = idx_str.parse::<usize>().ok()?;
+        let hash = u8::from_str_radix(hash_str, 16).ok()?;
+        Some(LineRef { index, hash })
+    }
+
+    /// Parse the model output into a list of `EditCommand`s.
+    fn parse_edit_commands(model_output: &str) -> Vec<EditCommand<'_>> {
+        let mut commands = Vec::new();
+        let mut offset = 0usize;
+
+        while offset < model_output.len() {
+            let next_nl = model_output[offset..]
+                .find('\n')
+                .map(|i| offset + i)
+                .unwrap_or(model_output.len());
+            let line = &model_output[offset..next_nl];
+            let line_end = if next_nl < model_output.len() {
+                next_nl + 1
+            } else {
+                next_nl
+            };
+
+            let trimmed = line.trim();
+            let (is_set, specifier) = if let Some(spec) = trimmed.strip_prefix(SET_COMMAND_MARKER) {
+                (true, spec)
+            } else if let Some(spec) = trimmed.strip_prefix(INSERT_COMMAND_MARKER) {
+                (false, spec)
+            } else {
+                offset = line_end;
+                continue;
+            };
+
+            let mut content_end = line_end;
+            let mut scan = line_end;
+
+            while scan < model_output.len() {
+                let body_nl = model_output[scan..]
+                    .find('\n')
+                    .map(|i| scan + i)
+                    .unwrap_or(model_output.len());
+                let body_line = &model_output[scan..body_nl];
+                if body_line.trim().starts_with(SET_COMMAND_MARKER)
+                    || body_line.trim().starts_with(INSERT_COMMAND_MARKER)
+                {
+                    break;
+                }
+                scan = if body_nl < model_output.len() {
+                    body_nl + 1
+                } else {
+                    body_nl
+                };
+                content_end = scan;
+            }
+
+            let content = &model_output[line_end..content_end];
+
+            if is_set {
+                if let Some((start_str, end_str)) = specifier.split_once('-') {
+                    if let (Some(start), Some(end)) =
+                        (parse_line_ref(start_str), parse_line_ref(end_str))
+                    {
+                        commands.push(EditCommand::Set {
+                            start,
+                            end,
+                            content,
+                        });
+                    }
+                } else if let Some(target) = parse_line_ref(specifier) {
+                    commands.push(EditCommand::Set {
+                        start: target.clone(),
+                        end: target,
+                        content,
+                    });
+                }
+            } else {
+                let after = parse_line_ref(specifier);
+                commands.push(EditCommand::Insert { after, content });
+            }
+
+            offset = scan;
+        }
+
+        commands
+    }
+
+    /// Strip the `{line_num}:{hash}|` prefixes from each line of a hashline-encoded
+    /// editable region, returning the plain text content.
+    pub fn strip_hashline_prefixes(region: &str) -> String {
+        let mut decoded: String = region
+            .lines()
+            .map(|line| line.find('|').map_or(line, |pos| &line[pos + 1..]))
+            .collect::<Vec<_>>()
+            .join("\n");
+        if region.ends_with('\n') {
+            decoded.push('\n');
+        }
+        decoded
+    }
+
+    /// Returns `true` if the model output contains `<|set|>` or `<|insert|>` commands
+    /// (as opposed to being a plain full-replacement output).
+    pub fn output_has_edit_commands(model_output: &str) -> bool {
+        model_output.contains(SET_COMMAND_MARKER) || model_output.contains(INSERT_COMMAND_MARKER)
+    }
+
+    /// Apply `<|set|>` and `<|insert|>` edit commands from the model output to the
+    /// original editable region text.
+    ///
+    /// `editable_region` is the original text of the editable region (without hash
+    /// prefixes). `model_output` is the raw model response containing edit commands.
+    ///
+    /// Returns the full replacement text for the editable region.
+    pub fn apply_edit_commands(editable_region: &str, model_output: &str) -> String {
+        let original_lines: Vec<&str> = editable_region.lines().collect();
+        let old_hashes: Vec<u8> = original_lines
+            .iter()
+            .map(|line| hash_line(line.as_bytes()))
+            .collect();
+
+        let commands = parse_edit_commands(model_output);
+
+        // For set operations: indexed by start line → Some((end line index, content))
+        // For insert operations: indexed by line index → vec of content to insert after
+        // Insert-before-first is tracked separately.
+        let mut set_ops: Vec<Option<(usize, &str)>> = vec![None; original_lines.len()];
+        let mut insert_before_first: Vec<&str> = Vec::new();
+        let mut insert_after: Vec<Vec<&str>> = vec![Vec::new(); original_lines.len()];
+
+        for command in &commands {
+            match command {
+                EditCommand::Set {
+                    start,
+                    end,
+                    content,
+                } => {
+                    if start.index < old_hashes.len()
+                        && end.index < old_hashes.len()
+                        && start.index <= end.index
+                        && old_hashes[start.index] == start.hash
+                        && old_hashes[end.index] == end.hash
+                    {
+                        set_ops[start.index] = Some((end.index, *content));
+                    }
+                }
+                EditCommand::Insert { after, content } => match after {
+                    None => insert_before_first.push(*content),
+                    Some(line_ref) => {
+                        if line_ref.index < old_hashes.len()
+                            && old_hashes[line_ref.index] == line_ref.hash
+                        {
+                            insert_after[line_ref.index].push(*content);
+                        }
+                    }
+                },
+            }
+        }
+
+        let mut result = String::new();
+
+        // Emit any insertions before the first line
+        for content in &insert_before_first {
+            result.push_str(content);
+            if !content.ends_with('\n') {
+                result.push('\n');
+            }
+        }
+
+        let mut i = 0;
+        while i < original_lines.len() {
+            if let Some((end_index, replacement)) = set_ops[i].as_ref() {
+                // Replace lines i..=end_index with the replacement content
+                result.push_str(replacement);
+                if !replacement.is_empty() && !replacement.ends_with('\n') {
+                    result.push('\n');
+                }
+                // Emit any insertions after the end of this set range
+                if *end_index < insert_after.len() {
+                    for content in &insert_after[*end_index] {
+                        result.push_str(content);
+                        if !content.ends_with('\n') {
+                            result.push('\n');
+                        }
+                    }
+                }
+                i = end_index + 1;
+            } else {
+                // Keep the original line
+                result.push_str(original_lines[i]);
+                result.push('\n');
+                // Emit any insertions after this line
+                for content in &insert_after[i] {
+                    result.push_str(content);
+                    if !content.ends_with('\n') {
+                        result.push('\n');
+                    }
+                }
+                i += 1;
+            }
+        }
+
+        // Preserve trailing newline behavior: if the original ended with a
+        // newline the result already has one; if it didn't, trim the extra one
+        // we added.
+        if !editable_region.ends_with('\n') && result.ends_with('\n') {
+            result.pop();
+        }
+
+        result
+    }
+
+    /// Convert a unified diff patch into hashline edit commands.
+    ///
+    /// Parses the unified diff `patch` directly to determine which lines of
+    /// `old_text` are deleted/replaced and what new lines are added, then emits
+    /// `<|set|>` and `<|insert|>` edit commands referencing old lines by their
+    /// `{index}:{hash}` identifiers.
+    ///
+    /// `cursor_offset` is an optional byte offset into the first hunk's new
+    /// text (context + additions) where the cursor marker should be placed.
+    pub fn patch_to_edit_commands(
+        old_text: &str,
+        patch: &str,
+        cursor_offset: Option<usize>,
+    ) -> Result<String> {
+        let old_lines: Vec<&str> = old_text.lines().collect();
+        let old_hashes: Vec<u8> = old_lines
+            .iter()
+            .map(|line| hash_line(line.as_bytes()))
+            .collect();
+
+        let mut result = String::new();
+        let mut first_hunk = true;
+
+        struct Hunk<'a> {
+            line_range: Range<usize>,
+            new_text_lines: Vec<&'a str>,
+            cursor_line_offset_in_new_text: Option<(usize, usize)>,
+        }
+
+        // Parse the patch line by line. We only care about hunk headers,
+        // context, deletions, and additions.
+        let mut old_line_index: usize = 0;
+        let mut current_hunk: Option<Hunk> = None;
+        // Byte offset tracking within the hunk's new text for cursor placement.
+        let mut new_text_byte_offset: usize = 0;
+        // The line index of the last old line seen before/in the current hunk
+        // (used for insert-after reference).
+        let mut last_old_line_before_hunk: Option<usize> = None;
+
+        fn flush_hunk(
+            hunk: Hunk,
+            last_old_line: Option<usize>,
+            result: &mut String,
+            old_hashes: &[u8],
+        ) {
+            if hunk.line_range.is_empty() {
+                // Pure insertion — reference the old line to insert after when in bounds.
+                if let Some(after) = last_old_line
+                    && let Some(&hash) = old_hashes.get(after)
+                {
+                    write!(
+                        result,
+                        "{INSERT_COMMAND_MARKER}{}\n",
+                        LineRef { index: after, hash }
+                    )
+                    .unwrap();
+                } else {
+                    result.push_str(INSERT_COMMAND_MARKER);
+                    result.push('\n');
+                }
+            } else {
+                let start = hunk.line_range.start;
+                let end_exclusive = hunk.line_range.end;
+                let deleted_line_count = end_exclusive.saturating_sub(start);
+
+                if deleted_line_count == 1 {
+                    if let Some(&hash) = old_hashes.get(start) {
+                        write!(
+                            result,
+                            "{SET_COMMAND_MARKER}{}\n",
+                            LineRef { index: start, hash }
+                        )
+                        .unwrap();
+                    } else {
+                        result.push_str(SET_COMMAND_MARKER);
+                        result.push('\n');
+                    }
+                } else {
+                    let end_inclusive = end_exclusive - 1;
+                    match (
+                        old_hashes.get(start).copied(),
+                        old_hashes.get(end_inclusive).copied(),
+                    ) {
+                        (Some(start_hash), Some(end_hash)) => {
+                            write!(
+                                result,
+                                "{SET_COMMAND_MARKER}{}-{}\n",
+                                LineRef {
+                                    index: start,
+                                    hash: start_hash
+                                },
+                                LineRef {
+                                    index: end_inclusive,
+                                    hash: end_hash
+                                }
+                            )
+                            .unwrap();
+                        }
+                        _ => {
+                            result.push_str(SET_COMMAND_MARKER);
+                            result.push('\n');
+                        }
+                    }
+                }
+            }
+            for (line_offset, line) in hunk.new_text_lines.iter().enumerate() {
+                if let Some((cursor_line_offset, char_offset)) = hunk.cursor_line_offset_in_new_text
+                    && line_offset == cursor_line_offset
+                {
+                    result.push_str(&line[..char_offset]);
+                    result.push_str(CURSOR_MARKER);
+                    result.push_str(&line[char_offset..]);
+                    continue;
+                }
+
+                result.push_str(line);
+            }
+        }
+
+        for raw_line in patch.split_inclusive('\n') {
+            if raw_line.starts_with("@@") {
+                // Flush any pending change group left over from the previous patch hunk.
+                if let Some(hunk) = current_hunk.take() {
+                    flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes);
+                }
+
+                // Parse hunk header: @@ -old_start[,old_count] +new_start[,new_count] @@
+                // We intentionally do not trust old_start as a direct local index into `old_text`,
+                // because some patches are produced against a larger file region and carry
+                // non-local line numbers. We keep indexing local by advancing from parsed patch lines.
+                if first_hunk {
+                    new_text_byte_offset = 0;
+                    first_hunk = false;
+                }
+                continue;
+            }
+
+            if raw_line.starts_with("---") || raw_line.starts_with("+++") {
+                continue;
+            }
+            if raw_line.starts_with("\\ No newline") {
+                continue;
+            }
 
-    pub const START_MARKER: &str = "<<<<<<< CURRENT\n";
-    pub const SEPARATOR: &str = "=======\n";
-    pub const END_MARKER: &str = ">>>>>>> UPDATED\n";
+            if raw_line.starts_with('-') {
+                // Extend or start a change hunk with this deleted old line.
+                match &mut current_hunk {
+                    Some(Hunk {
+                        line_range: range, ..
+                    }) => range.end = old_line_index + 1,
+                    None => {
+                        current_hunk = Some(Hunk {
+                            line_range: old_line_index..old_line_index + 1,
+                            new_text_lines: Vec::new(),
+                            cursor_line_offset_in_new_text: None,
+                        });
+                    }
+                }
+                old_line_index += 1;
+            } else if let Some(added_content) = raw_line.strip_prefix('+') {
+                // Place cursor marker if cursor_offset falls within this line.
+                let mut cursor_line_offset = None;
+                if let Some(cursor_off) = cursor_offset
+                    && (first_hunk
+                        || cursor_off >= new_text_byte_offset
+                            && cursor_off <= new_text_byte_offset + added_content.len())
+                {
+                    let line_offset = added_content.floor_char_boundary(
+                        cursor_off
+                            .saturating_sub(new_text_byte_offset)
+                            .min(added_content.len()),
+                    );
+                    cursor_line_offset = Some(line_offset);
+                }
 
-    pub fn special_tokens() -> &'static [&'static str] {
-        &[
-            "<|fim_prefix|>",
-            "<|fim_suffix|>",
-            "<|fim_middle|>",
-            "<|file_sep|>",
-            START_MARKER,
-            SEPARATOR,
-            END_MARKER,
-            CURSOR_MARKER,
-        ]
+                new_text_byte_offset += added_content.len();
+
+                let hunk = current_hunk.get_or_insert(Hunk {
+                    line_range: old_line_index..old_line_index,
+                    new_text_lines: vec![],
+                    cursor_line_offset_in_new_text: None,
+                });
+                hunk.new_text_lines.push(added_content);
+                hunk.cursor_line_offset_in_new_text = cursor_line_offset
+                    .map(|offset_in_line| (hunk.new_text_lines.len() - 1, offset_in_line));
+            } else {
+                // Context line (starts with ' ' or is empty).
+                if let Some(hunk) = current_hunk.take() {
+                    flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes);
+                }
+                last_old_line_before_hunk = Some(old_line_index);
+                old_line_index += 1;
+                let content = raw_line.strip_prefix(' ').unwrap_or(raw_line);
+                new_text_byte_offset += content.len();
+            }
+        }
+
+        // Flush final group.
+        if let Some(hunk) = current_hunk.take() {
+            flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes);
+        }
+
+        // Trim a single trailing newline.
+        if result.ends_with('\n') {
+            result.pop();
+        }
+
+        Ok(result)
     }
 
-    pub fn write_cursor_excerpt_section(
-        prompt: &mut String,
-        path: &Path,
-        context: &str,
-        editable_range: &Range<usize>,
-        cursor_offset: usize,
-    ) {
-        let path_str = path.to_string_lossy();
-        write!(prompt, "<|file_sep|>{}\n", path_str).ok();
+    #[cfg(test)]
+    mod tests {
+        use super::*;
+        use indoc::indoc;
+
+        #[test]
+        fn test_format_cursor_region() {
+            struct Case {
+                name: &'static str,
+                context: &'static str,
+                editable_range: Range<usize>,
+                cursor_offset: usize,
+                expected: &'static str,
+            }
 
-        prompt.push_str("<|fim_prefix|>");
-        prompt.push_str(&context[..editable_range.start]);
-        prompt.push_str(START_MARKER);
-        prompt.push_str(&context[editable_range.start..cursor_offset]);
-        prompt.push_str(CURSOR_MARKER);
-        prompt.push_str(&context[cursor_offset..editable_range.end]);
-        if !prompt.ends_with('\n') {
-            prompt.push('\n');
+            let cases = [
+                Case {
+                    name: "basic_cursor_placement",
+                    context: "hello world\n",
+                    editable_range: 0..12,
+                    cursor_offset: 5,
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:5c|hello<|user_cursor|> world
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "multiline_cursor_on_second_line",
+                    context: "aaa\nbbb\nccc\n",
+                    editable_range: 0..12,
+                    cursor_offset: 5, // byte 5 → 1 byte into "bbb"
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:23|aaa
+                    1:26|b<|user_cursor|>bb
+                    2:29|ccc
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "no_trailing_newline_in_context",
+                    context: "line1\nline2",
+                    editable_range: 0..11,
+                    cursor_offset: 3,
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:d9|lin<|user_cursor|>e1
+                    1:da|line2
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "leading_newline_in_editable_region",
+                    context: "\nabc\n",
+                    editable_range: 0..5,
+                    cursor_offset: 2, // offset 2 = between 'a' and 'b' ('\n' is byte 0, 'a' is byte 1)
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:00|
+                    1:26|a<|user_cursor|>bc
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "with_suffix",
+                    context: "abc\ndef",
+                    editable_range: 0..4, // editable region = "abc\n", suffix = "def"
+                    cursor_offset: 2,
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:26|ab<|user_cursor|>c
+                    <|fim_suffix|>
+                    def
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "unicode_two_byte_chars",
+                    context: "héllo\n",
+                    editable_range: 0..7,
+                    cursor_offset: 3, // byte 3 = after "hé" (h=1 byte, é=2 bytes), before "llo"
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:1b|hé<|user_cursor|>llo
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "unicode_three_byte_chars",
+                    context: "日本語\n",
+                    editable_range: 0..10,
+                    cursor_offset: 6, // byte 6 = after "日本" (3+3 bytes), before "語"
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:80|日本<|user_cursor|>語
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "unicode_four_byte_chars",
+                    context: "a🌍b\n",
+                    editable_range: 0..7,
+                    cursor_offset: 5, // byte 5 = after "a🌍" (1+4 bytes), before "b"
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:6b|a🌍<|user_cursor|>b
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "cursor_at_start_of_region_not_placed",
+                    context: "abc\n",
+                    editable_range: 0..4,
+                    cursor_offset: 0, // cursor_offset(0) > offset(0) is false → cursor not placed
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:26|abc
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "cursor_at_end_of_line_not_placed",
+                    context: "abc\ndef\n",
+                    editable_range: 0..8,
+                    cursor_offset: 3, // byte 3 = the \n after "abc" → falls between lines, not placed
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    <|fim_middle|>current
+                    0:26|abc
+                    1:2f|def
+                    <|fim_suffix|>
+                    <|fim_middle|>updated"},
+                },
+                Case {
+                    name: "cursor_offset_relative_to_context_not_editable_region",
+                    // cursor_offset is relative to `context`, so when editable_range.start > 0,
+                    // write_cursor_excerpt_section must subtract it before comparing against
+                    // per-line offsets within the editable region.
+                    context: "pre\naaa\nbbb\nsuf\n",
+                    editable_range: 4..12, // editable region = "aaa\nbbb\n"
+                    cursor_offset: 9,      // byte 9 in context = second 'b' in "bbb"
+                    expected: indoc! {"
+                    <|file_sep|>test.rs
+                    <|fim_prefix|>
+                    pre
+                    <|fim_middle|>current
+                    0:23|aaa
+                    1:26|b<|user_cursor|>bb
+                    <|fim_suffix|>
+                    suf
+                    <|fim_middle|>updated"},
+                },
+            ];
+
+            for case in &cases {
+                let mut prompt = String::new();
+                hashline::write_cursor_excerpt_section(
+                    &mut prompt,
+                    Path::new("test.rs"),
+                    case.context,
+                    &case.editable_range,
+                    case.cursor_offset,
+                );
+                assert_eq!(prompt, case.expected, "failed case: {}", case.name);
+            }
         }
-        prompt.push_str(SEPARATOR);
 
-        prompt.push_str("<|fim_suffix|>");
-        prompt.push_str(&context[editable_range.end..]);
-        if !prompt.ends_with('\n') {
-            prompt.push('\n');
+        #[test]
+        fn test_apply_edit_commands() {
+            struct Case {
+                name: &'static str,
+                original: &'static str,
+                model_output: &'static str,
+                expected: &'static str,
+            }
+
+            let cases = vec![
+                Case {
+                    name: "set_single_line",
+                    original: indoc! {"
+                    let mut total = 0;
+                    for product in products {
+                        total += ;
+                    }
+                    total
+                "},
+                    model_output: indoc! {"
+                    <|set|>2:87
+                        total += product.price;
+                "},
+                    expected: indoc! {"
+                    let mut total = 0;
+                    for product in products {
+                        total += product.price;
+                    }
+                    total
+                "},
+                },
+                Case {
+                    name: "set_range",
+                    original: indoc! {"
+                    fn foo() {
+                        let x = 1;
+                        let y = 2;
+                        let z = 3;
+                    }
+                "},
+                    model_output: indoc! {"
+                    <|set|>1:46-3:4a
+                        let sum = 6;
+                "},
+                    expected: indoc! {"
+                    fn foo() {
+                        let sum = 6;
+                    }
+                "},
+                },
+                Case {
+                    name: "insert_after_line",
+                    original: indoc! {"
+                    fn main() {
+                        let x = 1;
+                    }
+                "},
+                    model_output: indoc! {"
+                    <|insert|>1:46
+                        let y = 2;
+                "},
+                    expected: indoc! {"
+                    fn main() {
+                        let x = 1;
+                        let y = 2;
+                    }
+                "},
+                },
+                Case {
+                    name: "insert_before_first",
+                    original: indoc! {"
+                    let x = 1;
+                    let y = 2;
+                "},
+                    model_output: indoc! {"
+                    <|insert|>
+                    use std::io;
+                "},
+                    expected: indoc! {"
+                    use std::io;
+                    let x = 1;
+                    let y = 2;
+                "},
+                },
+                Case {
+                    name: "set_with_cursor_marker",
+                    original: indoc! {"
+                    fn main() {
+                        println!();
+                    }
+                "},
+                    model_output: indoc! {"
+                    <|set|>1:34
+                        eprintln!(\"<|user_cursor|>\");
+                "},
+                    expected: indoc! {"
+                    fn main() {
+                        eprintln!(\"<|user_cursor|>\");
+                    }
+                "},
+                },
+                Case {
+                    name: "multiple_set_commands",
+                    original: indoc! {"
+                    aaa
+                    bbb
+                    ccc
+                    ddd
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:23
+                    AAA
+                    <|set|>2:29
+                    CCC
+                "},
+                    expected: indoc! {"
+                    AAA
+                    bbb
+                    CCC
+                    ddd
+                "},
+                },
+                Case {
+                    name: "set_range_multiline_replacement",
+                    original: indoc! {"
+                    fn handle_submit() {
+                    }
+
+                    fn handle_keystroke() {
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:3f-1:7d
+                    fn handle_submit(modal_state: &mut ModalState) {
+                        <|user_cursor|>
+                    }
+                "},
+                    expected: indoc! {"
+                    fn handle_submit(modal_state: &mut ModalState) {
+                        <|user_cursor|>
+                    }
+
+                    fn handle_keystroke() {
+                "},
+                },
+                Case {
+                    name: "no_edit_commands_returns_original",
+                    original: indoc! {"
+                    hello
+                    world
+                "},
+                    model_output: "some random text with no commands",
+                    expected: indoc! {"
+                    hello
+                    world
+                "},
+                },
+                Case {
+                    name: "wrong_hash_set_ignored",
+                    original: indoc! {"
+                    aaa
+                    bbb
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:ff
+                    ZZZ
+                "},
+                    expected: indoc! {"
+                    aaa
+                    bbb
+                "},
+                },
+                Case {
+                    name: "insert_and_set_combined",
+                    original: indoc! {"
+                    alpha
+                    beta
+                    gamma
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:06
+                    ALPHA
+                    <|insert|>1:9c
+                    beta_extra
+                "},
+                    expected: indoc! {"
+                    ALPHA
+                    beta
+                    beta_extra
+                    gamma
+                "},
+                },
+                Case {
+                    name: "no_trailing_newline_preserved",
+                    original: "hello\nworld",
+                    model_output: indoc! {"
+                    <|set|>0:14
+                    HELLO
+                "},
+                    expected: "HELLO\nworld",
+                },
+                Case {
+                    name: "set_range_hash_mismatch_in_end_bound",
+                    original: indoc! {"
+                    one
+                    two
+                    three
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:42-2:ff
+                    ONE_TWO_THREE
+                "},
+                    expected: indoc! {"
+                    one
+                    two
+                    three
+                "},
+                },
+                Case {
+                    name: "set_range_start_greater_than_end_ignored",
+                    original: indoc! {"
+                    a
+                    b
+                    c
+                "},
+                    model_output: indoc! {"
+                    <|set|>2:63-1:62
+                    X
+                "},
+                    expected: indoc! {"
+                    a
+                    b
+                    c
+                "},
+                },
+                Case {
+                    name: "insert_out_of_bounds_ignored",
+                    original: indoc! {"
+                    x
+                    y
+                "},
+                    model_output: indoc! {"
+                    <|insert|>99:aa
+                    z
+                "},
+                    expected: indoc! {"
+                    x
+                    y
+                "},
+                },
+                Case {
+                    name: "set_out_of_bounds_ignored",
+                    original: indoc! {"
+                    x
+                    y
+                "},
+                    model_output: indoc! {"
+                    <|set|>99:aa
+                    z
+                "},
+                    expected: indoc! {"
+                    x
+                    y
+                "},
+                },
+                Case {
+                    name: "malformed_set_command_ignored",
+                    original: indoc! {"
+                    alpha
+                    beta
+                "},
+                    model_output: indoc! {"
+                    <|set|>not-a-line-ref
+                    UPDATED
+                "},
+                    expected: indoc! {"
+                    alpha
+                    beta
+                "},
+                },
+                Case {
+                    name: "malformed_insert_hash_treated_as_before_first",
+                    original: indoc! {"
+                    alpha
+                    beta
+                "},
+                    model_output: indoc! {"
+                    <|insert|>1:nothex
+                    preamble
+                "},
+                    expected: indoc! {"
+                    preamble
+                    alpha
+                    beta
+                "},
+                },
+                Case {
+                    name: "set_then_insert_same_target_orders_insert_after_replacement",
+                    original: indoc! {"
+                    cat
+                    dog
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:38
+                    CAT
+                    <|insert|>0:38
+                    TAIL
+                "},
+                    expected: indoc! {"
+                    CAT
+                    TAIL
+                    dog
+                "},
+                },
+                Case {
+                    name: "overlapping_set_ranges_last_wins",
+                    original: indoc! {"
+                    a
+                    b
+                    c
+                    d
+                "},
+                    model_output: indoc! {"
+                    <|set|>0:61-2:63
+                    FIRST
+                    <|set|>1:62-3:64
+                    SECOND
+                "},
+                    expected: indoc! {"
+                    FIRST
+                    d
+                "},
+                },
+                Case {
+                    name: "insert_before_first_and_after_line",
+                    original: indoc! {"
+                    a
+                    b
+                "},
+                    model_output: indoc! {"
+                    <|insert|>
+                    HEAD
+                    <|insert|>0:61
+                    MID
+                "},
+                    expected: indoc! {"
+                    HEAD
+                    a
+                    MID
+                    b
+                "},
+                },
+            ];
+
+            for case in &cases {
+                let result = hashline::apply_edit_commands(case.original, &case.model_output);
+                assert_eq!(result, case.expected, "failed case: {}", case.name);
+            }
         }
 
-        prompt.push_str("<|fim_middle|>");
-    }
-}
+        #[test]
+        fn test_output_has_edit_commands() {
+            assert!(hashline::output_has_edit_commands(&format!(
+                "{}0:ab\nnew",
+                SET_COMMAND_MARKER
+            )));
+            assert!(hashline::output_has_edit_commands(&format!(
+                "{}0:ab\nnew",
+                INSERT_COMMAND_MARKER
+            )));
+            assert!(hashline::output_has_edit_commands(&format!(
+                "some text\n{}1:cd\nstuff",
+                SET_COMMAND_MARKER
+            )));
+            assert!(!hashline::output_has_edit_commands("just plain text"));
+            assert!(!hashline::output_has_edit_commands("NO_EDITS"));
+        }
 
-pub mod v0211_prefill {
-    use super::*;
+        // ---- hashline::patch_to_edit_commands round-trip tests ----
 
-    pub fn get_prefill(context: &str, editable_range: &Range<usize>) -> String {
-        let editable_region = &context[editable_range.start..editable_range.end];
+        #[test]
+        fn test_patch_to_edit_commands() {
+            struct Case {
+                name: &'static str,
+                old: &'static str,
+                patch: &'static str,
+                expected_new: &'static str,
+            }
 
-        let prefill_len = (editable_region.len() as f64 * PREFILL_RATIO) as usize;
-        let prefill_len = editable_region.floor_char_boundary(prefill_len);
+            let cases = [
+                Case {
+                    name: "single_line_replacement",
+                    old: indoc! {"
+                    let mut total = 0;
+                    for product in products {
+                        total += ;
+                    }
+                    total
+                "},
+                    patch: indoc! {"
+                    @@ -1,5 +1,5 @@
+                     let mut total = 0;
+                     for product in products {
+                    -    total += ;
+                    +    total += product.price;
+                     }
+                     total
+                "},
+                    expected_new: indoc! {"
+                    let mut total = 0;
+                    for product in products {
+                        total += product.price;
+                    }
+                    total
+                "},
+                },
+                Case {
+                    name: "multiline_replacement",
+                    old: indoc! {"
+                    fn foo() {
+                        let x = 1;
+                        let y = 2;
+                        let z = 3;
+                    }
+                "},
+                    patch: indoc! {"
+                    @@ -1,5 +1,3 @@
+                     fn foo() {
+                    -    let x = 1;
+                    -    let y = 2;
+                    -    let z = 3;
+                    +    let sum = 1 + 2 + 3;
+                     }
+                "},
+                    expected_new: indoc! {"
+                    fn foo() {
+                        let sum = 1 + 2 + 3;
+                    }
+                "},
+                },
+                Case {
+                    name: "insertion",
+                    old: indoc! {"
+                    fn main() {
+                        let x = 1;
+                    }
+                "},
+                    patch: indoc! {"
+                    @@ -1,3 +1,4 @@
+                     fn main() {
+                         let x = 1;
+                    +    let y = 2;
+                     }
+                "},
+                    expected_new: indoc! {"
+                    fn main() {
+                        let x = 1;
+                        let y = 2;
+                    }
+                "},
+                },
+                Case {
+                    name: "insertion_before_first",
+                    old: indoc! {"
+                    let x = 1;
+                    let y = 2;
+                "},
+                    patch: indoc! {"
+                    @@ -1,2 +1,3 @@
+                    +use std::io;
+                     let x = 1;
+                     let y = 2;
+                "},
+                    expected_new: indoc! {"
+                    use std::io;
+                    let x = 1;
+                    let y = 2;
+                "},
+                },
+                Case {
+                    name: "deletion",
+                    old: indoc! {"
+                    aaa
+                    bbb
+                    ccc
+                    ddd
+                "},
+                    patch: indoc! {"
+                    @@ -1,4 +1,2 @@
+                     aaa
+                    -bbb
+                    -ccc
+                     ddd
+                "},
+                    expected_new: indoc! {"
+                    aaa
+                    ddd
+                "},
+                },
+                Case {
+                    name: "multiple_changes",
+                    old: indoc! {"
+                    alpha
+                    beta
+                    gamma
+                    delta
+                    epsilon
+                "},
+                    patch: indoc! {"
+                    @@ -1,5 +1,5 @@
+                    -alpha
+                    +ALPHA
+                     beta
+                     gamma
+                    -delta
+                    +DELTA
+                     epsilon
+                "},
+                    expected_new: indoc! {"
+                    ALPHA
+                    beta
+                    gamma
+                    DELTA
+                    epsilon
+                "},
+                },
+                Case {
+                    name: "replace_with_insertion",
+                    old: indoc! {r#"
+                    fn handle() {
+                        modal_state.close();
+                        modal_state.dismiss();
+                "#},
+                    patch: indoc! {r#"
+                    @@ -1,3 +1,4 @@
+                     fn handle() {
+                         modal_state.close();
+                    +    eprintln!("");
+                         modal_state.dismiss();
+                "#},
+                    expected_new: indoc! {r#"
+                    fn handle() {
+                        modal_state.close();
+                        eprintln!("");
+                        modal_state.dismiss();
+                "#},
+                },
+                Case {
+                    name: "complete_replacement",
+                    old: indoc! {"
+                    aaa
+                    bbb
+                    ccc
+                "},
+                    patch: indoc! {"
+                    @@ -1,3 +1,3 @@
+                    -aaa
+                    -bbb
+                    -ccc
+                    +xxx
+                    +yyy
+                    +zzz
+                "},
+                    expected_new: indoc! {"
+                    xxx
+                    yyy
+                    zzz
+                "},
+                },
+                Case {
+                    name: "add_function_body",
+                    old: indoc! {"
+                    fn foo() {
+                        modal_state.dismiss();
+                    }
 
-        // Find a token boundary to avoid splitting tokens in the prefill.
-        // In Qwen2.5-Coder, \n is always the END of a token (e.g. `;\n`,
-        // ` {\n`), and \n\n / \n\n\n are single tokens, so we must include
-        // the \n and consume any consecutive \n characters after it.
-        let prefill = &editable_region[..prefill_len];
-        match prefill.rfind('\n') {
-            Some(pos) => {
-                let mut end = pos + 1;
-                while end < editable_region.len()
-                    && editable_region.as_bytes().get(end) == Some(&b'\n')
-                {
-                    end += 1;
-                }
-                editable_region[..end].to_string()
+                    fn
+
+                    fn handle_keystroke() {
+                "},
+                    patch: indoc! {"
+                    @@ -1,6 +1,8 @@
+                     fn foo() {
+                         modal_state.dismiss();
+                     }
+
+                    -fn
+                    +fn handle_submit() {
+                    +    todo()
+                    +}
+
+                     fn handle_keystroke() {
+                "},
+                    expected_new: indoc! {"
+                    fn foo() {
+                        modal_state.dismiss();
+                    }
+
+                    fn handle_submit() {
+                        todo()
+                    }
+
+                    fn handle_keystroke() {
+                "},
+                },
+                Case {
+                    name: "with_cursor_offset",
+                    old: indoc! {r#"
+                    fn main() {
+                        println!();
+                    }
+                "#},
+                    patch: indoc! {r#"
+                    @@ -1,3 +1,3 @@
+                     fn main() {
+                    -    println!();
+                    +    eprintln!("");
+                     }
+                "#},
+                    expected_new: indoc! {r#"
+                    fn main() {
+                        eprintln!("<|user_cursor|>");
+                    }
+                "#},
+                },
+                Case {
+                    name: "non_local_hunk_header_pure_insertion_repro",
+                    old: indoc! {"
+                    aaa
+                    bbb
+                "},
+                    patch: indoc! {"
+                    @@ -20,2 +20,3 @@
+                     aaa
+                    +xxx
+                     bbb
+                "},
+                    expected_new: indoc! {"
+                    aaa
+                    xxx
+                    bbb
+                "},
+                },
+            ];
+
+            for case in &cases {
+                // The cursor_offset for patch_to_edit_commands is relative to
+                // the first hunk's new text (context + additions). We compute
+                // it by finding where the marker sits in the expected output
+                // (which mirrors the new text of the hunk).
+                let cursor_offset = case.expected_new.find(CURSOR_MARKER);
+
+                let commands =
+                    hashline::patch_to_edit_commands(case.old, case.patch, cursor_offset)
+                        .unwrap_or_else(|e| panic!("failed case {}: {e}", case.name));
+
+                assert!(
+                    hashline::output_has_edit_commands(&commands),
+                    "case {}: expected edit commands, got: {commands:?}",
+                    case.name,
+                );
+
+                let applied = hashline::apply_edit_commands(case.old, &commands);
+                assert_eq!(applied, case.expected_new, "case {}", case.name);
             }
-            // No newline found. Fall back to splitting before the last space
-            // (word-level boundary)
-            None => match prefill.rfind(' ') {
-                Some(pos) => prefill[..pos].to_string(),
-                None => prefill.to_string(),
-            },
         }
     }
 }

crates/zlog/src/filter.rs 🔗

@@ -38,8 +38,6 @@ const DEFAULT_FILTERS: &[(&str, log::LevelFilter)] = &[
     #[cfg(any(target_os = "linux", target_os = "freebsd"))]
     ("zbus", log::LevelFilter::Warn),
     #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))]
-    ("wgpu", log::LevelFilter::Warn),
-    #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))]
     ("naga::back::spv::writer", log::LevelFilter::Warn),
     // usvg prints a lot of warnings on rendering an SVG with partial errors, which
     // can happen a lot with the SVG preview

crates/zlog/src/sink.rs 🔗

@@ -56,10 +56,9 @@ pub fn init_output_file(
     path: &'static PathBuf,
     path_rotate: Option<&'static PathBuf>,
 ) -> io::Result<()> {
-    let mut file = std::fs::OpenOptions::new()
-        .create(true)
-        .append(true)
-        .open(path)?;
+    let mut enabled_sinks_file = ENABLED_SINKS_FILE
+        .try_lock()
+        .expect("Log file lock is available during init");
 
     SINK_FILE_PATH
         .set(path)
@@ -70,22 +69,30 @@ pub fn init_output_file(
             .expect("Init file output should only be called once");
     }
 
-    let mut enabled_sinks_file = ENABLED_SINKS_FILE
-        .try_lock()
-        .expect("Log file lock is available during init");
-
-    let size_bytes = file.metadata().map_or(0, |metadata| metadata.len());
-    if size_bytes >= SINK_FILE_SIZE_BYTES_MAX {
-        rotate_log_file(&mut file, Some(path), path_rotate, &SINK_FILE_SIZE_BYTES);
-    } else {
-        SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Release);
-    }
-
+    let file = open_or_create_log_file(path, path_rotate, SINK_FILE_SIZE_BYTES_MAX)?;
+    SINK_FILE_SIZE_BYTES.store(file.metadata().map_or(0, |m| m.len()), Ordering::Release);
     *enabled_sinks_file = Some(file);
 
     Ok(())
 }
 
+fn open_or_create_log_file(
+    path: &PathBuf,
+    path_rotate: Option<&PathBuf>,
+    sink_file_size_bytes_max: u64,
+) -> Result<fs::File, io::Error> {
+    let size_bytes = std::fs::metadata(path).map(|metadata| metadata.len());
+    match size_bytes {
+        Ok(size_bytes) if size_bytes >= sink_file_size_bytes_max => {
+            rotate_log_file(Some(path), path_rotate).map(|it| it.unwrap())
+        }
+        _ => std::fs::OpenOptions::new()
+            .create(true)
+            .append(true)
+            .open(path),
+    }
+}
+
 const LEVEL_OUTPUT_STRINGS: [&str; 6] = [
     "     ", // nop: ERROR = 1
     "ERROR", //
@@ -144,11 +151,11 @@ pub fn submit(mut record: Record) {
             record.message
         );
     }
-    let mut file = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| {
+    let mut file_guard = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| {
         ENABLED_SINKS_FILE.clear_poison();
         handle.into_inner()
     });
-    if let Some(file) = file.as_mut() {
+    if let Some(file) = file_guard.as_mut() {
         struct SizedWriter<'a> {
             file: &'a mut std::fs::File,
             written: u64,
@@ -182,12 +189,16 @@ pub fn submit(mut record: Record) {
             SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::AcqRel) + writer.written
         };
         if file_size_bytes > SINK_FILE_SIZE_BYTES_MAX {
-            rotate_log_file(
-                file,
-                SINK_FILE_PATH.get(),
-                SINK_FILE_PATH_ROTATE.get(),
-                &SINK_FILE_SIZE_BYTES,
-            );
+            *file_guard = None;
+            let file = rotate_log_file(SINK_FILE_PATH.get(), SINK_FILE_PATH_ROTATE.get());
+            match file {
+                Ok(Some(file)) => *file_guard = Some(file),
+                Ok(None) => {}
+                Err(e) => {
+                    eprintln!("Failed to open log file: {e}")
+                }
+            }
+            SINK_FILE_SIZE_BYTES.store(0, Ordering::Release);
         }
     }
 }
@@ -247,19 +258,13 @@ impl std::fmt::Display for SourceFmt<'_> {
 }
 
 fn rotate_log_file<PathRef>(
-    file: &mut fs::File,
     path: Option<PathRef>,
     path_rotate: Option<PathRef>,
-    atomic_size: &AtomicU64,
-) where
+) -> std::io::Result<Option<fs::File>>
+where
     PathRef: AsRef<std::path::Path>,
 {
-    if let Err(err) = file.flush() {
-        eprintln!(
-            "Failed to flush log file before rotating, some logs may be lost: {}",
-            err
-        );
-    }
+    let path = path.as_ref().map(PathRef::as_ref);
     let rotation_error = match (path, path_rotate) {
         (Some(_), None) => Some(anyhow::anyhow!("No rotation log file path configured")),
         (None, _) => Some(anyhow::anyhow!("No log file path configured")),
@@ -270,46 +275,53 @@ fn rotate_log_file<PathRef>(
     if let Some(err) = rotation_error {
         eprintln!("Log file rotation failed. Truncating log file anyways: {err}",);
     }
-    _ = file.set_len(0);
-
-    // SAFETY: It is safe to set size to 0 even if set_len fails as
-    // according to the documentation, it only fails if:
-    // - the file is not writeable: should never happen,
-    // - the size would cause an overflow (implementation specific): 0 should never cause an overflow
-    atomic_size.store(0, Ordering::Release);
+    path.map(|path| {
+        fs::OpenOptions::new()
+            .create(true)
+            .write(true)
+            .truncate(true)
+            .open(path)
+    })
+    .transpose()
 }
 
 #[cfg(test)]
 mod tests {
+
     use super::*;
 
     #[test]
-    fn test_rotate_log_file() {
+    fn test_open_or_create_log_file_rotate() {
         let temp_dir = tempfile::tempdir().unwrap();
         let log_file_path = temp_dir.path().join("log.txt");
         let rotation_log_file_path = temp_dir.path().join("log_rotated.txt");
 
-        let mut file = fs::File::create(&log_file_path).unwrap();
         let contents = String::from("Hello, world!");
-        file.write_all(contents.as_bytes()).unwrap();
+        std::fs::write(&log_file_path, &contents).unwrap();
 
-        let size = AtomicU64::new(contents.len() as u64);
-
-        rotate_log_file(
-            &mut file,
-            Some(&log_file_path),
-            Some(&rotation_log_file_path),
-            &size,
-        );
+        open_or_create_log_file(&log_file_path, Some(&rotation_log_file_path), 4).unwrap();
 
         assert!(log_file_path.exists());
         assert_eq!(log_file_path.metadata().unwrap().len(), 0);
         assert!(rotation_log_file_path.exists());
-        assert_eq!(
-            std::fs::read_to_string(&rotation_log_file_path).unwrap(),
-            contents,
-        );
-        assert_eq!(size.load(Ordering::Acquire), 0);
+        assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), "");
+    }
+
+    #[test]
+    fn test_open_or_create_log_file() {
+        let temp_dir = tempfile::tempdir().unwrap();
+        let log_file_path = temp_dir.path().join("log.txt");
+        let rotation_log_file_path = temp_dir.path().join("log_rotated.txt");
+
+        let contents = String::from("Hello, world!");
+        std::fs::write(&log_file_path, &contents).unwrap();
+
+        open_or_create_log_file(&log_file_path, Some(&rotation_log_file_path), !0).unwrap();
+
+        assert!(log_file_path.exists());
+        assert_eq!(log_file_path.metadata().unwrap().len(), 13);
+        assert!(!rotation_log_file_path.exists());
+        assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), contents);
     }
 
     /// Regression test, ensuring that if log level values change we are made aware

docs/.doc-examples/complex-feature.md 🔗

@@ -91,11 +91,11 @@ To disable word diff for specific languages only, add this to your settings.json
 
 File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit.
 
-To open File History:
+To view File History:
 
-- Right-click on a file in the Project Panel and select "Open File History"
-- Right-click on a file in the Git Panel and select "Open File History"
-- Right-click on an editor tab and select "Open File History"
+- Right-click on a file in the Project Panel and select "View File History"
+- Right-click on a file in the Git Panel and select "View File History"
+- Right-click on an editor tab and select "View File History"
 - Use the Command Palette and search for "file history"
 
 ## Fetch, Push, and Pull {#fetch-push-pull}

docs/.prettierignore 🔗

@@ -1,2 +1,5 @@
 # Handlebars partials are not supported by Prettier.
 *.hbs
+
+# Automatically generated
+theme/c15t@*.js

docs/README.md 🔗

@@ -64,6 +64,22 @@ This will render a human-readable version of the action name, e.g., "zed: open s
 Templates are functions that modify the source of the docs pages (usually with a regex match and replace).
 You can see how the actions and keybindings are templated in `crates/docs_preprocessor/src/main.rs` for reference on how to create new templates.
 
+## Consent Banner
+
+We pre-bundle the `c15t` package because the docs pipeline does not include a JS bundler. If you need to update `c15t` and rebuild the bundle, use:
+
+```
+mkdir c15t-bundle && cd c15t-bundle
+npm init -y
+npm install c15t@<version> esbuild
+echo "import { getOrCreateConsentRuntime } from 'c15t'; window.c15t = { getOrCreateConsentRuntime };" > entry.js
+npx esbuild entry.js --bundle --format=iife --minify --outfile=c15t@<version>.js
+cp c15t@<version>.js ../theme/c15t@<version>.js
+cd .. && rm -rf c15t-bundle
+```
+
+Replace `<version>` with the new version of `c15t` you are installing. Then update `book.toml` to reference the new bundle filename.
+
 ### References
 
 - Template Trait: `crates/docs_preprocessor/src/templates.rs`

docs/book.toml 🔗

@@ -23,8 +23,8 @@ default-description = "Learn how to use and customize Zed, the fast, collaborati
 default-title = "Zed Code Editor Documentation"
 no-section-label = true
 preferred-dark-theme = "dark"
-additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css"]
-additional-js  = ["theme/page-toc.js", "theme/plugins.js"]
+additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css", "theme/consent-banner.css"]
+additional-js  = ["theme/page-toc.js", "theme/plugins.js", "theme/c15t@2.0.0-rc.3.js", "theme/analytics.js"]
 
 [output.zed-html.print]
 enable = false

docs/src/ai/agent-panel.md 🔗

@@ -114,8 +114,6 @@ The agent can search your codebase to find relevant context, but providing it ex
 Add context by typing `@` in the message editor.
 You can mention files, directories, symbols, previous threads, rules files, and diagnostics.
 
-Copying images and pasting them in the panel's message editor is also supported.
-
 When you paste multi-line code selections copied from a buffer, Zed automatically formats them as @-mentions with the file context.
 To paste content without this automatic formatting, use {#kb agent::PasteRaw} to paste raw text directly.
 
@@ -123,6 +121,14 @@ To paste content without this automatic formatting, use {#kb agent::PasteRaw} to
 
 Additionally, you can also select text in a buffer or terminal and add it as context by using the {#kb agent::AddSelectionToThread} keybinding, running the {#action agent::AddSelectionToThread} action, or choosing the "Selection" item in the `+` menu in the message editor.
 
+### Images as Context
+
+It's also possible to attach images in your prompt for providers that support vision models.
+OpenAI GPT-4o and later, Anthropic Claude 3 and later, Google Gemini 1.5 and 2.0, and Bedrock vision models (Claude 3+, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision, Mistral Pixtral) all support image inputs.
+
+To add an image, you can either search in your project's directory by @-mentioning it, or drag it from your file system directly into the agent panel message editor.
+Copying an image and pasting it is also supported.
+
 ## Token Usage {#token-usage}
 
 Zed surfaces how many tokens you are consuming for your currently active thread near the profile selector in the panel's message editor.
@@ -168,7 +174,7 @@ You can explore the exact tools enabled in each profile by clicking on the profi
 
 Alternatively, you can also use either the command palette, by running {#action agent::ManageProfiles}, or the keybinding directly, {#kb agent::ManageProfiles}, to have access to the profile management modal.
 
-Use {#kb agent::CycleModeSelector} to switch between profiles without opening the modal.
+Use {#kb agent::CycleModeSelector} to cycle through available profiles without opening the modal.
 
 #### Custom Profiles {#custom-profiles}
 

docs/src/ai/agent-settings.md 🔗

@@ -290,10 +290,10 @@ See the [Tool Permissions](./tool-permissions.md) documentation for more example
 
 > **Note:** Before Zed v0.224.0, tool approval was controlled by the `agent.always_allow_tool_actions` boolean (default `false`). Set it to `true` to auto-approve tool actions, or leave it `false` to require confirmation for edits and tool calls.
 
-### Single-file Review
+### Edit Display Mode
 
 Control whether to display review actions (accept & reject) in single buffers after the agent is done performing edits.
-The default value is `true`.
+The default value is `false`.
 
 ```json [settings]
 {
@@ -303,8 +303,6 @@ The default value is `true`.
 }
 ```
 
-When set to `false`, these controls are only available in the multibuffer review tab.
-
 ### Sound Notification
 
 Control whether to hear a notification sound when the agent is done generating changes or needs your input.

docs/src/ai/ai-improvement.md 🔗

@@ -3,73 +3,99 @@ title: AI Improvement and Data Collection - Zed
 description: Zed's opt-in approach to AI data collection for improving the agent panel and edit predictions.
 ---
 
-# Zed AI Improvement
+# Zed AI Features and Privacy
 
-## Agent Panel
+## Overview
 
-### Opt-In
+AI features in Zed include:
 
-When you use the Agent Panel through any of these means:
+- [Agent Panel](./agent-panel.md)
+- [Edit Predictions](./edit-prediction.md)
+- [Inline Assist](./inline-assistant.md)
+- [Text Threads](./text-threads.md)
+- Auto Git Commit Message Generation
 
-- [Zed's hosted models](./subscription.md)
-- [connecting a non-Zed AI service via API key](./llm-providers.md)
-- using an [external agent](./external-agents.md)
+By default, Zed does not store your prompts or code context. This data is sent to your selected AI provider (e.g., Anthropic, OpenAI, Google, or xAI) to generate responses, then discarded. Zed will not use your data to evaluate or improve AI features unless you explicitly share it (see [AI Feedback with Ratings](#ai-feedback-with-ratings)) or you opt in to edit prediction training data collection (see [Edit Predictions](#edit-predictions)).
+
+Zed is model-agnostic by design, and none of this changes based on which provider you choose. You can use your own API keys or Zed's hosted models without any data being retained.
+
+### Data Retention and Training
 
-Zed does not persistently store user content or use user content to evaluate and/or improve our AI features, unless it is explicitly shared with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again.
+Zed's Agent Panel can be used via:
 
-> Note that rating responses will send your data related to that response to Zed's servers.
-> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our Agentic offering without you explicitly rating responses.
+- [Zed's hosted models](./subscription.md)
+- [connecting a non-Zed AI service via API key](./llm-providers.md)
+- using an [external agent](./external-agents.md) via ACP
 
-When using upstream services through Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models.
+When using Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models.
 
 | Provider  | No Training Guarantee                                   | Zero-Data Retention (ZDR)                                                                                                                     |
 | --------- | ------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- |
 | Anthropic | [Yes](https://www.anthropic.com/legal/commercial-terms) | [Yes](https://privacy.anthropic.com/en/articles/8956058-i-have-a-zero-data-retention-agreement-with-anthropic-what-products-does-it-apply-to) |
 | Google    | [Yes](https://cloud.google.com/terms/service-terms)     | [Yes](https://cloud.google.com/terms/service-terms), see Service Terms sections 17 and 19h                                                    |
 | OpenAI    | [Yes](https://openai.com/enterprise-privacy/)           | [Yes](https://platform.openai.com/docs/guides/your-data)                                                                                      |
+| xAI       | [Yes](https://x.ai/legal/faq-enterprise)                | [Yes](https://x.ai/legal/faq-enterprise)                                                                                                      |
 
 When you use your own API keys or external agents, **Zed does not have control over how your data is used by that service provider.**
 You should reference your agreement with each service provider to understand what terms and conditions apply.
 
-### Data we collect
+### AI Feedback with Ratings
+
+You can provide feedback on Zed's AI features by rating specific AI responses in Zed and sharing details related to those conversations with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again.
+
+> **Rating = Data Sharing:** When you rate a response, your entire conversation thread is sent to Zed. This includes messages, AI responses, and thread metadata.
+> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our AI features without you explicitly rating responses.
 
-For prompts you have explicitly shared with us, Zed may store copies of those prompts and other data about the specific use of the Agent Panel.
+### Data Collected (AI Feedback)
 
-This data includes:
+For conversations you have explicitly shared with us via rating, Zed may store:
 
-- The prompt given to the Agent
-- Any commentary you include
-- Product telemetry about the agentic thread
+- All messages in the thread (your prompts and AI responses)
+- Any commentary you include with your rating
+- Thread metadata (model used, token counts, timestamps)
 - Metadata about your Zed installation
 
-### Data Handling
+If you do not rate responses, Zed will not store Customer Data (code, conversations, responses) related to your usage of the AI features.
+
+Telemetry related to Zed's AI features is collected. This includes metadata such as the AI feature being used and high-level interactions with the feature to understand performance (e.g., Agent response time, edit acceptance/rejection in the Agent panel or edit completions). You can read more in Zed's [telemetry](../telemetry.md) documentation.
 
 Collected data is stored in Snowflake, a private database. We periodically review this data to refine the agent's system prompt and tool use. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses).
 
 ## Edit Predictions
 
-By default, when using Zed Edit Predictions, Zed does not persistently store user content or use user content for training of its models.
+Edit predictions can be powered by **Zed's Zeta model** or by **third-party providers** like GitHub Copilot.
+
+### Zed's Zeta Model (Default)
+
+Zed sends a limited context window to the model to generate predictions:
+
+- A code excerpt around your cursor (not the full file)
+- Recent edits as diffs
+- Relevant excerpts from related open files
 
-### Opt-in
+This data is processed transiently to generate predictions and is not retained afterward.
 
-Users who are working on open source licensed projects may optionally opt-in to providing model improvement feedback. This opt-in occurs on a per-project basis. If you work on multiple open source projects and wish to provide model improvement feedback you will have to opt-in for each individual project.
+### Third-Party Providers
 
-When working on other projects where you haven't opted-in, Zed will not persistently store user content or use user content for training of its models.
+When using third-party providers like GitHub Copilot, **Zed does not control how your data is handled** by that provider. You should consult their Terms and Conditions directly.
 
-You can see exactly how Zed detects open source licenses in: [license_detection.rs](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs).
+Note: Zed's `disabled_globs` settings will prevent predictions from being requested, but third-party providers may receive file content when files are opened.
 
-### Exclusions
+### Training Data: Opt-In for Open Source Projects
 
-Zed will intentionally exclude certain files from Predictive Edits entirely, even when you have opted-in to model improvement feedback.
+Zed does not collect training data for our edit prediction model unless the following conditions are met:
 
-You can inspect this exclusion list by opening `zed: open default settings` from the command palette:
+1. **You opt in** – Toggle "Training Data Collection" under the **Privacy** section of the edit prediction status bar menu (click the edit prediction icon in the status bar).
+2. **The project is open source** — detected via LICENSE file ([see detection logic](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs))
+3. **The file isn't excluded** — via `disabled_globs`
+
+### File Exclusions
+
+Certain files are always excluded from edit predictions—regardless of opt-in status:
 
 ```json [settings]
 {
   "edit_predictions": {
-    // A list of globs representing files that edit predictions should be disabled for.
-    // There's a sensible default list of globs already included.
-    // Any addition to this list will be merged with the default list.
     "disabled_globs": [
       "**/.env*",
       "**/*.pem",
@@ -92,22 +118,17 @@ Users may explicitly exclude additional paths and/or file extensions by adding t
 }
 ```
 
-### Data we collect
-
-For open source projects where you have opted-in, Zed may store copies of requests and responses to the Zed AI Prediction service.
-
-This data includes:
+### Data Collected (Edit Prediction Training Data)
 
-- sampled edit prediction examples (cursor context + recent diffs/edits) for offline evaluation
-- the edit prediction
-- a portion of the buffer content around the cursor
-- a few recent edits
-- the current buffer outline
-- diagnostics (errors, warnings, etc) from language servers
+For open source projects where you've opted in, Zed may collect:
 
-### Data Handling
+- Code excerpt around your cursor
+- Recent edit diffs
+- The generated prediction
+- Repository URL and git revision
+- Buffer outline and diagnostics
 
-Collected data is stored in Snowflake, a private database. We periodically select training samples from this data. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). The training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta).
+Collected data is stored in Snowflake. We periodically review this data to select training samples for inclusion in our model training dataset. We ensure any included data is anonymized and contains no sensitive information (access tokens, user IDs, email addresses, etc). This training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta).
 
 ### Model Output
 
@@ -115,4 +136,4 @@ We then use this training dataset to fine-tune [Qwen2.5-Coder-7B](https://huggin
 
 ## Applicable terms
 
-Please see the [Zed Terms of Service](https://zed.dev/terms-of-service) for more.
+Please see the [Zed Terms of Service](https://zed.dev/terms) for more.

docs/src/ai/edit-prediction.md 🔗

@@ -301,8 +301,6 @@ Edit Prediction also works with other providers.
 
 ### GitHub Copilot {#github-copilot}
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 To use GitHub Copilot as your provider, set this in your settings file ([how to edit](../configuring-zed.md#settings-files)):
 
 ```json [settings]
@@ -408,13 +406,47 @@ After adding your API key, Codestral will appear in the provider dropdown in the
 
 ### Self-Hosted OpenAI-compatible servers
 
-To configure Zed to use an arbitrary server for edit predictions:
+You can use any self-hosted server that implements the OpenAI completion API format. This works with vLLM, llama.cpp server, LocalAI, and other compatible servers.
 
-1. Open the Settings Editor (`Cmd+,` on macOS, `Ctrl+,` on Linux/Windows)
-2. Search for "Edit Predictions" and click **Configure Providers**
-3. Find the "OpenAI-compatible API" section and enter the URL and model name. You can also select a prompt format that Zed should use. Zed currently supports several FIM prompt formats, as well as Zed's own Zeta prompt format. If you do not select a prompt format, Zed will attempt to infer it from the model name.
+#### Configuration
+
+Set `open_ai_compatible_api` as your provider and configure the API endpoint:
+
+```json [settings]
+{
+  "edit_predictions": {
+    "provider": "open_ai_compatible_api",
+    "open_ai_compatible_api": {
+      "api_url": "http://localhost:8080/v1/completions",
+      "model": "deepseek-coder-6.7b-base",
+      "prompt_format": "deepseek_coder",
+      "max_output_tokens": 64
+    }
+  }
+}
+```
+
+The `prompt_format` setting controls how code context is formatted for the model. Use `"infer"` to detect the format from the model name, or specify one explicitly:
+
+- `code_llama` - CodeLlama format: `<PRE> prefix <SUF> suffix <MID>`
+- `star_coder` - StarCoder format: `<fim_prefix>prefix<fim_suffix>suffix<fim_middle>`
+- `deepseek_coder` - DeepSeek format with special unicode markers
+- `qwen` - Qwen/CodeGemma format: `<|fim_prefix|>prefix<|fim_suffix|>suffix<|fim_middle|>`
+- `codestral` - Codestral format: `[SUFFIX]suffix[PREFIX]prefix`
+- `glm` - GLM-4 format with code markers
+- `infer` - Auto-detect from model name (default)
 
-The URL must accept requests according to OpenAI's [Completions API](https://developers.openai.com/api/reference/resources/completions/methods/create)
+Your server must implement the OpenAI `/v1/completions` endpoint. Edit predictions will send POST requests with this format:
+
+```json
+{
+  "model": "your-model-name",
+  "prompt": "formatted-code-context",
+  "max_tokens": 256,
+  "temperature": 0.2,
+  "stop": ["<|endoftext|>", ...]
+}
+```
 
 ## See also
 

docs/src/ai/external-agents.md 🔗

@@ -27,7 +27,10 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 [
   {
     "bindings": {
-      "cmd-alt-g": ["agent::NewExternalAgentThread", { "agent": "gemini" }]
+      "cmd-alt-g": [
+        "agent::NewExternalAgentThread",
+        { "agent": { "custom": { "name": "gemini" } } }
+      ]
     }
   }
 ]
@@ -38,32 +41,14 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 The first time you create a Gemini CLI thread, Zed will install [@google/gemini-cli](https://github.com/google-gemini/gemini-cli).
 This installation is only available to Zed and is kept up to date as you use the agent.
 
-By default, Zed will use this managed version of Gemini CLI even if you have it installed globally.
-However, you can configure it to use a version in your `PATH` by adding this to your settings:
-
-```json [settings]
-{
-  "agent_servers": {
-    "gemini": {
-      "ignore_system_version": false
-    }
-  }
-}
-```
-
 #### Authentication
 
-After you have Gemini CLI running, you'll be prompted to choose your authentication method.
+After you have Gemini CLI running, you'll be prompted to authenticate.
 
-Most users should click the "Log in with Google".
-This will cause a browser window to pop-up and auth directly with Gemini CLI.
+Click the "Login" button to open the Gemini CLI interactively, where you can log in with your Google account or [Vertex AI](https://cloud.google.com/vertex-ai) credentials.
 Zed does not see your OAuth or access tokens in this case.
 
-You can also use the "Gemini API Key".
-If you select this, and have the `GEMINI_API_KEY` set, then we will use that.
-Otherwise Zed will prompt you for an API key which will be stored securely in your keychain, and used to start Gemini CLI from within Zed.
-
-The "Vertex AI" option is for those who are using [Vertex AI](https://cloud.google.com/vertex-ai), and have already configured their environment correctly.
+If the `GEMINI_API_KEY` environment variable (or `GOOGLE_AI_API_KEY`) is already set, or you have configured a Google AI API key in Zed's [language model provider settings](./llm-providers.md#google-ai), it will be passed to Gemini CLI automatically.
 
 For more information, see the [Gemini CLI docs](https://github.com/google-gemini/gemini-cli/blob/main/docs/index.md).
 
@@ -88,7 +73,10 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 [
   {
     "bindings": {
-      "cmd-alt-c": ["agent::NewExternalAgentThread", { "agent": "claude_code" }]
+      "cmd-alt-c": [
+        "agent::NewExternalAgentThread",
+        { "agent": { "custom": { "name": "claude-acp" } } }
+      ]
     }
   }
 ]
@@ -114,7 +102,8 @@ If you want to override the executable used by the adapter, you can set the `CLA
 ```json
 {
   "agent_servers": {
-    "claude": {
+    "claude-acp": {
+      "type": "registry",
       "env": {
         "CLAUDE_CODE_EXECUTABLE": "/path/to/alternate-claude-code-executable"
       }
@@ -159,7 +148,10 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 [
   {
     "bindings": {
-      "cmd-alt-c": ["agent::NewExternalAgentThread", { "agent": "codex" }]
+      "cmd-alt-c": [
+        "agent::NewExternalAgentThread",
+        { "agent": { "custom": { "name": "codex-acp" } } }
+      ]
     }
   }
 ]
@@ -248,7 +240,7 @@ You can also add agents through your settings file ([how to edit](../configuring
 
 This can be useful if you're in the middle of developing a new agent that speaks the protocol and you want to debug it.
 
-It's also possible to specify a custom path, arguments, or environment for the builtin integrations by using the `claude` and `gemini` names.
+It's also possible to customize environment variables for registry-installed agents like Claude Agent, Codex, and Gemini CLI by using their registry names (`claude-acp`, `codex-acp`, `gemini`) with `"type": "registry"` in your settings.
 
 ## Debugging Agents
 

docs/src/ai/llm-providers.md 🔗

@@ -1,6 +1,6 @@
 ---
 title: LLM Providers - Use Your Own API Keys in Zed
-description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI, Ollama, DeepSeek, Mistral, OpenRouter, and more.
+description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI, Ollama, DeepSeek, Mistral, OpenRouter, Vercel AI Gateway, and more.
 ---
 
 # LLM Providers
@@ -32,6 +32,7 @@ Zed supports these providers with your own API keys:
 - [OpenAI](#openai)
 - [OpenAI API Compatible](#openai-api-compatible)
 - [OpenRouter](#openrouter)
+- [Vercel AI Gateway](#vercel-ai-gateway)
 - [Vercel](#vercel-v0)
 - [xAI](#xai)
 
@@ -87,7 +88,7 @@ With that done, choose one of the three authentication methods:
 While it's possible to configure through the Agent Panel settings UI by entering your AWS access key and secret directly, we recommend using named profiles instead for better security practices.
 To do this:
 
-1. Create an IAM User that you can assume in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users).
+1. Create an IAM User in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users).
 2. Create security credentials for that User, save them and keep them secure.
 3. Open the Agent Configuration with (`agent: open settings`) and go to the Amazon Bedrock section
 4. Copy the credentials from Step 2 into the respective **Access Key ID**, **Secret Access Key**, and **Region** fields.
@@ -151,7 +152,7 @@ For the most up-to-date supported regions and models, refer to the [Supported Mo
 
 #### Extended Context Window {#bedrock-extended-context}
 
-Anthropic models on Bedrock support a [1M token extended context window](https://docs.anthropic.com/en/docs/build-with-claude/extended-context) beta. To enable this feature, add `"allow_extended_context": true` to your Bedrock configuration:
+Anthropic models on Bedrock support a 1M token extended context window through the `anthropic_beta` API parameter. To enable this feature, set `"allow_extended_context": true` in your Bedrock configuration:
 
 ```json [settings]
 {
@@ -166,9 +167,11 @@ Anthropic models on Bedrock support a [1M token extended context window](https:/
 }
 ```
 
-When enabled, Zed will include the `anthropic_beta` field in requests to Bedrock, enabling the 1M token context window for supported Anthropic models such as Claude Sonnet 4.5 and Claude Opus 4.6.
+Zed enables extended context for supported models (Claude Sonnet 4.5 and Claude Opus 4.6). Extended context usage may increase API costs—refer to AWS Bedrock pricing for details.
 
-> **Note**: Extended context usage may incur additional API costs. Refer to your AWS Bedrock pricing for details.
+#### Image Support {#bedrock-image-support}
+
+Bedrock models that support vision (Claude 3 and later, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision models, Mistral Pixtral) can receive images in conversations and tool results.
 
 ### Anthropic {#anthropic}
 
@@ -303,6 +306,15 @@ Here is an example of a custom Google AI model you could add to your Zed setting
   "language_models": {
     "google": {
       "available_models": [
+        {
+          "name": "gemini-3.1-pro-preview",
+          "display_name": "Gemini 3.1 Pro",
+          "max_tokens": 1000000,
+          "mode": {
+            "type": "thinking",
+            "budget_tokens": 24000
+          }
+        },
         {
           "name": "gemini-3-flash-preview",
           "display_name": "Gemini 3 Flash (Thinking)",
@@ -614,6 +626,23 @@ The OpenRouter API key will be saved in your keychain.
 
 Zed will also use the `OPENROUTER_API_KEY` environment variable if it's defined.
 
+When using OpenRouter as your assistant provider, you must explicitly select a model in your settings. OpenRouter no longer provides a default model selection.
+
+Configure your preferred OpenRouter model in `settings.json`:
+
+```json [settings]
+{
+  "agent": {
+    "default_model": {
+      "provider": "openrouter",
+      "model": "openrouter/auto"
+    }
+  }
+}
+```
+
+The `openrouter/auto` model automatically routes your requests to the most appropriate available model. You can also specify any model available through OpenRouter's API.
+
 #### Custom Models {#openrouter-custom-models}
 
 You can add custom models to the OpenRouter provider by adding the following to your Zed settings file ([how to edit](../configuring-zed.md#settings-files)):
@@ -704,6 +733,30 @@ Example adding routing preferences to a model:
 
 These routing controls let you fine‑tune cost, capability, and reliability trade‑offs without changing the model name you select in the UI.
 
+### Vercel AI Gateway {#vercel-ai-gateway}
+
+[Vercel AI Gateway](https://vercel.com/ai-gateway) provides access to many models through a single OpenAI-compatible endpoint.
+
+1. Create an API key from your [Vercel AI Gateway keys page](https://vercel.com/d?to=%2F%5Bteam%5D%2F%7E%2Fai%2Fapi-keys&title=Go+to+AI+Gateway)
+2. Open the settings view (`agent: open settings`) and go to the **Vercel AI Gateway** section
+3. Enter your Vercel AI Gateway API key
+
+The Vercel AI Gateway API key will be saved in your keychain.
+
+Zed will also use the `VERCEL_AI_GATEWAY_API_KEY` environment variable if it's defined.
+
+You can also set a custom endpoint for Vercel AI Gateway in your settings file:
+
+```json [settings]
+{
+  "language_models": {
+    "vercel_ai_gateway": {
+      "api_url": "https://ai-gateway.vercel.sh/v1"
+    }
+  }
+}
+```
+
 ### Vercel v0 {#vercel-v0}
 
 [Vercel v0](https://v0.app/docs/api/model) is a model for generating full-stack apps, with framework-aware completions for stacks like Next.js and Vercel.

docs/src/ai/mcp.md 🔗

@@ -86,7 +86,7 @@ Once installation is complete, you can return to the Agent Panel and start promp
 How reliably MCP tools get called can vary from model to model.
 Mentioning the MCP server by name can help the model pick tools from that server.
 
-If you want to _ensure_ a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on.
+However, if you want to _ensure_ a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on.
 
 As an example, [the Dagger team suggests](https://container-use.com/agent-integrations#zed) doing that with their [Container Use MCP server](https://zed.dev/extensions/mcp-server-container-use):
 
@@ -156,3 +156,15 @@ Note that for [external agents](./external-agents.md) connected through the [Age
 
 Regarding the built-in ones, Claude Agent and Codex both support it, and Gemini CLI does not yet.
 In the meantime, learn how to add MCP server support to Gemini CLI through [their documentation](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#using-mcp-servers).
+
+### Error Handling
+
+When an MCP server encounters an error while processing a tool call, the agent receives the error message directly and the operation fails.
+Common error scenarios include:
+
+- Invalid parameters passed to the tool
+- Server-side failures (database connection issues, rate limits)
+- Unsupported operations or missing resources
+
+The error message from the context server will be shown in the agent's response, allowing you to diagnose and correct the issue.
+Check the context server's logs or documentation for details about specific error codes.

docs/src/ai/models.md 🔗

@@ -43,8 +43,6 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir
 |                        | OpenAI    | Cached Input        | $0.005                       | $0.0055                 |
 | Gemini 3.1 Pro         | Google    | Input               | $2.00                        | $2.20                   |
 |                        | Google    | Output              | $12.00                       | $13.20                  |
-| Gemini 3 Pro           | Google    | Input               | $2.00                        | $2.20                   |
-|                        | Google    | Output              | $12.00                       | $13.20                  |
 | Gemini 3 Flash         | Google    | Input               | $0.30                        | $0.33                   |
 |                        | Google    | Output              | $2.50                        | $2.75                   |
 | Grok 4                 | X.ai      | Input               | $3.00                        | $3.30                   |
@@ -68,7 +66,8 @@ As of February 19, 2026, Zed Pro serves newer model versions in place of the ret
 - Claude Sonnet 4 → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - Claude Sonnet 3.7 (retired Feb 19) → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2 Codex
-- Gemini 2.5 Pro → Gemini 3 Pro
+- Gemini 2.5 Pro → Gemini 3.1 Pro
+- Gemini 3 Pro → Gemini 3.1 Pro
 - Gemini 2.5 Flash → Gemini 3 Flash
 
 ## Usage {#usage}
@@ -93,7 +92,6 @@ A context window is the maximum span of text and code an LLM can consider at onc
 | GPT-5 mini        | OpenAI    | 400k                      |
 | GPT-5 nano        | OpenAI    | 400k                      |
 | Gemini 3.1 Pro    | Google    | 200k                      |
-| Gemini 3 Pro      | Google    | 200k                      |
 | Gemini 3 Flash    | Google    | 200k                      |
 
 > Context window limits for hosted Sonnet 4.5/4.6 and Gemini 3.1 Pro/3 Pro/Flash may increase in future releases.

docs/src/ai/overview.md 🔗

@@ -28,7 +28,7 @@ The [Inline Assistant](./inline-assistant.md) works differently: select code or
 
 [Edit Prediction](./edit-prediction.md) provides AI code completions on every keystroke. Each keypress sends a request to the prediction provider, which returns single or multi-line suggestions you accept with `tab`.
 
-The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot, Supermaven, or Codestral.
+The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot or Codestral.
 
 ## Text threads
 

docs/src/ai/privacy-and-security.md 🔗

@@ -7,15 +7,17 @@ description: Zed's approach to AI privacy: opt-in data sharing by default, zero-
 
 ## Philosophy
 
-Zed aims to collect only the minimum data necessary to serve and improve our product.
+Zed collects minimal data necessary to serve and improve our product. Features that could share data, like AI and telemetry, are either opt-in or can be disabled.
 
-Data sharing is opt-in by default. Privacy is not a setting to toggle—it's the baseline.
+- **Telemetry**: Zed collects only the data necessary to understand usage and fix issues. Client-side telemetry can be disabled in settings.
 
-As an open-source product, we believe in maximal transparency, and invite you to examine our codebase. If you find issues, we encourage you to share them with us.
+- **AI**: Data sharing for AI improvement is opt-in, and each share is a one-time action; it does not grant permission for future data collection. You can use Zed's AI features without sharing any data with Zed and without authenticating.
 
-Zed, including AI features, works without sharing data with us and without authentication.
+- **Open-Source**: Zed's codebase is public. You can inspect exactly what data is collected and how it's handled. If you find issues, we encourage you to report them.
 
-## Documentation
+- **Secure-by-default**: Designing Zed and our Service with "secure-by-default" as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best practices in order to uphold that principle.
+
+## Related Documentation
 
 - [Tool Permissions](./tool-permissions.md): Configure granular rules to control which agent actions are auto-approved, blocked, or require confirmation.
 
@@ -23,16 +25,15 @@ Zed, including AI features, works without sharing data with us and without authe
 
 - [Telemetry](../telemetry.md): How Zed collects general telemetry data.
 
-- [AI Improvement](./ai-improvement.md): Zed's opt-in-only approach to data collection for AI improvement, whether our Agentic offering or Edit Predictions.
+- [Zed AI Features and Privacy](./ai-improvement.md): An overview of Zed's AI features, your data when using AI in Zed, and how to opt-in and help Zed improve these features.
 
 - [Accounts](../authentication.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you.
 
-- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works, and how data flows to provide the experience (we don't store your code).
+- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works and how data flows. Zed does not store your code.
 
 ## Legal Links
 
-- [Terms of Service](https://zed.dev/terms-of-service)
-- [Terms of Use](https://zed.dev/terms)
+- [Terms of Service](https://zed.dev/terms)
 - [Privacy Policy](https://zed.dev/privacy-policy)
 - [Zed's Contributor License and Feedback Agreement](https://zed.dev/cla)
 - [Subprocessors](https://zed.dev/subprocessors)

docs/src/ai/tools.md 🔗

@@ -91,6 +91,6 @@ Executes shell commands and returns the combined output, creating a new shell pr
 
 ## Other Tools
 
-### `subagent`
+### `spawn_agent`
 
-Spawns a subagent with its own context window to perform a delegated task. Useful for running parallel investigations, completing self-contained tasks, or performing research where only the outcome matters. Each subagent has access to the same tools as the parent agent.
+Spawns a subagent with its own context window to perform a delegated task. Each subagent has access to the same tools as the parent agent.

docs/src/collaboration/overview.md 🔗

@@ -19,3 +19,30 @@ The Collaboration Panel has two sections:
 > **Warning:** Sharing a project gives collaborators access to your local file system within that project. Only collaborate with people you trust.
 
 See the [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for more details.
+
+## Audio Settings {#audio-settings}
+
+### Selecting Audio Devices
+
+You can select specific input and output audio devices instead of using system defaults. To configure audio devices:
+
+1. Open {#kb zed::OpenSettings}
+2. Navigate to **Collaboration** > **Experimental**
+3. Use the **Output Audio Device** and **Input Audio Device** dropdowns to select your preferred devices
+
+Changes take effect immediately. If you select a device that becomes unavailable, Zed falls back to system defaults.
+
+To test your audio configuration, click **Test Audio** in the same section. This opens a window where you can verify your microphone and speaker work correctly with the selected devices.
+
+**JSON configuration:**
+
+```json [settings]
+{
+  "audio": {
+    "experimental.output_audio_device": "Device Name (device-id)",
+    "experimental.input_audio_device": "Device Name (device-id)"
+  }
+}
+```
+
+Set either value to `null` to use system defaults.

docs/src/completions.md 🔗

@@ -8,7 +8,7 @@ description: Zed's code completions from language servers and edit predictions.
 Zed supports two sources for completions:
 
 1. "Code Completions" provided by Language Servers (LSPs) automatically installed by Zed or via [Zed Language Extensions](languages.md).
-2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot) or [Supermaven](#supermaven).
+2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot).
 
 ## Language Server Code Completions {#code-completions}
 

docs/src/configuring-languages.md 🔗

@@ -122,11 +122,40 @@ You can specify your preference using the `language_servers` setting:
 
 In this example:
 
-- `intelephense` is set as the primary language server
-- `phpactor` is disabled (note the `!` prefix)
-- `...` expands to the rest of the language servers that are registered for PHP
+- `intelephense` is set as the primary language server.
+- `phpactor` and `phptools` are disabled (note the `!` prefix).
+- `"..."` expands to the rest of the language servers registered for PHP that are not already listed.
 
-This configuration allows you to tailor the language server setup to your specific needs, ensuring that you get the most suitable functionality for your development workflow.
+The `"..."` entry acts as a wildcard that includes any registered language server you haven't explicitly mentioned. Servers you list by name keep their position, and `"..."` fills in the remaining ones at that point in the list. Servers prefixed with `!` are excluded entirely. This means that if a new language server extension is installed or a new server is registered for a language, `"..."` will automatically include it. If you want full control over which servers are enabled, omit `"..."` — only the servers you list by name will be used.
+
+#### Examples
+
+Suppose you're working with Ruby. The default configuration is:
+
+```json [settings]
+{
+  "language_servers": [
+    "solargraph",
+    "!ruby-lsp",
+    "!rubocop",
+    "!sorbet",
+    "!steep",
+    "!kanayago",
+    "..."
+  ]
+}
+```
+
+When you override `language_servers` in your settings, your list **replaces** the default entirely. This means default-disabled servers like `kanayago` will be re-enabled by `"..."` unless you explicitly disable them again.
+
+| Configuration                                     | Result                                                             |
+| ------------------------------------------------- | ------------------------------------------------------------------ |
+| `["..."]`                                         | `solargraph`, `ruby-lsp`, `rubocop`, `sorbet`, `steep`, `kanayago` |
+| `["ruby-lsp", "..."]`                             | `ruby-lsp`, `solargraph`, `rubocop`, `sorbet`, `steep`, `kanayago` |
+| `["ruby-lsp", "!solargraph", "!kanayago", "..."]` | `ruby-lsp`, `rubocop`, `sorbet`, `steep`                           |
+| `["ruby-lsp", "solargraph"]`                      | `ruby-lsp`, `solargraph`                                           |
+
+> Note: In the first example, `"..."` includes `kanayago` even though it is disabled by default. The override replaced the default list, so the `"!kanayago"` entry is no longer present. To keep it disabled, you must include `"!kanayago"` in your configuration.
 
 ### Toolchains
 
@@ -136,6 +165,8 @@ Not all languages in Zed support toolchain discovery and selection, but for thos
 
 ### Configuring Language Servers
 
+When configuring language servers in your `settings.json`, autocomplete suggestions include all available LSP adapters recognized by Zed, not only those currently active for loaded languages. This helps you discover and configure language servers before opening files that use them.
+
 Many language servers accept custom configuration options. You can set these in the `lsp` section of your `settings.json`:
 
 ```json [settings]

docs/src/debugger.md 🔗

@@ -163,6 +163,14 @@ Some debug adapters (e.g. CodeLLDB and JavaScript) will also _verify_ whether yo
 All breakpoints enabled for a given project are also listed in "Breakpoints" item in your debugging session UI. From "Breakpoints" item in your UI you can also manage exception breakpoints.
 The debug adapter will then stop whenever an exception of a given kind occurs. Which exception types are supported depends on the debug adapter.
 
+## Working with Split Panes
+
+When debugging with multiple split panes open, Zed shows the active debug line in one pane and preserves your layout in others. If you have the same file open in multiple panes, the debugger picks a pane where the file is already the active tab—it won't switch tabs in panes where the file is inactive.
+
+Once the debugger picks a pane, it continues using that pane for subsequent breakpoints during the session. If you drag the tab with the active debug line to a different split, the debugger tracks the move and uses the new pane.
+
+This ensures the debugger doesn't disrupt your workflow when stepping through code across different files.
+
 ## Settings
 
 The settings for the debugger are grouped under the `debugger` key in `settings.json`:

docs/src/development.md 🔗

@@ -86,6 +86,30 @@ For benchmarking unit tests, annotate them with the `#[perf]` attribute from the
 perf-test -p $CRATE` to benchmark them. See the rustdoc documentation on `crates/util_macros` and `tooling/perf` for
 in-depth examples and explanations.
 
+## ETW Profiling on Windows
+
+Zed supports performance profiling with Event Tracing for Windows (ETW) to capture detailed performance data, including CPU, GPU, memory, disk, and file I/O activity. Data is saved to an `.etl` file, which can be opened in standard profiling tools for analysis.
+
+ETW recordings may contain personally identifiable or security-sensitive information, such as paths to files and registry keys accessed, as well as process names. Please keep this in mind when sharing traces with others.
+
+### Recording a trace
+
+Open the command palette and run one of the following:
+
+- `zed: record etw trace`: records CPU, GPU, memory, and I/O activity
+- `zed: record etw trace with heap tracing`: includes heap allocation data for the Zed process
+
+Zed will prompt you to choose a save location for the `.etl` file, then request administrator permission. Once granted, recording will begin.
+
+### Saving or canceling
+
+While a trace is recording, open the command palette and run one of the following:
+
+- `zed: save etw trace`: stops recording and saves the trace to disk
+- `zed: cancel etw trace`: stops recording without saving
+
+Recordings automatically save after 60 seconds if not stopped manually.
+
 ## Contributor links
 
 - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md)

docs/src/extensions/languages.md 🔗

@@ -434,6 +434,40 @@ The `semantic_tokens` setting accepts the following values:
 - `"combined"`: Use LSP semantic tokens together with tree-sitter highlighting.
 - `"full"`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
 
+#### Extension-Provided Semantic Token Rules
+
+Language extensions can ship default semantic token rules for their language server's custom token types. To do this, place a `semantic_token_rules.json` file in the language directory alongside `config.toml`:
+
+```
+my-extension/
+  languages/
+    my-language/
+      config.toml
+      highlights.scm
+      semantic_token_rules.json
+```
+
+The file uses the same format as the `semantic_token_rules` array in user settings — a JSON array of rule objects:
+
+```json
+[
+  {
+    "token_type": "lifetime",
+    "style": ["lifetime"]
+  },
+  {
+    "token_type": "builtinType",
+    "style": ["type"]
+  },
+  {
+    "token_type": "selfKeyword",
+    "style": ["variable.special"]
+  }
+]
+```
+
+This is useful when a language server reports custom (non-standard) semantic token types that aren't covered by Zed's built-in default rules. Extension-provided rules act as sensible defaults for that language — users can always override them via `semantic_token_rules` in their settings file, and built-in default rules are only used when neither user nor extension rules match.
+
 #### Customizing Semantic Token Styles
 
 Zed supports customizing the styles used for semantic tokens. You can define rules in your settings file, which customize how semantic tokens get mapped to styles in your theme.
@@ -463,7 +497,13 @@ Zed supports customizing the styles used for semantic tokens. You can define rul
 }
 ```
 
-All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. User-defined rules take priority over the default rules.
+All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted.
+
+Rules are applied in the following priority order (highest to lowest):
+
+1. **User settings** — rules from `semantic_token_rules` in your settings file.
+2. **Extension rules** — rules from `semantic_token_rules.json` in extension language directories.
+3. **Default rules** — Zed's built-in rules for standard LSP token types.
 
 Each rule in the `semantic_token_rules` array is defined as follows:
 

docs/src/finding-navigating.md 🔗

@@ -19,8 +19,6 @@ Open any file in your project with {#kb file_finder::Toggle}. Type part of the f
 
 ## Project Search
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 Search across all files with {#kb pane::DeploySearch}. Start typing in the search field to begin searching—results appear as you type.
 
 Results appear in a [multibuffer](./multibuffers.md), letting you edit matches in place.

docs/src/getting-started.md 🔗

@@ -11,6 +11,12 @@ This guide covers the essential commands, environment setup, and navigation basi
 
 ## Quick Start
 
+### Welcome Page
+
+When you open Zed without a folder, you see the welcome page in the main editor area. The welcome page offers quick actions to open a folder, clone a repository, or view documentation. Once you open a folder or file, the welcome page disappears. If you split the editor into multiple panes, the welcome page appears only in the center pane when empty—other panes show a standard empty state.
+
+To reopen the welcome page, close all items in the center pane or use the command palette to search for "Welcome".
+
 ### 1. Open a Project
 
 Open a folder from the command line:

docs/src/git.md 🔗

@@ -72,8 +72,6 @@ To disable word diff for specific languages only, add this to your settings.json
 
 ### Diff View Styles
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 Zed displays diffs in two modes: **split** (side-by-side comparison) or **unified** (inline changes). Split view is the default.
 
 #### Changing the diff view
@@ -101,11 +99,11 @@ You can switch between modes at any time. Your preference applies to [Project Di
 
 File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit.
 
-To open File History:
+To view File History:
 
-- Right-click on a file in the Project Panel and select "Open File History"
-- Right-click on a file in the Git Panel and select "Open File History"
-- Right-click on an editor tab and select "Open File History"
+- Right-click on a file in the Project Panel and select "View File History"
+- Right-click on a file in the Git Panel and select "View File History"
+- Right-click on an editor tab and select "View File History"
 - Use the Command Palette and search for "file history"
 
 ## Fetch, Push, and Pull

docs/src/globs.md 🔗

@@ -24,8 +24,6 @@ A glob "pattern" is used to match a file name or complete file path. For example
 
 ### Multiple Patterns
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 You can specify multiple glob patterns in Project Search filters by separating them with commas. When using comma-separated patterns, Zed correctly handles braces within individual patterns:
 
 - `*.ts, *.tsx` — Match TypeScript and TSX files

docs/src/languages/ansible.md 🔗

@@ -14,10 +14,13 @@ Support for Ansible in Zed is provided via a community-maintained [Ansible exten
 
 ### File detection
 
-To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. To change this behavior you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example:
+To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default.
+
+To change this behavior, you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example:
 
 ```json [settings]
-"file_types": {
+{
+  "file_types": {
     "Ansible": [
       "**.ansible.yml",
       "**.ansible.yaml",
@@ -39,6 +42,7 @@ To avoid mishandling non-Ansible YAML files, the Ansible Language is not associa
       "**playbook*.yaml"
     ]
   }
+}
 ```
 
 Feel free to modify this list as per your needs.
@@ -47,34 +51,36 @@ Feel free to modify this list as per your needs.
 
 If your inventory file is in the YAML format, you can either:
 
-- Append the `ansible-lint` inventory json schema to it via the following comment at the top of your inventory file:
+- Append the `ansible-lint` inventory JSON schema to it via the following comment at the top of your inventory file:
 
 ```yml
 # yaml-language-server: $schema=https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json
 ```
 
-- Or configure the yaml language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)):
+- or, configure the YAML language server settings to set this schema for all your inventory files that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)):
 
 ```json [settings]
-"lsp": {
+{
+  "lsp": {
     "yaml-language-server": {
       "settings": {
         "yaml": {
           "schemas": {
             "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json": [
               "./inventory/*.yaml",
-              "hosts.yml",
+              "hosts.yml"
             ]
           }
         }
       }
     }
-},
+  }
+}
 ```
 
 ### LSP Configuration
 
-By default, the following default config is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server:
+By default, the following configuration is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server:
 
 ```json
 {
@@ -99,31 +105,32 @@ By default, the following default config is passed to the Ansible language serve
 }
 ```
 
-> [!NOTE]
-> In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your PATH
+> **Note:** In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your `$PATH`.
 
 When desired, any of the above default settings can be overridden under the `"lsp"` section of your Zed settings file. For example:
 
 ```json [settings]
-"lsp": {
-  // Note, the Zed Ansible extension prefixes all settings with `ansible`
-  // so instead of using `ansible.ansible.path` use `ansible.path`.
-  "ansible-language-server": {
-    "settings": {
-      "ansible": {
-        "path": "ansible"
-      },
-      "executionEnvironment": {
-        "enabled": false
-      },
-      "python": {
-        "interpreterPath": "python3"
-      },
-      "validation": {
-        "enabled": false, // disable validation
-        "lint": {
-          "enabled": false, // disable ansible-lint
-          "path": "ansible-lint"
+{
+  "lsp": {
+    // The Zed Ansible extension prefixes all settings with `ansible`
+    // so use `ansible.path` instead of `ansible.ansible.path`.
+    "ansible-language-server": {
+      "settings": {
+        "ansible": {
+          "path": "ansible"
+        },
+        "executionEnvironment": {
+          "enabled": false
+        },
+        "python": {
+          "interpreterPath": "python3"
+        },
+        "validation": {
+          "enabled": false,
+          "lint": {
+            "enabled": false,
+            "path": "ansible-lint"
+          }
         }
       }
     }
@@ -131,5 +138,4 @@ When desired, any of the above default settings can be overridden under the `"ls
 }
 ```
 
-A full list of options/settings, that can be passed to the server, can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/5a89836d66d470fb9d20e7ea8aa2af96f12f61fb/docs/als/settings.md).
-Feel free to modify option values as needed.
+A full list of options/settings that can be passed to the server can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/main/docs/als/settings.md).

docs/src/languages/bash.md 🔗

@@ -5,8 +5,7 @@ description: "Configure Bash language support in Zed, including language servers
 
 # Bash
 
-Bash language support in Zed is provided by the community-maintained [Basher extension](https://github.com/d1y/bash.zed).
-Report issues to: [https://github.com/d1y/bash.zed/issues](https://github.com/d1y/bash.zed/issues)
+Bash support is available through the [Bash extension](https://github.com/zed-extensions/bash).
 
 - Tree-sitter: [tree-sitter/tree-sitter-bash](https://github.com/tree-sitter/tree-sitter-bash)
 - Language Server: [bash-lsp/bash-language-server](https://github.com/bash-lsp/bash-language-server)

docs/src/languages/json.md 🔗

@@ -54,8 +54,6 @@ For example to for a `.luarc.json` for use with [lua-language-server](https://gi
 
 ### Schema Specification via Settings
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 You can associate JSON Schemas with file paths using relative paths in your language server settings. Zed resolves paths relative to your project root:
 
 ```json [settings]

docs/src/languages/python.md 🔗

@@ -89,8 +89,8 @@ Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages
   "languages": {
     "Python": {
       "language_servers": [
-        // Disable basedpyright and enable ty, and otherwise
-        // use the default configuration.
+        // Disable basedpyright and enable ty, and include all
+        // other registered language servers (ruff, pylsp, pyright).
         "ty",
         "!basedpyright",
         "..."

docs/src/languages/yaml.md 🔗

@@ -12,8 +12,6 @@ YAML support is available natively in Zed.
 
 ## Configuration
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 You can configure various [yaml-language-server settings](https://github.com/redhat-developer/yaml-language-server?tab=readme-ov-file#language-server-settings) by adding them to your Zed settings.json in a `yaml-language-server` block under the `lsp` key.
 
 You can configure custom YAML schemas using relative paths. Zed resolves paths relative to your project root:

docs/src/migrate/vs-code.md 🔗

@@ -317,18 +317,12 @@ If you’re used to GitHub Copilot in VS Code, you can do the same in Zed. You c
 
 #### Configuring GitHub Copilot
 
-You should be able to sign-in to GitHub Copilot by clicking on the Zeta icon in the status bar and following the setup instructions.
-You can also add this to your settings:
+1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows)
+2. Navigate to **AI → Edit Predictions**
+3. Click **Configure** next to "Configure Providers"
+4. Under **GitHub Copilot**, click **Sign in to GitHub**
 
-```json
-{
-  "features": {
-    "edit_prediction_provider": "copilot"
-  }
-}
-```
-
-To invoke completions, just start typing. Zed will offer suggestions inline for you to accept.
+Once signed in, just start typing. Zed will offer suggestions inline for you to accept.
 
 #### Additional AI Options
 

docs/src/outline-panel.md 🔗

@@ -7,7 +7,7 @@ description: Navigate code structure with Zed's outline panel. View symbols, jum
 
 In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar.
 
-When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file.
+When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal—it displays the outline of the current buffer's symbols. Each symbol entry shows its type prefix (such as "struct", "fn", "mod", "impl") along with the symbol name, helping you quickly identify what kind of symbol you're looking at. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file.
 
 ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png)
 

docs/src/performance.md 🔗

@@ -78,7 +78,7 @@ Download the importer
 - `cd import && mkdir build && cd build`
 - Run cmake to generate build files: `cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..`
 - Build the importer: `ninja`
-- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof /path/to/output.tracy`
+- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof.json /path/to/output.tracy`
 - Open the trace in tracy:
   - If you're on windows download the v0.12.2 version from the releases on the upstream repo
   - If you're on other platforms open it on the website: https://tracy.nereid.pl/ (the version might mismatch so your luck might vary, we need to host our own ideally..)
@@ -87,7 +87,7 @@ Download the importer
 
 - Run the action: `zed open performance profiler`
 - Hit the save button. This opens a save dialog or if that fails to open the trace gets saved in your working directory.
-- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler <path to performance_profile.miniprof> output.tracy`
+- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler <path to performance_profile.miniprof.json> output.tracy`
 - Go to <https://tracy.nereid.pl/> hit the 'power button' in the top left and then open saved trace.
 - Now zoom in to see the tasks and how long they took
 

docs/src/reference/all-settings.md 🔗

@@ -519,8 +519,6 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed
 - `"unified"`: Show changes inline with added and deleted lines stacked vertically
 - `"split"`: Display old and new versions side by side in separate panes (default)
 
-> **Changed in Preview (v0.225).** Values renamed from `"stacked"`/`"side_by_side"` to `"unified"`/`"split"`.
-
 See [Git documentation](../git.md#diff-view-styles) for more details.
 
 ## Disable AI
@@ -1802,17 +1800,7 @@ While other options may be changed at a runtime and should be placed under `sett
 }
 ```
 
-3. Use Supermaven as the edit prediction provider:
-
-```json [settings]
-{
-  "edit_predictions": {
-    "provider": "supermaven"
-  }
-}
-```
-
-4. Turn off edit predictions across all providers
+3. Turn off edit predictions across all providers
 
 ```json [settings]
 {
@@ -2774,8 +2762,6 @@ These values take in the same options as the root-level settings with the same n
 
 ### Document Symbols
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 - Description: Controls the source of document symbols used for outlines and breadcrumbs.
 - Setting: `document_symbols`
 - Default: `off`
@@ -5140,8 +5126,6 @@ See the [debugger page](../debugger.md) for more information about debugging sup
 
 ## Git Worktree Directory
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 - Description: Directory where git worktrees are created, relative to the repository working directory.
 - Setting: `git.worktree_directory`
 - Default: `"../worktrees"`

docs/src/reference/cli.md 🔗

@@ -71,8 +71,6 @@ zed -n ~/projects/myproject
 
 ### `-a`, `--add`
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 Add paths to the currently focused workspace instead of opening a new window. When multiple workspace windows are open, files open in the focused window:
 
 ```sh

docs/src/repl.md 🔗

@@ -151,8 +151,6 @@ TBD: Improve Julia REPL instructions
 
 ## Changing which kernel is used per language {#changing-kernels}
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 Zed automatically detects available kernels and organizes them in the kernel picker:
 
 - **Recommended**: The Python environment matching your active toolchain (if detected)
@@ -193,8 +191,6 @@ To configure a different default kernel for a language, you can assign a kernel
 
 ## Interactive Input
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 When code execution requires user input (such as Python's `input()` function), the REPL displays an input prompt below the cell output.
 
 Type your response in the text field and press `Enter` to submit. The kernel receives your input and continues execution.

docs/src/semantic-tokens.md 🔗

@@ -48,7 +48,7 @@ You can configure this globally or per-language:
 Semantic tokens are styled using rules that map LSP token types and modifiers to theme styles or custom colors. Zed provides sensible defaults, but you can customize these in your settings.json: add rules under `global_lsp_settings.semantic_token_rules` key.
 
 Rules are matched in order, and the first matching rule wins.
-User-defined rules take precedence over defaults.
+User-defined rules take highest precedence, followed by extension-provided language rules, then Zed defaults.
 
 ### Rule Structure
 
@@ -139,7 +139,7 @@ To disable highlighting for a specific token type, add an empty rule that matche
 }
 ```
 
-Since user rules are prepended to defaults and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
+Since user rules take highest precedence and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
 
 ## Default Rules
 

docs/src/snippets.md 🔗

@@ -42,24 +42,4 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead
 ## Known Limitations
 
 - Only the first prefix is used when a list of prefixes is passed in.
-- Currently only the `json` snippet file format is supported, even though the `simple-completion-language-server` supports both `json` and `toml` file formats.
-
-## See also
-
-The `feature_paths` option in `simple-completion-language-server` is disabled by default.
-
-If you want to enable it you can add the following to your `settings.json`:
-
-```json [settings]
-{
-  "lsp": {
-    "snippet-completion-server": {
-      "settings": {
-        "feature_paths": true
-      }
-    }
-  }
-}
-```
-
-For more configuration information, see the [`simple-completion-language-server` instructions](https://github.com/zed-industries/simple-completion-language-server/tree/main).
+- Currently only the `json` snippet file format is supported.

docs/src/tasks.md 🔗

@@ -223,6 +223,35 @@ This could be useful for launching a terminal application that you want to use i
 }
 ```
 
+## VS Code Task Format
+
+When importing VS Code tasks from `.vscode/tasks.json`, you can omit the `label` field. Zed automatically generates labels based on the task type:
+
+- **npm tasks**: `npm: <script>` (e.g., `npm: start`)
+- **gulp tasks**: `gulp: <task>` (e.g., `gulp: build`)
+- **shell tasks**: Uses the `command` string directly (e.g., `echo hello`), or `shell` if the command is empty
+- **Tasks without type**: `Untitled Task`
+
+Example task file with auto-generated labels:
+
+```json
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "type": "npm",
+      "script": "start"
+    },
+    {
+      "type": "shell",
+      "command": "cargo build --release"
+    }
+  ]
+}
+```
+
+These tasks appear in the task picker as "npm: start" and "cargo build --release". You can override the generated label by providing an explicit `label` field.
+
 ## Binding runnable tags to task templates
 
 Zed supports overriding the default action for inline runnable indicators via workspace-local and global `tasks.json` file with the following precedence hierarchy:
@@ -249,8 +278,6 @@ When you have a task definition that is bound to the runnable, you can quickly r
 
 ## Running Bash Scripts
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 You can run bash scripts directly from Zed. When you open a `.sh` or `.bash` file, Zed automatically detects the script as runnable and makes it available in the task picker.
 
 To run a bash script:
@@ -276,8 +303,6 @@ If you need to pass arguments or customize the execution environment, add a task
 
 ## Shell Initialization
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 When Zed runs a task, it launches the command in a login shell. This ensures your shell's initialization files (`.bash_profile`, `.zshrc`, etc.) are sourced before the task executes.
 
 This behavior gives tasks access to the same environment variables, aliases, and PATH modifications you've configured in your shell profile. If a task fails to find a command that works in your terminal, verify your shell configuration files are properly set up.

docs/src/telemetry.md 🔗

@@ -5,7 +5,12 @@ description: "What data Zed collects and how to control telemetry settings."
 
 # Telemetry in Zed
 
-Zed collects anonymous telemetry data to help the team understand how people are using the application and to see what sort of issues they are experiencing.
+Zed collects anonymous telemetry to understand usage patterns and diagnose issues.
+
+Telemetry falls into two categories:
+
+- **Client-side**: Usage metrics and crash reports. You can disable these in settings.
+- **Server-side**: Collected when using hosted services like AI or Collaboration. Required for these features to function.
 
 ## Configuring Telemetry Settings
 
@@ -21,7 +26,7 @@ To enable or disable some or all telemetry types, open Settings ({#kb zed::OpenS
 
 ## Dataflow
 
-Telemetry is sent from the application to our servers. Data is proxied through our servers to enable us to easily switch analytics services. We currently use:
+Telemetry is sent from the application to our servers every 5 minutes (or when 50 events accumulate), then routed to the appropriate service. We currently use:
 
 - [Sentry](https://sentry.io): Crash-monitoring service - stores diagnostic events
 - [Snowflake](https://snowflake.com): Data warehouse - stores both diagnostic and metric events
@@ -32,33 +37,33 @@ Telemetry is sent from the application to our servers. Data is proxied through o
 
 ### Diagnostics
 
-Crash reports consist of a [minidump](https://learn.microsoft.com/en-us/windows/win32/debug/minidump-files) and some extra debug information. Reports are sent on the first application launch after the crash occurred. We've built dashboards that allow us to visualize the frequency and severity of issues experienced by users. Having these reports sent automatically allows us to begin implementing fixes without the user needing to file a report in our issue tracker. The plots in the dashboards also give us an informal measurement of the stability of Zed.
+Crash reports consist of a [minidump](https://learn.microsoft.com/en-us/windows/win32/debug/minidump-files) and debug metadata. Reports are sent on the next launch after a crash, allowing Zed to identify and fix issues without requiring you to file a bug report.
 
-You can see what extra data is sent alongside the minidump in the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs) in the Zed repo. You can find additional information in the [Debugging Crashes](./development/debugging-crashes.md) documentation.
+You can inspect what data is sent in the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs). See also: [Debugging Crashes](./development/debugging-crashes.md).
 
-### Client-Side Usage Data {#client-metrics}
+### Client-Side Metrics
 
-To improve Zed and understand how it is being used in the wild, Zed optionally collects usage data like the following:
+Client-side telemetry includes:
 
-- (a) file extensions of opened files;
-- (b) features and tools You use within the Editor;
-- (c) project statistics (e.g., number of files); and
-- (d) frameworks detected in Your projects
+- File extensions of opened files
+- Features and tools used within the editor
+- Project statistics (e.g., number of files)
+- Frameworks detected in your projects
 
-Usage Data does not include any of Your software code or sensitive project details. Metric events are reported over HTTPS, and requests are rate-limited to avoid using significant network bandwidth.
+This data does not include your code or sensitive project details. Events are sent over HTTPS and rate-limited.
 
-Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions.
+Usage data is tied to a random telemetry ID. If you've authenticated, this ID may be linked to your email so Zed can analyze patterns over time and reach out for feedback.
 
-You can audit the metrics data that Zed has reported by running the command {#action zed::OpenTelemetryLog} from the command palette, or clicking `Help > View Telemetry Log` in the application menu.
+To audit what Zed has reported, run {#action zed::OpenTelemetryLog} from the command palette or click `Help > View Telemetry Log`.
 
-You can see the full list of the event types and exactly the data sent for each by inspecting the `Event` enum and the associated structs in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs) in the Zed repository.
+For the full list of event types, see the `Event` enum in [telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs).
 
-### Server-Side Usage Data {#metrics}
+### Server-Side Metrics
 
-When using Zed's hosted services, we may collect, generate, and Process data to allow us to support users and improve our hosted offering. Examples include metadata around rate limiting and billing metrics/token usage. Zed does not persistently store user content or use user content to evaluate and/or improve our AI features, unless it is explicitly shared with Zed, and we have a zero-data retention agreement with Anthropic.
+When using Zed's hosted services, we collect metadata for rate limiting and billing (e.g., token usage). Zed does not store your prompts or code unless you explicitly share them via feedback ratings.
 
-You can see more about our stance on data collection (and that any prompt data shared with Zed is explicitly opt-in) at [AI Improvement](./ai/ai-improvement.md).
+For details on AI data handling, see [Zed AI Features and Privacy](./ai/ai-improvement.md).
 
 ## Concerns and Questions
 
-If you have concerns about telemetry, please feel free to [open an issue](https://github.com/zed-industries/zed/issues/new/choose).
+If you have concerns about telemetry, you can [open an issue](https://github.com/zed-industries/zed/issues/new/choose) or email hi@zed.dev.

docs/src/troubleshooting.md 🔗

@@ -83,3 +83,22 @@ If your issue persists after regenerating the database, please [file an issue](h
 ## Language Server Issues
 
 If you're experiencing language-server related issues, such as stale diagnostics or issues jumping to definitions, restarting the language server via {#action editor::RestartLanguageServer} from the command palette will often resolve the issue.
+
+## Agent Error Messages
+
+### "Max tokens reached"
+
+You see this error when the agent's response exceeds the model's maximum token limit. This happens when:
+
+- The agent generates an extremely long response
+- The conversation context plus the response exceeds the model's capacity
+- Tool outputs are large and consume the available token budget
+
+**To resolve this:**
+
+1. Start a new thread to reduce context size
+2. Use a model with a larger token limit in AI settings
+3. Break your request into smaller, focused tasks
+4. Clear tool outputs or previous messages using the thread controls
+
+The token limit varies by model—check your model provider's documentation for specific limits.

docs/src/vim.md 🔗

@@ -248,10 +248,6 @@ Below, you'll find tables listing the commands you can use in the command palett
 
 ### File and window management
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
->
-> The `:bd[elete]` command now closes the active file across all panes. Previously, it only closed the file in the active pane.
-
 This table shows commands for managing windows, tabs, and panes. As commands don't support arguments currently, you cannot specify a filename when saving or creating a new file.
 
 | Command         | Description                                          |

docs/src/visual-customization.md 🔗

@@ -436,7 +436,6 @@ TBD: Centered layout related settings
   "file_finder": {
     "file_icons": true,         // Show/hide file icons
     "modal_max_width": "small", // Horizontal size: small, medium, large, xlarge, full
-    "git_status": true,         // Show the git status for each entry
     "include_ignored": null     // gitignored files in results: true, false, null
   },
 ```

docs/theme/analytics.js 🔗

@@ -0,0 +1,93 @@
+const amplitudeKey = document.querySelector(
+  'meta[name="amplitude-key"]',
+)?.content;
+const consentInstance = document.querySelector(
+  'meta[name="consent-io-instance"]',
+)?.content;
+
+document.addEventListener("DOMContentLoaded", () => {
+  if (!consentInstance || consentInstance.length === 0) return;
+  const { getOrCreateConsentRuntime } = window.c15t;
+
+  const { consentStore } = getOrCreateConsentRuntime({
+    mode: "c15t",
+    backendURL: consentInstance,
+    consentCategories: ["necessary", "measurement", "marketing"],
+    storageConfig: {
+      crossSubdomain: true,
+    },
+    scripts: [
+      {
+        id: "amplitude",
+        src: `https://cdn.amplitude.com/script/${amplitudeKey}.js`,
+        category: "measurement",
+        onLoad: () => {
+          window.amplitude.init(amplitudeKey, {
+            fetchRemoteConfig: true,
+            autocapture: true,
+          });
+        },
+      },
+    ],
+  });
+
+  let previousActiveUI = consentStore.getState().activeUI;
+  const banner = document.getElementById("c15t-banner");
+  const configureSection = document.getElementById("c15t-configure-section");
+  const configureBtn = document.getElementById("c15t-configure-btn");
+  const measurementToggle = document.getElementById("c15t-toggle-measurement");
+  const marketingToggle = document.getElementById("c15t-toggle-marketing");
+
+  const toggleConfigureMode = () => {
+    const currentConsents = consentStore.getState().consents;
+    measurementToggle.checked = currentConsents
+      ? (currentConsents.measurement ?? false)
+      : false;
+    marketingToggle.checked = currentConsents
+      ? (currentConsents.marketing ?? false)
+      : false;
+    configureSection.style.display = "flex";
+    configureBtn.innerHTML = "Save";
+    configureBtn.className = "c15t-button secondary";
+    configureBtn.title = "";
+  };
+
+  consentStore.subscribe((state) => {
+    const hideBanner =
+      state.activeUI === "none" ||
+      (state.activeUI === "banner" && state.model === "opt-out");
+    banner.style.display = hideBanner ? "none" : "block";
+
+    if (state.activeUI === "dialog" && previousActiveUI !== "dialog") {
+      toggleConfigureMode();
+    }
+
+    previousActiveUI = state.activeUI;
+  });
+
+  configureBtn.addEventListener("click", () => {
+    if (consentStore.getState().activeUI === "dialog") {
+      consentStore
+        .getState()
+        .setConsent("measurement", measurementToggle.checked);
+      consentStore.getState().setConsent("marketing", marketingToggle.checked);
+      consentStore.getState().saveConsents("custom");
+    } else {
+      consentStore.getState().setActiveUI("dialog");
+    }
+  });
+
+  document.getElementById("c15t-accept").addEventListener("click", () => {
+    consentStore.getState().saveConsents("all");
+  });
+
+  document.getElementById("c15t-decline").addEventListener("click", () => {
+    consentStore.getState().saveConsents("necessary");
+  });
+
+  document
+    .getElementById("c15t-manage-consent-btn")
+    .addEventListener("click", () => {
+      consentStore.getState().setActiveUI("dialog");
+    });
+});

docs/theme/consent-banner.css 🔗

@@ -0,0 +1,292 @@
+#c15t-banner {
+  --color-offgray-50: hsl(218, 12%, 95%);
+  --color-offgray-100: hsl(218, 12%, 88%);
+  --color-offgray-200: hsl(218, 12%, 80%);
+  --color-offgray-300: hsl(218, 12%, 75%);
+  --color-offgray-400: hsl(218, 12%, 64%);
+  --color-offgray-500: hsl(218, 12%, 56%);
+  --color-offgray-600: hsl(218, 12%, 48%);
+  --color-offgray-700: hsl(218, 12%, 40%);
+  --color-offgray-800: hsl(218, 12%, 34%);
+  --color-offgray-900: hsl(218, 12%, 24%);
+  --color-offgray-950: hsl(218, 12%, 15%);
+  --color-offgray-1000: hsl(218, 12%, 5%);
+
+  --color-blue-50: oklch(97% 0.014 254.604);
+  --color-blue-100: oklch(93.2% 0.032 255.585);
+  --color-blue-200: oklch(88.2% 0.059 254.128);
+  --color-blue-300: oklch(80.9% 0.105 251.813);
+  --color-blue-400: oklch(70.7% 0.165 254.624);
+  --color-blue-500: oklch(62.3% 0.214 259.815);
+  --color-blue-600: oklch(54.6% 0.245 262.881);
+  --color-blue-700: oklch(48.8% 0.243 264.376);
+  --color-blue-800: oklch(42.4% 0.199 265.638);
+  --color-blue-900: oklch(37.9% 0.146 265.522);
+  --color-blue-950: oklch(28.2% 0.091 267.935);
+
+  --color-accent-blue: hsla(218, 93%, 42%, 1);
+
+  position: fixed;
+  z-index: 9999;
+  bottom: 16px;
+  right: 16px;
+  border-radius: 4px;
+  max-width: 300px;
+  background: white;
+  border: 1px solid
+    color-mix(in oklab, var(--color-offgray-200) 50%, transparent);
+  box-shadow: 6px 6px 0
+    color-mix(in oklab, var(--color-accent-blue) 6%, transparent);
+}
+
+.dark #c15t-banner {
+  border-color: color-mix(in oklab, var(--color-offgray-600) 14%, transparent);
+  background: var(--color-offgray-1000);
+  box-shadow: 5px 5px 0
+    color-mix(in oklab, var(--color-accent-blue) 8%, transparent);
+}
+
+#c15t-banner > div:first-child {
+  padding: 12px;
+  display: flex;
+  flex-direction: column;
+}
+
+#c15t-banner a {
+  color: var(--links);
+  text-decoration: underline;
+  text-decoration-color: var(--link-line-decoration);
+}
+
+#c15t-banner a:hover {
+  text-decoration-color: var(--link-line-decoration-hover);
+}
+
+#c15t-description {
+  font-size: 12px;
+  margin: 0;
+  margin-top: 4px;
+}
+
+#c15t-configure-section {
+  display: flex;
+  flex-direction: column;
+  gap: 8px;
+  border-top: 1px solid var(--divider);
+  padding: 12px;
+}
+
+#c15t-configure-section > div {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+}
+
+#c15t-configure-section label {
+  text-transform: uppercase;
+  font-size: 11px;
+}
+
+#c15t-footer {
+  padding: 12px;
+  display: flex;
+  justify-content: space-between;
+  border-top: 1px solid var(--divider);
+  background-color: color-mix(
+    in oklab,
+    var(--color-offgray-50) 50%,
+    transparent
+  );
+}
+
+.dark #c15t-footer {
+  background-color: color-mix(
+    in oklab,
+    var(--color-offgray-600) 4%,
+    transparent
+  );
+}
+
+.c15t-button {
+  display: inline-flex;
+  align-items: center;
+  justify-content: center;
+  max-height: 28px;
+  color: black;
+  padding: 4px 8px;
+  font-size: 14px;
+  border-radius: 4px;
+  background: transparent;
+  border: 1px solid transparent;
+  transition: 100ms;
+  transition-property: box-shadow, border-color, background-color;
+}
+
+.c15t-button:hover {
+  background: color-mix(in oklab, var(--color-offgray-100) 50%, transparent);
+}
+
+.dark .c15t-button {
+  color: var(--color-offgray-50);
+}
+
+.dark .c15t-button:hover {
+  background: color-mix(in oklab, var(--color-offgray-500) 10%, transparent);
+}
+
+.c15t-button.icon {
+  padding: 0;
+  width: 24px;
+  height: 24px;
+}
+
+.c15t-button.primary {
+  color: var(--color-blue-700);
+  background: color-mix(in oklab, var(--color-blue-50) 60%, transparent);
+  border-color: color-mix(in oklab, var(--color-blue-500) 20%, transparent);
+  box-shadow: color-mix(in oklab, var(--color-blue-400) 10%, transparent) 0 -2px
+    0 0 inset;
+}
+
+.c15t-button.primary:hover {
+  background: color-mix(in oklab, var(--color-blue-100) 50%, transparent);
+  box-shadow: none;
+}
+
+.dark .c15t-button.primary {
+  color: var(--color-blue-50);
+  background: color-mix(in oklab, var(--color-blue-500) 10%, transparent);
+  border-color: color-mix(in oklab, var(--color-blue-300) 10%, transparent);
+  box-shadow: color-mix(in oklab, var(--color-blue-300) 8%, transparent) 0 -2px
+    0 0 inset;
+}
+
+.dark .c15t-button.primary:hover {
+  background: color-mix(in oklab, var(--color-blue-500) 20%, transparent);
+  box-shadow: none;
+}
+
+.c15t-button.secondary {
+  background: color-mix(in oklab, var(--color-offgray-50) 60%, transparent);
+  border-color: color-mix(in oklab, var(--color-offgray-200) 50%, transparent);
+  box-shadow: color-mix(in oklab, var(--color-offgray-500) 10%, transparent)
+    0 -2px 0 0 inset;
+}
+
+.c15t-button.secondary:hover {
+  background: color-mix(in oklab, var(--color-offgray-100) 50%, transparent);
+  box-shadow: none;
+}
+
+.dark .c15t-button.secondary {
+  background: color-mix(in oklab, var(--color-offgray-300) 5%, transparent);
+  border-color: color-mix(in oklab, var(--color-offgray-400) 20%, transparent);
+  box-shadow: color-mix(in oklab, var(--color-offgray-300) 8%, transparent)
+    0 -2px 0 0 inset;
+}
+
+.dark .c15t-button.secondary:hover {
+  background: color-mix(in oklab, var(--color-offgray-200) 10%, transparent);
+  box-shadow: none;
+}
+
+.c15t-switch {
+  position: relative;
+  display: inline-block;
+  width: 32px;
+  height: 20px;
+  flex-shrink: 0;
+}
+
+.c15t-switch input {
+  opacity: 0;
+  width: 0;
+  height: 0;
+  position: absolute;
+}
+
+.c15t-slider {
+  position: absolute;
+  cursor: pointer;
+  inset: 0;
+  background-color: color-mix(
+    in oklab,
+    var(--color-offgray-100) 80%,
+    transparent
+  );
+  border-radius: 20px;
+  box-shadow: inset 0 0 0 1px color-mix(in oklab, #000 5%, transparent);
+  transition: background-color 0.2s;
+}
+
+.c15t-slider:hover {
+  background-color: var(--color-offgray-100);
+}
+
+.dark .c15t-slider {
+  background-color: color-mix(in oklab, #fff 5%, transparent);
+  box-shadow: inset 0 0 0 1px color-mix(in oklab, #fff 15%, transparent);
+}
+
+.dark .c15t-slider:hover {
+  background-color: color-mix(in oklab, #fff 10%, transparent);
+}
+
+.c15t-slider:before {
+  position: absolute;
+  content: "";
+  height: 14px;
+  width: 14px;
+  left: 3px;
+  bottom: 3px;
+  background-color: white;
+  border-radius: 50%;
+  box-shadow:
+    0 1px 3px 0 rgb(0 0 0 / 0.1),
+    0 1px 2px -1px rgb(0 0 0 / 0.1);
+  transition: transform 0.2s;
+}
+
+.c15t-switch input:checked + .c15t-slider {
+  background-color: var(--color-accent-blue);
+  box-shadow: inset 0 0 0 1px color-mix(in oklab, #000 5%, transparent);
+}
+
+.c15t-switch input:checked + .c15t-slider:hover {
+  background-color: var(--color-accent-blue);
+}
+
+.dark .c15t-switch input:checked + .c15t-slider {
+  background-color: var(--color-accent-blue);
+  box-shadow: inset 0 0 0 1px color-mix(in oklab, #fff 15%, transparent);
+}
+
+.c15t-switch input:checked + .c15t-slider:before {
+  transform: translateX(12px);
+}
+
+.c15t-switch input:disabled + .c15t-slider {
+  opacity: 0.5;
+  cursor: default;
+  pointer-events: none;
+}
+
+.c15t-switch input:disabled + .c15t-slider:hover {
+  background-color: color-mix(
+    in oklab,
+    var(--color-offgray-100) 80%,
+    transparent
+  );
+}
+
+#c15t-manage-consent-btn {
+  appearance: none;
+  background: none;
+  border: none;
+  padding: 0;
+  cursor: pointer;
+}
+
+#c15t-manage-consent-btn:hover {
+  text-decoration-color: var(--link-line-decoration-hover);
+}

docs/theme/css/chrome.css 🔗

@@ -70,7 +70,9 @@ a > .hljs {
 }
 
 .logo-nav {
-  display: block;
+  display: flex;
+  align-items: center;
+  justify-content: center;
   filter: var(--logo-brightness);
 }
 
@@ -140,6 +142,40 @@ a > .hljs {
   text-decoration-color: transparent !important;
 }
 
+.footer {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  flex-wrap: wrap;
+  gap: 0.5rem;
+  padding: 24px 0;
+  border-top: 1px dashed var(--border-footer);
+}
+
+.footer-link {
+  font-size: 1.25rem;
+  color: var(--links);
+  text-decoration: underline;
+  text-decoration-color: var(--link-line-decoration);
+  text-wrap-mode: nowrap;
+
+  :hover {
+    text-decoration-color: var(--link-line-decoration-hover);
+  }
+}
+
+.footer-separator {
+  color: var(--border-light);
+  font-size: 0.8em;
+}
+
+.footer-logo {
+  height: 16px;
+  border-radius: 0 !important;
+  border: none !important;
+  background: transparent !important;
+}
+
 .mobile-nav-chapters {
   font-size: 2.5em;
   text-align: center;

docs/theme/css/variables.css 🔗

@@ -27,6 +27,7 @@
   --border: hsl(220, 13%, 80%);
   --border-light: hsl(220, 13%, 90%);
   --border-hover: hsl(220, 13%, 70%);
+  --border-footer: hsl(220, 13%, 91%);
 
   --media-bg: hsl(50, 25%, 92%);
 
@@ -124,6 +125,7 @@
   --border: hsl(220, 13%, 20%);
   --border-light: hsl(220, 13%, 15%);
   --border-hover: hsl(220, 13%, 40%);
+  --border-footer: hsl(220, 13%, 12%);
 
   --media-bg: hsl(220, 13%, 8%);
 

docs/theme/index.hbs 🔗

@@ -70,6 +70,8 @@
         <!-- MathJax -->
         <script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
         {{/if}}
+        <meta name="amplitude-key" content="#amplitude_key#" />
+        <meta name="consent-io-instance" content="#consent_io_instance#" />
     </head>
     <body class="no-js">
     <div id="body-container">
@@ -307,6 +309,50 @@
                             </a>
                             {{/next}}
                         </div>
+                        <footer class="footer">
+                            <a href="/" class="logo-nav">
+                                <img
+                                    src="https://zed.dev/logo_icon.webp"
+                                    class="footer-logo"
+                                    alt="Zed Industries"
+                                />
+                            </a>
+                            <span class="footer-separator">•</span>
+                            <a class="footer-link" href="https://zed.dev"
+                                >Back to Site</a
+                            >
+                            <span class="footer-separator">•</span>
+                            <a
+                                class="footer-link"
+                                href="https://zed.dev/releases"
+                                >Releases</a
+                            >
+                            <span class="footer-separator">•</span>
+                            <a
+                                class="footer-link"
+                                href="https://zed.dev/roadmap"
+                                >Roadmap</a
+                            >
+                            <span class="footer-separator">•</span>
+                            <a
+                                class="footer-link"
+                                href="https://github.com/zed-industries/zed"
+                                >GitHub</a
+                            >
+                            <span class="footer-separator">•</span>
+                            <a
+                                class="footer-link"
+                                href="https://zed.dev/blog"
+                                >Blog</a
+                            >
+                            <span class="footer-separator">•</span>
+                            <button
+                                id="c15t-manage-consent-btn"
+                                class="footer-link"
+                            >
+                                Manage Site Cookies
+                            </button>
+                        </footer>
                     </main>
                     <div class="toc-container">
                         <nav class="pagetoc"></nav>
@@ -407,23 +453,82 @@
         {{/if}}
         {{/if}}
 
-        <!-- Amplitude Analytics -->
-        <script>
-          (function() {
-            var amplitudeKey = '#amplitude_key#';
-            if (amplitudeKey && amplitudeKey.indexOf('#') === -1) {
-              var script = document.createElement('script');
-              script.src = 'https://cdn.amplitude.com/script/' + amplitudeKey + '.js';
-              script.onload = function() {
-                window.amplitude.init(amplitudeKey, {
-                  fetchRemoteConfig: true,
-                  autocapture: true
-                });
-              };
-              document.head.appendChild(script);
-            }
-          })();
-        </script>
+        <!-- c15t Consent Banner -->
+        <div id="c15t-banner" style="display: none;">
+            <div>
+                <p id="c15t-description">
+                    Zed uses cookies to improve your experience and for marketing. Read <a href="https://zed.dev/cookie-policy">our cookie policy</a> for more details.
+                </p>
+            </div>
+            <div id="c15t-configure-section" style="display: none">
+                <div>
+                    <label for="c15t-toggle-necessary"
+                        >Strictly Necessary</label
+                    >
+                    <label class="c15t-switch">
+                        <input
+                            type="checkbox"
+                            id="c15t-toggle-necessary"
+                            checked
+                            disabled
+                        />
+                        <span class="c15t-slider"></span>
+                    </label>
+                </div>
+                <div>
+                    <label for="c15t-toggle-measurement">Analytics</label>
+                    <label class="c15t-switch">
+                        <input
+                            type="checkbox"
+                            id="c15t-toggle-measurement"
+                        />
+                        <span class="c15t-slider"></span>
+                    </label>
+                </div>
+                <div>
+                    <label for="c15t-toggle-marketing">Marketing</label>
+                    <label class="c15t-switch">
+                        <input
+                            type="checkbox"
+                            id="c15t-toggle-marketing"
+                        />
+                        <span class="c15t-slider"></span>
+                    </label>
+                </div>
+            </div>
+            <div id="c15t-footer">
+                <button
+                    id="c15t-configure-btn"
+                    class="c15t-button icon"
+                    title="Configure"
+                >
+                    <svg
+                        xmlns="http://www.w3.org/2000/svg"
+                        width="14"
+                        height="14"
+                        viewBox="0 0 24 24"
+                        fill="none"
+                        stroke="currentColor"
+                        stroke-width="2"
+                        stroke-linecap="round"
+                        stroke-linejoin="round"
+                    >
+                        <path d="M20 7h-9" />
+                        <path d="M14 17H5" />
+                        <circle cx="17" cy="17" r="3" />
+                        <circle cx="7" cy="7" r="3" />
+                    </svg>
+                </button>
+                <div>
+                    <button id="c15t-decline" class="c15t-button">
+                        Reject all
+                    </button>
+                    <button id="c15t-accept" class="c15t-button primary">
+                        Accept all
+                    </button>
+                </div>
+            </div>
+        </div>
     </div>
     </body>
 </html>

extensions/glsl/languages/glsl/highlights.scm 🔗

@@ -1,82 +1,136 @@
 "break" @keyword
+
 "case" @keyword
+
 "const" @keyword
+
 "continue" @keyword
+
 "default" @keyword
+
 "do" @keyword
+
 "else" @keyword
+
 "enum" @keyword
+
 "extern" @keyword
+
 "for" @keyword
+
 "if" @keyword
+
 "inline" @keyword
+
 "return" @keyword
+
 "sizeof" @keyword
+
 "static" @keyword
+
 "struct" @keyword
+
 "switch" @keyword
+
 "typedef" @keyword
+
 "union" @keyword
+
 "volatile" @keyword
+
 "while" @keyword
 
 "#define" @keyword
+
 "#elif" @keyword
+
 "#else" @keyword
+
 "#endif" @keyword
+
 "#if" @keyword
+
 "#ifdef" @keyword
+
 "#ifndef" @keyword
+
 "#include" @keyword
+
 (preproc_directive) @keyword
 
 "--" @operator
+
 "-" @operator
+
 "-=" @operator
+
 "->" @operator
+
 "=" @operator
+
 "!=" @operator
+
 "*" @operator
+
 "&" @operator
+
 "&&" @operator
+
 "+" @operator
+
 "++" @operator
+
 "+=" @operator
+
 "<" @operator
+
 "==" @operator
+
 ">" @operator
+
 "||" @operator
 
 "." @delimiter
+
 ";" @delimiter
 
 (string_literal) @string
+
 (system_lib_string) @string
 
 (null) @constant
+
 (number_literal) @number
+
 (char_literal) @number
 
 (identifier) @variable
 
 (field_identifier) @property
+
 (statement_identifier) @label
+
 (type_identifier) @type
+
 (primitive_type) @type
+
 (sized_type_specifier) @type
 
 (call_expression
   function: (identifier) @function)
+
 (call_expression
   function: (field_expression
     field: (field_identifier) @function))
+
 (function_declarator
   declarator: (identifier) @function)
+
 (preproc_function_def
   name: (identifier) @function.special)
 
 ((identifier) @constant
- (#match? @constant "^[A-Z][A-Z\\d_]*$"))
+  (#match? @constant "^[A-Z][A-Z\\d_]*$"))
 
 (comment) @comment
 
@@ -111,7 +165,5 @@
 
 (extension_storage_class) @storageclass
 
-(
-  (identifier) @variable.builtin
-  (#match? @variable.builtin "^gl_")
-)
+((identifier) @variable.builtin
+  (#match? @variable.builtin "^gl_"))

extensions/html/languages/html/highlights.scm 🔗

@@ -1,12 +1,17 @@
 (tag_name) @tag
+
 (doctype) @tag.doctype
+
 (attribute_name) @attribute
+
 [
   "\""
   "'"
   (attribute_value)
 ] @string
+
 (comment) @comment
+
 (entity) @string.special
 
 "=" @punctuation.delimiter.html

extensions/html/languages/html/indents.scm 🔗

@@ -1,5 +1,8 @@
-(start_tag ">" @end) @indent
-(self_closing_tag "/>" @end) @indent
+(start_tag
+  ">" @end) @indent
+
+(self_closing_tag
+  "/>" @end) @indent
 
 (element
   (start_tag) @start

extensions/html/languages/html/injections.scm 🔗

@@ -1,6 +1,5 @@
 ((comment) @injection.content
- (#set! injection.language "comment")
-)
+  (#set! injection.language "comment"))
 
 (script_element
   (raw_text) @injection.content
@@ -11,11 +10,15 @@
   (#set! injection.language "css"))
 
 (attribute
-    (attribute_name) @_attribute_name (#match? @_attribute_name "^style$")
-    (quoted_attribute_value (attribute_value) @injection.content)
-    (#set! injection.language "css"))
+  (attribute_name) @_attribute_name
+  (#match? @_attribute_name "^style$")
+  (quoted_attribute_value
+    (attribute_value) @injection.content)
+  (#set! injection.language "css"))
 
 (attribute
-    (attribute_name) @_attribute_name (#match? @_attribute_name "^on[a-z]+$")
-    (quoted_attribute_value (attribute_value) @injection.content)
-    (#set! injection.language "javascript"))
+  (attribute_name) @_attribute_name
+  (#match? @_attribute_name "^on[a-z]+$")
+  (quoted_attribute_value
+    (attribute_value) @injection.content)
+  (#set! injection.language "javascript"))

extensions/proto/languages/proto/indents.scm 🔗

@@ -1,3 +1,11 @@
-(_ "{" "}" @end) @indent
-(_ "[" "]" @end) @indent
-(_ "(" ")" @end) @indent
+(_
+  "{"
+  "}" @end) @indent
+
+(_
+  "["
+  "]" @end) @indent
+
+(_
+  "("
+  ")" @end) @indent

extensions/proto/languages/proto/outline.scm 🔗

@@ -1,19 +1,19 @@
 (message
-    "message" @context
-    (message_name
-        (identifier) @name)) @item
+  "message" @context
+  (message_name
+    (identifier) @name)) @item
 
 (service
-    "service" @context
-    (service_name
-        (identifier) @name)) @item
+  "service" @context
+  (service_name
+    (identifier) @name)) @item
 
 (rpc
-    "rpc" @context
-    (rpc_name
-        (identifier) @name)) @item
+  "rpc" @context
+  (rpc_name
+    (identifier) @name)) @item
 
 (enum
-    "enum" @context
-    (enum_name
-        (identifier) @name)) @item
+  "enum" @context
+  (enum_name
+    (identifier) @name)) @item

extensions/proto/languages/proto/textobjects.scm 🔗

@@ -1,17 +1,21 @@
-(message (message_body
+(message
+  (message_body
     "{"
     (_)* @class.inside
     "}")) @class.around
-(enum (enum_body
+
+(enum
+  (enum_body
     "{"
     (_)* @class.inside
     "}")) @class.around
+
 (service
-    "service"
-    (_)
-    "{"
-    (_)* @class.inside
-    "}") @class.around
+  "service"
+  (_)
+  "{"
+  (_)* @class.inside
+  "}") @class.around
 
 (rpc) @function.around
 

extensions/test-extension/languages/gleam/highlights.scm 🔗

@@ -1,6 +1,8 @@
 ; Comments
 (module_comment) @comment
+
 (statement_comment) @comment
+
 (comment) @comment
 
 ; Constants
@@ -9,43 +11,61 @@
 
 ; Variables
 (identifier) @variable
+
 (discard) @comment.unused
 
 ; Modules
 (module) @module
-(import alias: (identifier) @module)
+
+(import
+  alias: (identifier) @module)
+
 (remote_type_identifier
   module: (identifier) @module)
+
 (remote_constructor_name
   module: (identifier) @module)
+
 ((field_access
   record: (identifier) @module
   field: (label) @function)
- (#is-not? local))
+  (#is-not? local))
 
 ; Functions
-(unqualified_import (identifier) @function)
-(unqualified_import "type" (type_identifier) @type)
-(unqualified_import (type_identifier) @constructor)
+(unqualified_import
+  (identifier) @function)
+
+(unqualified_import
+  "type"
+  (type_identifier) @type)
+
+(unqualified_import
+  (type_identifier) @constructor)
+
 (function
   name: (identifier) @function)
+
 (external_function
   name: (identifier) @function)
+
 (function_parameter
   name: (identifier) @variable.parameter)
+
 ((function_call
-   function: (identifier) @function)
- (#is-not? local))
+  function: (identifier) @function)
+  (#is-not? local))
+
 ((binary_expression
-   operator: "|>"
-   right: (identifier) @function)
- (#is-not? local))
+  operator: "|>"
+  right: (identifier) @function)
+  (#is-not? local))
 
 ; "Properties"
 ; Assumed to be intended to refer to a name for a field; something that comes
 ; before ":" or after "."
 ; e.g. record field names, tuple indices, names for named arguments, etc
 (label) @property
+
 (tuple_access
   index: (integer) @property)
 
@@ -54,10 +74,12 @@
   "@" @attribute
   name: (identifier) @attribute)
 
-(attribute_value (identifier) @constant)
+(attribute_value
+  (identifier) @constant)
 
 ; Type names
 (remote_type_identifier) @type
+
 (type_identifier) @type
 
 ; Data constructors
@@ -65,19 +87,24 @@
 
 ; Literals
 (string) @string
+
 ((escape_sequence) @warning
- ; Deprecated in v0.33.0-rc2:
- (#eq? @warning "\\e"))
+  ; Deprecated in v0.33.0-rc2:
+  (#eq? @warning "\\e"))
+
 (escape_sequence) @string.escape
+
 (bit_string_segment_option) @function.builtin
+
 (integer) @number
+
 (float) @number
 
 ; Reserved identifiers
 ; TODO: when tree-sitter supports `#any-of?` in the Rust bindings,
 ; refactor this to use `#any-of?` rather than `#match?`
 ((identifier) @warning
- (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$"))
+  (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$"))
 
 ; Keywords
 [
@@ -102,8 +129,12 @@
 ; Operators
 (binary_expression
   operator: _ @operator)
-(boolean_negation "!" @operator)
-(integer_negation "-" @operator)
+
+(boolean_negation
+  "!" @operator)
+
+(integer_negation
+  "-" @operator)
 
 ; Punctuation
 [
@@ -116,10 +147,11 @@
   "<<"
   ">>"
 ] @punctuation.bracket
+
 [
   "."
   ","
-  ;; Controversial -- maybe some are operators?
+  ; Controversial -- maybe some are operators?
   ":"
   "#"
   "="

extensions/test-extension/languages/gleam/outline.scm 🔗

@@ -1,31 +1,31 @@
 (external_type
-    (visibility_modifier)? @context
-    "type" @context
-    (type_name) @name) @item
+  (visibility_modifier)? @context
+  "type" @context
+  (type_name) @name) @item
 
 (type_definition
-    (visibility_modifier)? @context
-    (opacity_modifier)? @context
-    "type" @context
-    (type_name) @name) @item
+  (visibility_modifier)? @context
+  (opacity_modifier)? @context
+  "type" @context
+  (type_name) @name) @item
 
 (data_constructor
-    (constructor_name) @name) @item
+  (constructor_name) @name) @item
 
 (data_constructor_argument
-    (label) @name) @item
+  (label) @name) @item
 
 (type_alias
-    (visibility_modifier)? @context
-    "type" @context
-    (type_name) @name) @item
+  (visibility_modifier)? @context
+  "type" @context
+  (type_name) @name) @item
 
 (function
-    (visibility_modifier)? @context
-    "fn" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "fn" @context
+  name: (_) @name) @item
 
 (constant
-    (visibility_modifier)? @context
-    "const" @context
-    name: (_) @name) @item
+  (visibility_modifier)? @context
+  "const" @context
+  name: (_) @name) @item

extensions/workflows/shared/bump_version.yml 🔗

@@ -52,7 +52,7 @@ jobs:
       app-secret: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
     with:
       bump-type: ${{ needs.determine_bump_type.outputs.bump_type }}
-      force-bump: true
+      force-bump: ${{ github.event_name != 'push' }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}labels
   cancel-in-progress: true

legal/privacy-policy.md 🔗

@@ -3,266 +3,178 @@ title: Privacy Policy
 slug: privacy-policy
 ---
 
-At Zed Industries, Inc. ("Zed", "Company" or "we"), we take privacy and the security of data seriously. This Privacy Policy is established to help advise you about how we treat your personal data. By using or accessing our website located at zed.dev, or the Solution or services available pursuant the Zed End User Terms located at [https://zed.dev/terms](https://zed.dev/terms) (collectively, the "Services"), you acknowledge awareness of the practices and policies outlined below, and hereby consent that we will collect, use, and share your personal data as described in this Privacy Policy.
+**Last Updated**: March 2, 2026
 
-As we grow and expand our Services, we may modify this Privacy Policy from time to time. When material modifications are made, we will alert you to any such changes by placing a notice on the Company website, by sending you an email and/or by some other means. Please note that if you've opted not to receive legal notice emails from us (or haven't provided us with a valid email address), those legal notices will still govern your use of the Services. If you use the Services after any changes to the Privacy Policy have been published on our website, you consent and agree to all of the changes.
+## Summary
 
-## What this Privacy Policy Covers
+Zed collects user information (name, email, username, IP address, etc.), device and usage data, and web analytics to improve our products and services, as well as payment details if you subscribe. We do not store your source code and we process but do not store audio/video/screenshare from collaboration sessions.
 
-Our Privacy Policy covers how we treat Personal Data that we gather when you access or use our Services. "Personal Data" means information that identifies or relates to a particular individual and includes information referred to as "personally identifiable information" or "personal information" under applicable data privacy laws, rules or regulations. Our Privacy Policy does not cover the practices of companies we don't own or control or people we don't manage.
+- **We don't sell your data and we don't allow AI providers to train on it**
+- **You control Zed editor telemetry** — disable it in Settings or learn more at [https://zed.dev/docs/telemetry](/telemetry)
+- **We honor Global Privacy Control** (GPC) signals from your browser
+- To access, correct, or delete your data, email privacy@zed.dev with subject "Privacy Request"
 
-## Personal Data
+Questions? Contact us at privacy@zed.dev. The full policy below contains complete details.
 
-### Categories of Personal Data We Collect
+## Introduction
 
-This chart details the categories of Personal Data that we collect and have collected over the past 12 months:
+This Privacy Policy explains how Zed Industries, Inc. ("**Zed**," "**we**," "**our**," or "**us**") collects, uses, and shares your personal data when you use our websites (including [https://zed.dev](https://zed.dev)), our downloadable software ("**Software**"), our subscription service ("**Service**"), or otherwise interact with us.
 
-| Category of personal data     | Examples of data we collect                                   | Categories of third parties with whom we share this data |
-| ----------------------------- | ------------------------------------------------------------- | -------------------------------------------------------- |
-| Profile or contact data       | First and last name<br/>Email address<br/>GitHub username     | Cloud infrastructure providers <br/>Analytics providers  |
-| IP data                       | IP address & derived geolocation data                         | Cloud infrastructure providers <br/>Analytics providers  |
-| Web analytics                 | Interactions<br/>Referrer<br/>Request IDs<br/>Statistics      | Cloud infrastructure providers <br/>Analytics providers  |
-| Photos, videos and recordings | Screenshots<br/>Videos and video recordings you share with us | Cloud infrastructure providers                           |
-| Audio, screenshare data       | Audio and screen sharing during collaboration calls           | Cloud infrastructure providers                           |
+As used in this Privacy Policy, “personal data” means any information relating to an identified or identifiable individual and includes any information that constitutes “personally identifiable information,” “personal data,” or “personal information” under applicable privacy or data protection laws or regulations.
 
-Note that "collection" does not necessarily imply long-term storage.
+You acknowledge the collection, use, disclosure, procedures, and other processing described in this Privacy Policy. Beyond the Privacy Policy, your use of our products and services is also subject to our Terms of Service included alongside. This Privacy Policy does not apply to the extent we process personal data in the role of a processor or service provider on behalf of our Zed Business customers. Such processing is governed by our Data Processing Agreement, available upon request. For information about the sub-processors we engage in that capacity, see [https://zed.dev/subprocessors](/subprocessors).
 
-### Categories of Sources of Personal Data
-
-We collect Personal Data about you from the following categories of sources:
-
-#### You
-
-- When you provide such information directly to us. Examples include:
-  - When you create an account
-  - When you voluntarily provide information through our Services or through responses to surveys or questionnaires.
-  - When you send us an email or otherwise contact us.
-  - When you sign up to our mailing list.
-- When you use our hosted Services and such information is collected automatically. Examples include:
-  - Cookies (defined in the "Tracking Tools and Opt-Out" section below).
-- When you use the client software we provide on your machine. Examples include:
-  - Authentication information when you sign in.
-  - Version and system metadata when the software checks for updates.
-  - Usage data, unless you opt out.
-  - Crash reports, unless you opt out.
-  - When you make requests to language models we host for you.
-    - Zed does not store or train on your requests without consent.
-  - Other relevant data necessary to provide you with our Services.
-
-#### Third Parties
+## Personal Data We Collect
 
-- When you login to the service using a third-party service like GitHub.
-- Information collected by content delivery networks or similar service providers
-- We may use analytics providers to analyze how you interact and engage with the Services, or third parties may help us provide you with customer support.
+We may collect personal data from or about you and your devices from the sources and in the manner described below. If you do not provide requested data, some features may not work — either because we need that data to deliver them, or because we are legally required to collect it.
 
-## Our Business Purposes for Collecting or Disclosing Personal Data
+For authorized users on Zed Business plans, certain data described in this section may be processed by Zed as a data processor on behalf of the Zed Business customer. In those cases, the Zed Business customer determines the purposes and lawful basis for that processing, as set forth in our Data Processing Agreement.
 
-- Providing, Customizing and Improving the Services
-  - Creating and managing your account or other user profiles.
-  - Processing orders or other fee-based transactions; billing.
-  - Providing you with the products, services or information you request.
-  - Meeting or fulfilling the reason you provided the information to us.
-  - Providing support and assistance for the Services.
-  - Improving the Services, including testing, research, internal analytics and product development.
-  - Doing fraud protection, security and debugging.
-  - Carrying out other business purposes stated when collecting your Personal Data or as otherwise set forth in applicable data privacy laws.
-- Marketing the Services
-  - Marketing and selling the Services.
-- Corresponding with You
-  - Responding to correspondence that we receive from you, contacting you when necessary or requested, and sending you information about Zed or our Services.
-  - Sending emails and other communications according to your preferences or that display content that we think will interest you.
-- Meeting Legal Requirements and Enforcing Legal Terms
-  - Fulfilling our legal obligations under applicable law, regulation, court order or other legal process, such as preventing, detecting and investigating security incidents and potentially illegal or prohibited activities.
-  - Protecting the rights, property or safety of you, Zed or another party.
-  - Enforcing any agreements with you.
-  - Responding to claims that any posting or other content violates third-party rights.
-  - Resolving disputes.
+### Personal Data You Provide to Us
 
-We will not collect additional categories of Personal Data or use the Personal Data we collected for materially different, unrelated or incompatible purposes without providing you notice as is described above.
+- **Contact Information** - We may collect your personal data when you inquire about Zed, our products and services, or when you otherwise interact with us, including when you sign up for, attend, or take part in our demos, events, or webinars. This data may include your full name, work email, company name, company size, and any other data you share with us.
 
-## How We Disclose Your Personal Data
+- **Communications** - When you contact us directly, we may receive personal data about you, such as your name, email address, message contents and attachments, and - if you join a live collaboration session - we process, but do not store, your audio and shared screen. When you sign up for news and updates, we will collect your email address and any other data you share. When you communicate with us online, our third-party vendors may receive and store these communications on our behalf. Our emails may include tracking pixels to track information about how you interact with our emails, such as whether you open them and whether you access any included links, your approximate Location Information (described below) based on your IP address, and Device Information (described below), to improve our website, products, and services.
 
-We disclose your Personal Data to categories of service providers and other parties listed in this section. Some of these disclosures may constitute a "sale" of your Personal Data as defined under applicable laws. For more information, please refer to the state-specific sections below.
+- **Account Information** - When you create an Account with Zed, we collect the data you provide to create, update, or manage your Service account. Examples include: your name, username, and email address.
 
-- Service Providers. These parties help us provide the Services or perform business functions on our behalf. They include:
-  - Hosting, technology and communication providers.
-  - Providers of artificial intelligence or machine learning models
-  - Payment processors.
-    - If you are using our Services on a fee-basis, our payment processing partner Stripe, Inc. ("Stripe") collects your voluntarily-provided payment card information necessary to process your payment.
-    - Please see Stripe Terms of Service and Stripe Privacy Policy for information on its use and storage of your Personal Data.
-- Analytics Partners. These parties provide analytics on web traffic or usage of the Services. They include:
-  - Companies that track how users found or were referred to the Services.
-  - Companies that track how users interact with the Services.
-- Authorized authentication providers (e.g. GitHub OAuth)
+- **Careers** - If you apply for a job with us, you may submit your contact information and your resume online. We will collect any information you choose to provide on your resume, such as your contact information, education, and employment experience.
 
-### Fulfilling Legal Obligations
+- **Payment Information** - If you make a payment, your payment details, such as credit card, address, phone number, or other financial information, are collected by our third-party payment processor on our behalf. Zed does not collect, process, or store your payment information directly.
 
-We may share any Personal Data that we collect with third parties in relation to the activities set forth under "Meeting Legal Requirements and Enforcing Legal Terms" in the "Our Business Purposes for Collecting Personal Data" section above.
+- **Regarding Third-Party Services** - If you use or integrate third-party tools or link third-party services with the Software or Service, we may receive personal data about you, such as your [GitHub username and other related information](https://docs.github.com/en/apps/oauth-apps/using-oauth-apps/connecting-with-third-party-applications) that permits us to authenticate your user identity and keep your account secure. You can learn more about Zed Third Parties here: [https://zed.dev/acceptable-use-policies](/acceptable-use-policies)
 
-### Business Transfers
+### Personal Data We Collect When You Use Our Websites, Software, or Service
 
-Personal Data collected may be transferred to a third party if we undergo a merger, acquisition, bankruptcy or other transaction in which such third party assumes control of our business (in whole or in part). In such an event, we will make reasonable efforts to notify you before your information becomes subject to different privacy and security policies and practices as authorized or mandated by applicable law.
+- **Website, Software, and Service Telemetry** - We automatically collect telemetry - technical logs, metrics, and usage data - to improve and support Zed’s websites, Software, and Service. You may opt out of local telemetry collection in the Software settings. However, when you sign into or use the websites or Service (including via the Software) we collect telemetry on our servers related to use of the websites and Service.
 
-## Data that is Not Personal Data
+Learn more about telemetry and your choices and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry)
 
-We may create aggregated, de-identified or anonymized data from the Personal Data we collect, including by removing information that makes the data personally identifiable to a particular user. We may use such aggregated, de-identified or anonymized data and share it with third parties for our lawful business purposes, including to analyze, build and improve the Services and promote our business, provided that we will not share such data in a manner that could identify you.
+- **Device and Location Information** - When you use the website, Software, or Service we may collect information about your device and software, including IP address (and inferred approximate location), device type, device identifiers, browser (type, version, user-agent, and language), and operating system or mobile device type. We do so to support improving and securing the Software and Service. Zed does not collect precise location information.
 
-## Tracking Tools and Opt-Out
+- **Usage Information** - We automatically collect information about how you use our website and Service, like the pages or other content you view and the dates and times of your visits. We do so to support improving and securing the websites, Software, and Service.
 
-The Services use cookies and similar technologies such as pixel tags, web beacons, clear GIFs and JavaScript (collectively, "Cookies") to enable our servers to recognize your web browser, tell us how and when you visit and use our Services, analyze trends, learn about our user base and operate and improve our Services. Cookies are small pieces of data– usually text files – placed on your computer, tablet, phone or similar device when you use that device to access our Services. We may also supplement the information we collect from you with information received from third parties, including third parties that have placed their own Cookies on your device(s).
+- **Information from Cookies and Similar Technologies** - We and our third-party partners may collect information using cookies, beacons, and similar technologies (collectively “**Cookies**”) to provide functionality and to recognize you across visits. See our [Cookie Policy](/cookie-policy), which includes information on how to control or opt out of these Cookies.
 
-### We use the following types of Cookies:
+## How We Use the Personal Data We Collect
 
-- Essential Cookies. Essential Cookies are required for providing you with features or services that you have requested. For example, certain Cookies enable you to log into secure areas of our Services. Disabling these Cookies may make certain features and services unavailable.
-- Functional Cookies. Functional Cookies are used to record your choices and settings regarding our Services, maintain your preferences over time and recognize you when you return to our Services. These Cookies help us to personalize our content for you, greet you by name and remember your preferences (for example, your choice of language or region).
-- Performance/Analytical Cookies. Performance/Analytical Cookies allow us to understand how visitors use our Services. They do this by collecting information about the number of visitors to the Services, what pages visitors view on our Services and how long visitors are viewing pages on the Services. Performance/Analytical Cookies also help us measure the performance of our advertising campaigns to help us improve our campaigns and Services' content for those who engage with our advertising.
+We use the personal data we collect:
 
-You can decide whether or not to accept Cookies through your internet browser's settings. Most browsers have an option for turning off the Cookie feature, which will prevent your browser from accepting new Cookies, as well as (depending on the sophistication of your browser software) allow you to decide on acceptance of each new Cookie in a variety of ways. You can also delete all Cookies that are already on your device. If you do this, however, you may have to manually adjust some preferences every time you visit our website and some of the Services and functionalities may not work.
+- To deliver and improve our products: Providing the Software and Service functionality you request, debugging issues, and developing new features based on usage patterns;
 
-To find out more information about Cookies generally, including information about how to manage and delete Cookies, please visit [https://allaboutcookies.org/](https://allaboutcookies.org/) or [https://ico.org.uk/for-the-public/online/cookies/](https://ico.org.uk/for-the-public/online/cookies/) if you are located in the European Union.
+- To communicate with you: Responding to support requests, sending service announcements, and (with your consent) marketing communications;
 
-## Data Security
+- To secure our services: Detecting and preventing fraud, abuse, and security threats;
 
-We endeavor to protect your Personal Data from unauthorized access, use and disclosure using appropriate physical, technical, organizational and administrative security measures based on our Services,the type of Personal Data being collected and how we are processing that data. You should also help protect your data by selecting and protecting your password and/or other sign-on mechanism(s) with care; limiting access to your computer or device and browser; and signing off after you have finished accessing your account. Although we work to protect the security of your account and other data that we hold in our records, be aware that no method of transmitting data over the internet or storing data is completely secure.
+- To meet legal obligations: Complying with tax, accounting, and regulatory requirements;
 
-## Data Retention
+- To process payments: Completing transactions through our payment processor; and
 
-We retain Personal Data about you for as long as reasonably necessary to provide you with our Services or otherwise in support of our business or commercial purposes for utilization of your Personal Data, as expressed. When establishing a retention period for particular categories of data, we consider who we collected the data from, our need for the Personal Data, why we collected the Personal Data, and the sensitivity of the Personal Data. In some cases we retain Personal Data for a longer period, if doing so is necessary to comply with our legal obligations, resolve disputes or collect fees owed, or as is otherwise permitted or required by applicable law, rule or regulation. We may further retain information in an anonymous or aggregated form where such information would not identify you personally.
+- To understand aggregate usage: Generating anonymized statistics to guide product decisions.
 
-For example:
+We do not use your personal data for purposes materially different from those described above without providing you notice and, where required by law, obtaining your consent.
 
-- We retain your profile information and credentials for as long as you have an account with us.
-- We retain your payment data for as long as we need to process your purchase or subscription.
-- We retain your device/IP data for as long as we need it to ensure that our systems are working appropriately, effectively and efficiently.
+## Legal Bases for Processing European Personal Data
 
-It's worth noting that we avoid retaining data unless necessary to provide our Service. For example:
+If you are located in the European Economic Area (“**EEA**”) or the United Kingdom (“**UK**”), we only process your personal data when we have a valid “legal basis,” including as set forth below.
 
-- We do not currently store source code that we proxy during collaboration sessions.
-- We do not currently store audio or video recordings of Collaboration calls handled by LiveKit.
+- **Consent** - We may process your personal data where you have consented to certain processing of your personal data. For example, we may process your personal data to send you marketing communications or to use Cookies where you have consented to such use.
 
-## Personal Data of Children
+- **Contractual Necessity** - We may process your personal data where required to provide you with our products and services. For example, we may need to process your personal data to respond to your inquiries or requests.
 
-We do not knowingly collect or solicit Personal Data from children under 13 years of age; if you are a child under the age of 13, please do not attempt to register for or otherwise use the Services or send us any Personal Data. If we learn we have collected Personal Data from a child under 13 years of age, we will delete that information as quickly as possible. If you believe that a child under 13 years of age may have provided Personal Data to us, please contact us at hi@zed.dev.
+- **Compliance with a Legal Obligation** - We may process your personal data where we have a legal obligation to do so. For example, we may process your personal data to comply with tax, labor and accounting obligations.
 
-## California Resident Rights
+- **Legitimate Interests** - We may process your personal data where we or a third party have a legitimate interest in processing your personal data. Specifically, we have a legitimate interest in using your personal data for product development and internal analytics purposes, and otherwise to improve the safety, security, and performance of our products and services. We only rely on our or a third party’s legitimate interests to process your personal data when these interests are not overridden by your rights and interests.
 
-If you are a California resident, you have the rights set forth in this section. Please see the "Exercising Your Rights" section below for instructions regarding how to exercise these rights. Please note that we may process Personal Data of our customers' end users or employees in connection with our provision of certain services to our customers. If we are processing your Personal Data as a service provider, you may contact the entity that collected your Personal Data in the first instance to address your rights with respect to such data as desired.
+## How We Disclose the Personal Data We Collect
 
-If there are any conflicts between this section and any other provision of this Privacy Policy and you are a California resident, the portion that is more protective of Personal Data shall control to the extent of such conflict. If you have any questions about this section or whether any of the following rights apply to you, please contact us at hi@zed.dev.
+The disclosures described below relate to Zed’s processing as a data controller. When we process data on behalf of Zed Business customers as a data processor, some of the third-parties described below may act as sub-processors under our Data Processing Agreement.
 
-### Access
+- **Zed does not sell your personal data to third parties**. We also do not share your data with third parties for the purposes of cross-context advertising.
 
-You have the right to request certain information about our collection and use of your Personal Data over the past 12 months. In response, we will provide you with the following information:
+- **Partners and Affiliates** - We may share information we receive to our current or future affiliates (companies under common ownership with Zed) for any of the lawful business purposes described in this Privacy Policy above.
 
-- The categories of Personal Data that we have collected about you.
-- The categories of sources from which that Personal Data was collected.
-- The business or commercial purpose for collecting or selling your Personal Data.
-- The categories of third parties with whom we have shared your Personal Data.
-- The specific pieces of Personal Data that we have collected about you.
-- If we have disclosed your Personal Data to any third parties for a business purpose over the past 12 months, we will identify the categories of Personal Data shared with each category of third party recipient. If we have sold your Personal Data over the past 12 months, we will identify the categories of Personal Data sold to each category of third party recipient.
+- **Vendors and Service Providers** - We may disclose information we receive to vendors and service providers retained in connection with operating, maintaining, or monitoring our websites, products, and services for any of the lawful business purposes described in this Privacy Policy above.
 
-### Deletion
+- **AI Service Providers** - We may disclose information we receive to vendors that provide artificial intelligence services in connection with our websites, software, or services for legitimate business purposes only, including website performance monitoring and sales and marketing of our products and services. Zed does not utilize third-party services which use this information for AI training purposes.
 
-You have the right to request that we delete the Personal Data that we have collected about you. Under the CCPA, this right is subject to certain exceptions: for e.g., we may need to retain your Personal Data to provide you with the Services or complete a transaction or other action you may have requested, or if deletion of your Personal Data involves disproportionate effort to achieve. If your deletion request is subject to one of these exceptions, we may deny your deletion request to such data.
+- **Web Analytics** - We use analytics services such as Amplitude to collect and process certain analytics data related to your use of our websites. These services utilize first-party cookies to collect information about your use of our websites, apps, and online resources via HTTP referrer and/or Cookies, depending on your choices regarding Cookies. Zed does not use third-party tracking cookies that collect your activity for other websites.
 
-### Correction
+- **As Required By Law and Similar Disclosures** - We may access, preserve, and disclose your information if we believe doing so is required or appropriate to:
 
-You have the right to request that we correct any inaccurate Personal Data we have collected about you. Under the CCPA, this right is subject to certain exceptions: for example, if we reasonably decide, based on the totality of circumstances related to your Personal Data, that such data is correct. If your correction request is subject to one of these CCPA exceptions, we may deny your request to correct such data.
+  - Comply with law enforcement requests and legal process, such as a court order or subpoena;
+  - Respond to your requests;
+  - Protect your, our, or others’ rights, property, security, or safety;
+  - Protect against legal liability; or
+  - Investigate fraud or other unlawful activity.
 
-### Processing of Sensitive Personal Information Opt-Out
+  For the avoidance of doubt, the disclosure of your information may occur if you post any objectionable, harmful, or illegal content on or through our websites or products and services.
 
-Consumers have certain rights over the processing of their sensitive information. However, we do not intentionally collect sensitive categories of personal information, but it is possible to share sensitive information with us through your use of the Services. It is your responsibility not to share any such sensitive information when you use the Services.
+- **Merger, Sale, or Other Asset Transfers** - We may transfer your personal data to service providers, advisors, potential transactional partners, or other third parties in connection with the consideration, negotiation, or completion of a corporate transaction in which we are acquired by or merged with another company or we sell, liquidate, or transfer all or a portion of our assets.
 
-### Personal Data Sales Opt-Out and Opt-In
+- **With Your Consent** - We may also disclose your information for other purposes with your permission.
 
-We will not sell your Personal Data, and have not done so over the last 12 months. To our knowledge, we do not sell the Personal Data of minors under 16 years of age. Under the CCPA, California residents have certain rights when a business "shares" Personal Data with third parties for purposes of cross-contextual behavioral advertising. We have shared the foregoing categories of Personal Data for the purposes of cross-contextual behavioral advertising, as applicable.
+## Your Choices
 
-Under California Civil Code Sections 1798.83-1798.84, California residents are entitled to contact us to prevent disclosure of Personal Data to third parties for such third parties' direct marketing purposes; in order to submit such a request, please contact us at hi@zed.dev.
+- **Marketing Communications** - You can unsubscribe from our promotional emails via the link provided in the emails. Even if you opt out of receiving promotional messages from us, you will continue to receive administrative and security-related messages from us as long as you maintain a Service account.
 
-Your browser may offer you a "Do Not Track" option, which allows you to signal to operators of websites and web applications and services that you do not wish such operators to track certain of your online activities over time and across different websites. Our Services do not support Do Not Track requests at this time. To find out more about "Do Not Track," you can visit [www.allaboutdnt.com](https://www.allaboutdnt.com).
+- **Do Not Track** - Because there is no widely-accepted standard on how to respond to “Do Not Track” signals, we instead utilize and honor [Global Privacy Control (GPC)](https://globalprivacycontrol.org/#gpc-spec) as an alternative where and when feasible.
 
-### Exercising Your Rights under CCPA
+- **Opting-out of Software Telemetry** - Learn more about telemetry and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](https://zed.dev/docs/telemetry).
 
-To exercise the rights described in this Privacy Policy, you or, if you are a California resident, your Authorized Agent (as defined below) can send us a request that (1) provides sufficient information to allow us to adequately verify that you are the person about whom we have collected Personal Data, and (2) describes your request in sufficient detail to allow us to understand, evaluate and respond ( a "Valid Request"). We are not obligated to respond to requests that do not meet these criteria. We will only use Personal Data provided in a Valid Request to verify your identity and complete your request.
+- **Disabling Image Loading for Email** - In order to prevent the use of tracking pixels, you may disable image loading in your own email client.
 
-We are committed to respond to Valid Requests within the time frame required by applicable law. We will not charge you a fee for making a Valid Request unless your Valid Request(s) is excessive, repetitive or manifestly unfounded. If we determine that your Valid Request warrants a fee, we will notify you of the fee and explain that decision before completing your request.
+## Your Privacy Rights
 
-You may submit a Valid Request using the following methods:
+Depending on where you are located, applicable data protection laws may provide you with specific rights regarding your personal data. These may include the right to:
 
-- Email us at: hi@zed.dev
+- Request access to the personal data we maintain about you, update, and correct inaccuracies in your personal data, restrict or object to the processing of your personal data, have your personal data anonymized or deleted, as appropriate, or exercise your right to data portability to easily transfer your personal data to another company.
 
-If you are a California resident, you may also authorize an agent (an "Authorized Agent") to exercise your rights on your behalf.
+- Withdraw any consent you previously provided to us regarding the processing of your personal data at any time and free of charge. We will apply your preferences going forward and this will not affect the lawfulness of the processing before you withdrew your consent.
 
-### We Will Not Discriminate Against You for Exercising Your Rights
+- **Your European Privacy Rights** - If you are located in the European Economic Area (EEA) or the United Kingdom (UK), you may exercise any of the rights described above under GDPR or applicable local data protection law. You also have the right to lodge a complaint with a supervisory authority, including in your country of residence, place of work, or where an incident took place.
 
-We will not discriminate against you for exercising your rights under applicable data protection laws. We will not deny you our goods or services, charge you different prices or rates, or provide you a lower quality of goods and services if you exercise your rights under applicable law. However, we may offer different tiers of our Services, as allowed by applicable law, with varying prices, rates or levels of quality of the goods or services you receive related to the value of Personal Data that we receive from you.
+### How to Exercise Your Privacy Rights
 
-# European Union and United Kingdom Data Subject Rights
+Regardless of where you are located, you may exercise these rights by contacting us at [privacy@zed.dev](mailto:privacy@zed.dev) or by using the contact details at the end of this Privacy Policy. Please include the subject line "Privacy Request" and include: (1) the specific right you wish to exercise, (2) your account email address, and (3) any details that help us locate your data.
 
-## EU and UK Residents
+Before fulfilling your request, we may ask you to provide reasonable information to verify your identity. Zed will respond to these requests without undue delay and in any event, within one month and will execute the request within one month of responding. Complex requests may require an additional 60 days with notice provided to you.
 
-If you are a resident of the European Union ("EU"), United Kingdom ("UK"), Lichtenstein, Norway or Iceland, you may have additional rights under the EU or UK General Data Protection Regulation (the "GDPR") with respect to your Personal Data, as outlined below.
-We use the terms "Personal Data" and "processing" as they are defined in the GDPR in this section, but "Personal Data" generally means information that can be used to individually identify a person, and "processing" generally covers actions that can be performed in connection with data such as collection, use, storage and disclosure. Company will be the controller of your Personal Data processed in connection with the Services.
-If there are any conflicts between this section and any other provision of this Privacy Policy, the policy or portion that is more protective of Personal Data shall control to the extent of such conflict. If you have any questions about this section or whether any of the following applies to you, please contact us at hi@zed.dev. Note that we may also process Personal Data of our customers' end users or employees in connection with our provision of certain services to you, in which case we are the processor of Personal Data. If we are the processor of your Personal Data, please contact the controller party in the first instance to address your rights with respect to such data.
+Please note that there are exceptions and limitations to each of these rights, and that while any changes you make will be reflected in active user databases instantly or within a reasonable period of time, we may retain personal data for backups, archiving, prevention of fraud and abuse, satisfaction of legal obligations, or where we otherwise reasonably believe that we have a legitimate and lawful reason to do so.
 
-## Personal Data We Collect
+## Third Parties
 
-The "Categories of Personal Data We Collect" section above details the Personal Data that we collect from you.
+Our websites, products, and services may contain links to other websites, products, or services that we do not own or operate or permit you to integrate with third-party services. We are not responsible for the privacy or security practices of these third parties. Please be aware that this Privacy Policy does not apply to your activities on these third-party services or any data you disclose to these third parties. We encourage you to read their privacy policies before providing any data to them.
 
-## Personal Data Use and Processing Grounds
+## Retention
 
-The "Our Commercial or Business Purposes for Collecting Personal Data" section above explains how we use your Personal Data.
+We keep personal data as long as necessary to provide, maintain, and secure our websites, products, and services. We take measures to avoid retaining data we don't need — for example, we don't store source code proxied during collaboration sessions, or audio, video, and screen contents from calls.
 
-We will only process your Personal Data if we have a lawful basis for doing so. Lawful bases for processing include consent, contractual necessity and our "legitimate interests" or the legitimate interest of others, as further described below.
+When you request deletion, we take measures to delete your personal data or anonymize it, unless we're legally required to retain it. We determine retention periods based on the type of service, our relationship with you, legal requirements, and applicable statutes of limitations.
 
-- Contractual Necessity: We process the following categories of Personal Data as a matter of "contractual necessity", meaning that we need to process the data to perform under our End User Terms with you, which enables us to provide you with the Services. When we process data due to contractual necessity, failure to provide such Personal Data will result in your inability to use some or all portions of the Services that require such data.
-  - Profile or Contact Data
-  - Payment Data
-- Legitimate Interest: We process the following categories of Personal Data when we believe it furthers the legitimate interest of us or third parties:
-  - Device/IP Data
-  - Web Analytics
-  - We may also de-identify or anonymize Personal Data to further our legitimate interests.
-- Examples of these legitimate interests include (as described in more detail above):
-  - Providing, customizing and improving the Services.
-  - Marketing the Services.
-  - Corresponding with you.
-  - Meeting legal requirements and enforcing legal terms.
-  - Completing corporate transactions.
-- Consent: In some cases, we process Personal Data based on the consent you expressly grant to us at the time we collect such data.
-  - Other Processing Grounds: From time to time we may also need to process Personal Data to comply with a legal obligation, if it is necessary to protect the interests of you or other data subjects, or if it is necessary in the public interest.
+## Security
 
-## Sharing Personal Data
+Designing Zed and our Service with “secure-by-default” as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best practices in order to uphold that principle. To learn more about Zed’s security program, please visit [https://zed.dev/docs/ai/privacy-and-security](https://zed.dev/docs/ai/privacy-and-security).
 
-The "How We Share Your Personal Data" section above details how we share your Personal Data with third parties.
+Zed will notify users as soon as possible should an incident affect their security or privacy. However, because no electronic transmission or storage of data can be proven entirely secure, we can make no guarantees as to the security or privacy of your data.
 
-## EU Data Subject Rights
+## Children’s Privacy
 
-For more information about these EU or UK personal data terms and your rights related thereto, or to submit a request for information, please email us at hi@zed.dev. Please note that in some circumstances, we may not be able to fully comply with your request, such as if it is frivolous or impractical, if it jeopardizes the rights of others, or if it is not required by law, but, in those circumstances, we are committed to respond to notify you of such a decision regardless. In some cases, we may also need you to provide us with additional information, which may include Personal Data, if necessary to verify your identity and the nature of your request.
+We do not knowingly collect, maintain, or use personal data from children under 18 years of age, and no part of our websites, products, or services is directed to children. If you learn that a child has provided us with personal data in violation of this Privacy Policy, alert us at [privacy@zed.dev](mailto:privacy@zed.dev).
 
-- Access: You can request more information about the Personal Data we hold about you and request a copy of such Personal Data. You can also access certain of your Personal Data by logging on to your account.
-- Rectification: If you believe that any Personal Data we are holding about you is incorrect or incomplete, you can request that we correct or supplement such data. You can also correct some of this information directly by logging on to your account.
-- Erasure: You can request that we erase some or all of your Personal Data from our systems.
-- Withdrawal of Consent: If we are processing your Personal Data based on your consent, you have the right to withdraw your consent at any time. Please note, however, that if you exercise this right, you may have to then provide express consent on a case-by-case basis for the use or disclosure of certain of your Personal Data, if such use or disclosure is necessary to enable you to utilize some or all of our Services.
-- Portability: You can ask for a copy of your Personal Data in a machine-readable format. You can also request that we transmit the data to another controller where technically feasible.
-- Objection: You can contact us to let us know that you object to the further use or disclosure of your Personal Data for certain purposes, such as for direct marketing purposes.
-- Restriction of Processing: You can ask us to restrict further processing of your Personal Data.
-- Right to File Complaint: You have the right to lodge a complaint about Company's practices with respect to your Personal Data with the supervisory authority of your country or EU Member State. A list of Supervisory Authorities is available here: [https://edpb.europa.eu/about-edpb/board/members_en](https://edpb.europa.eu/about-edpb/board/members_en)
+## International Visitors
 
-## Transfers of Personal Data

legal/subprocessors.md 🔗

@@ -3,24 +3,100 @@ title: Subprocessor List
 slug: subprocessors
 ---
 
-This page provides information about the Subprocessors Zed has engaged to provide processing activities on Customer Data as defined in the [Zed End User Terms](https://zed.dev/terms).
-
-| Subprocessor        | Purpose                  | Location      |
-| ------------------- | ------------------------ | ------------- |
-| Cloudflare          | Cloud Infrastructure     | Worldwide     |
-| Amazon Web Services | Cloud Infrastructure     | United States |
-| DigitalOcean        | Cloud Infrastructure     | United States |
-| Vercel              | Cloud Infrastructure     | United States |
-| ConvertKit          | Email Marketing          | United States |
-| Axiom               | Analytics                | United States |
-| Hex Technologies    | Analytics                | United States |
-| Snowflake           | Analytics                | United States |
-| LiveKit             | Audio/Video Conferencing | United States |
-| GitHub              | Authentication           | United States |
-| Anthropic           | AI Services              | United States |
-| BaseTen             | AI Services              | United States |
-| Exa Labs            | AI Services              | United States |
-| Google              | AI Services              | United States |
-| OpenAI              | AI Services              | United States |
-
-**DATE: May 6th, 2025**
+Zed uses select third-party subprocessors to deliver core product functionality. Each subprocessor processes customer personal data only as necessary to provide its service, and all are subject to appropriate data protection agreements.
+
+### How Zed Uses Subprocessors
+
+To provide fast, reliable, and secure functionality, Zed relies on a small number of carefully vetted third-party subprocessors. These vendors help us deliver essential capabilities such as hosting, billing, analytics, real-time collaboration, and hosted AI features.
+
+Each subprocessor only processes customer personal data as needed to provide its service.
+
+Zed maintains contracts and data protection agreements with all subprocessors, including GDPR-compliant terms where applicable. We do not sell customer data, and we do not share customer personal data with vendors for advertising or marketing purposes.
+
+### AI Subprocessors
+
+Zed offers three modes for AI:
+
+1. **Bring your own API key** — data goes directly from the customer to the model provider; Zed does not process or store it.
+2. [**External Agents**](https://zed.dev/docs/ai/external-agents) — Zed uses ACP to provide an enhanced experience with terminal-based AI code agents like Claude Code or OpenAI Codex. Data is not processed or stored by Zed when using external agents.
+3. **Zed-hosted models** — Zed sends customer prompts to one of its AI providers (listed below). These vendors act as subprocessors only for customers who choose this mode.
+
+### Ongoing Updates
+
+**Last Updated**: March 2, 2026
+
+This subprocessor list is reviewed regularly. Zed will notify customers of material changes in accordance with our [Terms](https://zed.dev/terms) and [Privacy Policy](https://zed.dev/privacy-policy).
+
+---
+
+## Infrastructure & Hosting
+
+| Subprocessor            | Purpose                                  | Data Location |
+| ----------------------- | ---------------------------------------- | ------------- |
+| **Cloudflare**          | Network services, Cloudflare Workers     | Global        |
+| **Amazon Web Services** | Telemetry ingestion pipeline, S3 buckets | United States |
+| **DigitalOcean**        | Application database hosting             | United States |
+| **Vercel**              | Website and edge infrastructure hosting  | United States |
+
+---
+
+## Billing & Payments
+
+| Subprocessor | Purpose                                                      | Data Location |
+| ------------ | ------------------------------------------------------------ | ------------- |
+| **Stripe**   | Payment processing                                           | United States |
+| **Orb**      | Usage tracking, subscription management, and metered billing | United States |
+
+---
+
+## Operational Tools
+
+| Subprocessor | Purpose                               | Data Location |
+| ------------ | ------------------------------------- | ------------- |
+| **Day.ai**   | Customer relationship management      | United States |
+| **Linear**   | Issue tracking and project management | United States |
+
+---
+
+## Email & Communication
+
+| Subprocessor   | Purpose                                                    | Data Location |
+| -------------- | ---------------------------------------------------------- | ------------- |
+| **ConvertKit** | Product update and feature announcement emails             | United States |
+| **Loops**      | Email marketing and product communications                 | United States |
+| **Plain**      | Consolidated platform for end-user support across channels | United States |
+
+---
+
+## Analytics & Data Processing
+
+| Subprocessor         | Purpose                                                                                  | Data Location |
+| -------------------- | ---------------------------------------------------------------------------------------- | ------------- |
+| **Amplitude**        | Product analytics                                                                        | United States |
+| **Axiom**            | Application telemetry, observability, and logs                                           | United States |
+| **Fivetran**         | Automates data pipeline integration (extract, transform, and load services) for Zed      | United States |
+| **Hex Technologies** | Analytics and debugging                                                                  | United States |
+| **Snowflake**        | Data warehouse                                                                           | United States |
+
+---
+
+## Collaboration Services
+
+| Subprocessor | Purpose                                                        | Data Location |
+| ------------ | -------------------------------------------------------------- | ------------- |
+| **LiveKit**  | Real-time audio/video and collaborative session infrastructure | United States |
+
+---
+
+## AI Services (Zed-Hosted Models)
+
+_These subprocessors apply only when customers opt to use Zed's hosted AI models. When users supply their own API keys, or use external agents, data is sent directly to the provider and does not pass through Zed's infrastructure._
+
+| Subprocessor        | Purpose                                                                                                                                                                                                          | Data Location |
+| ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- |
+| **Anthropic**       | Requests may be sent to Anthropic even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with Anthropic. | United States |
+| **Baseten**         | Inference infrastructure for Edit Predictions                                                                                                                                                                    | United States |
+| **Exa Labs**        | AI-powered contextual search and retrieval                                                                                                                                                                       | United States |
+| **Google (Vertex)** | Requests may be sent to Google even if you have another provider's model selected in chat (e.g. for summarization). We have a zero data retention agreement with Google.                                         | United States |
+| **OpenAI**          | Requests may be sent to OpenAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with OpenAI.       | United States |
+| **xAI**             | Requests may be sent to xAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with xAI.             | United States |

legal/terms.md 🔗

@@ -1,197 +1,254 @@
 ---
-title: Zed End User Terms
+title: Terms of Service
 slug: terms
 ---
 
-PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS.
+**Last Updated**: March 2, 2026
 
-## 1. ACCESS TO AND USE OF THE SOLUTION
+Welcome, and thank you for your interest in Zed Industries, Inc. (“**Zed**,” “**we**,” or “**us**”) and our website at [www.zed.dev](https://www.zed.dev), along with our downloadable Zed software (the “**Software**”) and related subscription service (the “**Service**”). These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service.
 
-Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.
+Please read the following Terms carefully.
 
-## 2. TERMS APPLICABLE TO THE EDITOR
+**By accessing or using the Service, you (“You” or “Customer”) agree to these Terms of Service, the Data Processing Addendum (“DPA”), available upon request, and Zed’s [Privacy Policy](/privacy-policy) (collectively, the “Terms”).**
 
-### 2.1. License Grant
+If you are not eligible, or do not agree to the Terms, you may not access or use the Service.
 
-Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.
+By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed.
 
-### 2.2. License Limitations
+**ARBITRATION NOTICE**. Except for certain kinds of disputes described in Section 15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING. ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a).
 
-You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof.
+## 1. Overview
 
-### 2.3. Open Source Software
+Subject to these Terms, Zed will permit Customer to access and use Zed’s AI-enabled software-as-a-service offering (the “**Service**”), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed’s open source code editing software (“**Software**”).
 
-Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: [https://github.com/zed-industries/zed](https://github.com/zed-industries/zed) (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo.
+## 2. Service
 
-## 3. TERMS APPLICABLE TO THE ZED SERVICE
+### 2.1. Eligibility
 
-### 3.1. Access to and Scope of Zed Service
+Customer must be at least 18 years old to use the Service. By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer’s registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer’s behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms.
 
-If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement.
+### 2.2. Access Grant
 
-### 3.2. Restrictions

legal/third-party-terms.md 🔗

@@ -1,53 +1,39 @@
 ---
-title: 3rd Party Terms
-slug: third-party-terms
+title: Acceptable Use Policies
+slug: acceptable-use-policies
 ---
 
-In addition to the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy) usage of certain Zed features may also subject you to additional 3rd party terms and conditions. These terms and conditions may include, but are not limited to, the following:
+**Last Updated:** March 2, 2026
 
-## Anthropic
-
-- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup)
-- [Anthropic Privacy Policy](https://www.anthropic.com/legal/privacy)
-- [Anthropic Commercial Terms of Service](https://www.anthropic.com/legal/commercial-terms)
+Some third-party services accessible through Zed have their own acceptable use policies. These apply whether Zed hosts the service on your behalf or you connect your own account. The applicable policies are listed below and apply alongside the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy).
 
-## Baseten
+These policies may be updated from time to time by the applicable provider.
 
-- [BaseTen Terms and Conditions](https://www.baseten.co/terms-and-conditions/)
-
-### Exa.ai
+## Anthropic
 
-- [Exa Labs Terms and Conditions](https://exa.ai/assets/Exa_Labs_Terms_of_Service.pdf)
-- [Exa Labs Privacy Policy](https://exa.ai/privacy-policy)
+- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup)
 
 ## GitHub
 
-- [GitHub Terms of Service](https://docs.github.com/en/site-policy/github-terms/github-terms-of-service)
-- [GitHub Privacy Statement](https://docs.github.com/en/site-policy/privacy-policies/github-general-privacy-statement)
 - [GitHub Acceptable Use Policies](https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies)
 - [GitHub Copilot Product Specific Terms](https://github.com/customer-terms/github-copilot-product-specific-terms)
 
 ## Google
 
-- [Google APIs Terms of Service](https://developers.google.com/terms)
-- [Google Gemini API Additional Terms of Service](https://ai.google.dev/gemini-api/terms)
 - [Google Generative AI Prohibited Use Policy](https://policies.google.com/terms/generative-ai/use-policy)
 
-## LiveKit
+## OpenAI
+
+- [OpenAI Usage Policies](https://openai.com/policies/usage-policies/)
 
-- [LiveKit Terms of Service](https://livekit.io/legal/terms-of-service)
-- [LiveKit Privacy Policy](https://livekit.io/legal/privacy-policy)
+## OpenRouter
 
-## OpenAI
+- [OpenRouter Terms of Service](https://openrouter.ai/terms)
 
-- [OpenAI Terms of Use](https://openai.com/policies/terms-of-use/)
-- [OpenAI Privacy Policy](https://openai.com/policies/privacy-policy/)
-- [OpenAI Business terms](https://openai.com/policies/business-terms/)
-- [OpenAI Service terms](https://openai.com/policies/service-terms/)
+## Vercel
 
-## SuperMaven
+- [Vercel Acceptable Use Policy](https://vercel.com/legal/acceptable-use-policy)
 
-- [SuperMaven Terms of Service](https://supermaven.com/terms-of-service)
-- [SuperMaven Privacy Policy](https://supermaven.com/privacy-policy)
+## xAI
 
-**DATE: May 6, 2025**
+- [xAI Acceptable Use Policy](https://x.ai/legal/acceptable-use-policy)

nix/build.nix 🔗

@@ -1,4 +1,6 @@
 {
+  pkgs,
+  system,
   lib,
   stdenv,
 
@@ -24,10 +26,18 @@
   fontconfig,
   freetype,
   git,
+  glib,
+  libdrm,
+  libgbm,
   libgit2,
   libglvnd,
+  libva,
+  libxcomposite,
+  libxdamage,
+  libxext,
+  libxfixes,
   libxkbcommon,
-  livekit-libwebrtc,
+  libxrandr,
   nodejs_22,
   openssl,
   perl,
@@ -161,11 +171,21 @@ let
       ]
       ++ lib.optionals stdenv'.hostPlatform.isLinux [
         alsa-lib
+        glib
+        libva
         libxkbcommon
         wayland
         gpu-lib
         xorg.libX11
         xorg.libxcb
+        libdrm
+        libgbm
+        libva
+        libxcomposite
+        libxdamage
+        libxext
+        libxfixes
+        libxrandr
       ]
       ++ lib.optionals stdenv'.hostPlatform.isDarwin [
         apple-sdk_15
@@ -200,7 +220,7 @@ let
         };
         ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. Auto-updates have thus been disabled.";
         RELEASE_VERSION = version;
-        LK_CUSTOM_WEBRTC = livekit-libwebrtc;
+        LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { };
         PROTOC = "${protobuf}/bin/protoc";
 
         CARGO_PROFILE = profile;
@@ -213,6 +233,7 @@ let
           lib.makeLibraryPath [
             gpu-lib
             wayland
+            libva
           ]
         }";
 
@@ -244,6 +265,16 @@ let
             postPatch = ''
               substituteInPlace webrtc-sys/build.rs --replace-fail \
                 "cargo:rustc-link-lib=static=webrtc" "cargo:rustc-link-lib=dylib=webrtc"
+
+              substituteInPlace webrtc-sys/build.rs --replace-fail \
+                'add_gio_headers(&mut builder);' \
+                'for lib_name in ["glib-2.0", "gio-2.0"] {
+                    if let Ok(lib) = pkg_config::Config::new().cargo_metadata(false).probe(lib_name) {
+                        for path in lib.include_paths {
+                            builder.include(&path);
+                        }
+                    }
+                }'
             ''
             + lib.optionalString withGLES ''
               cat ${glesConfig} >> .cargo/config/config.toml

nix/livekit-libwebrtc/0001-shared-libraries.patch 🔗

@@ -0,0 +1,17 @@
+--- a/BUILD.gn
++++ b/BUILD.gn
+@@ -143,8 +143,12 @@
+ # target_defaults and direct_dependent_settings.
+ config("common_inherited_config") {
+   defines = [ "PROTOBUF_ENABLE_DEBUG_LOGGING_MAY_LEAK_PII=0" ]
+-  cflags = []
+-  ldflags = []
++  cflags = [ "-fvisibility=default" ]
++  ldflags = [ "-lavutil", "-lavformat", "-lavcodec" ]
++
++  if (is_linux) {
++    ldflags += [ "-Wl,--version-script=" + rebase_path("//libwebrtc.version", root_build_dir) ]
++  }
+
+   if (rtc_objc_prefix != "") {
+     defines += [ "RTC_OBJC_TYPE_PREFIX=${rtc_objc_prefix}" ]

nix/livekit-libwebrtc/README.md 🔗

@@ -0,0 +1,7 @@
+# Vendored livekit-libwebrtc build
+
+The contents of this directory are vendored from [this nixpkgs
+PR](https://github.com/NixOS/nixpkgs/pull/478907).
+
+It should be removed as soon as said PR is merged and the new version of libwebrtc hits
+nixpkgs-unstable.

nix/livekit-libwebrtc/chromium-129-rust.patch 🔗

@@ -0,0 +1,21 @@
+diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
+index 45086d6838cac..81132ad8ecb31 100644
+--- a/build/config/compiler/BUILD.gn
++++ b/build/config/compiler/BUILD.gn
+@@ -1727,16 +1727,6 @@ config("runtime_library") {
+     configs += [ "//build/config/c++:runtime_library" ]
+   }
+ 
+-  # Rust and C++ both provide intrinsics for LLVM to call for math operations. We
+-  # want to use the C++ intrinsics, not the ones in the Rust compiler_builtins
+-  # library. The Rust symbols are marked as weak, so that they can be replaced by
+-  # the C++ symbols. This config ensures the C++ symbols exist and are strong in
+-  # order to cause that replacement to occur by explicitly linking in clang's
+-  # compiler-rt library.
+-  if (is_clang && !is_nacl && !is_cronet_build) {
+-    configs += [ "//build/config/clang:compiler_builtins" ]
+-  }
+-
+   # TODO(crbug.com/40570904): Come up with a better name for is POSIX + Fuchsia
+   # configuration.
+   if (is_posix || is_fuchsia) {

nix/livekit-libwebrtc/libwebrtc.version 🔗

@@ -0,0 +1,22 @@
+/* Linker version script for libwebrtc.so (Linux only).
+ *
+ * When libwebrtc.so is built with rtc_use_pipewire=true and
+ * -fvisibility=default, PipeWire lazy-load trampoline stubs (pw_*, spa_*)
+ * are exported as weak symbols. If the PipeWire ALSA plugin
+ * (libasound_module_pcm_pipewire.so) is later dlopen'd by libasound,
+ * the dynamic linker may resolve the plugin's pw_* references through
+ * libwebrtc.so's broken trampolines instead of the real libpipewire.so,
+ * causing a SIGSEGV (NULL function pointer dereference).
+ *
+ * This script hides only those third-party symbol namespaces while
+ * keeping every WebRTC / BoringSSL / internal symbol exported (which
+ * the Rust webrtc-sys bindings require).
+ */
+{
+  global:
+    *;
+
+  local:
+    pw_*;
+    spa_*;
+};

nix/livekit-libwebrtc/mkSystemLibraries.nix 🔗

@@ -0,0 +1,64 @@
+{
+  brotli,
+  fontconfig,
+  freetype,
+  harfbuzz,
+  icu,
+  jsoncpp,
+  libpng,
+  libwebp,
+  libxml2,
+  libxslt,
+  minizip,
+  ffmpeg_6,
+}:
+{
+  "brotli" = {
+    package = brotli;
+    path = "third_party/brotli/BUILD.gn";
+  };
+  "fontconfig" = {
+    package = fontconfig;
+    path = "third_party/fontconfig/BUILD.gn";
+  };
+  "freetype" = {
+    package = freetype;
+    path = "build/config/freetype/freetype.gni";
+  };
+  "harfbuzz-ng" = {
+    package = harfbuzz;
+    path = "third_party/harfbuzz-ng/harfbuzz.gni";
+  };
+  "jsoncpp" = {
+    package = jsoncpp;
+    path = "third_party/jsoncpp/BUILD.gn";
+  };
+  "icu" = {
+    package = icu;
+    path = "third_party/icu/BUILD.gn";
+  };
+  "libpng" = {
+    package = libpng;
+    path = "third_party/libpng/BUILD.gn";
+  };
+  "libwebp" = {
+    package = libwebp;
+    path = "third_party/libwebp/BUILD.gn";
+  };
+  "libxml" = {
+    package = libxml2;
+    path = "third_party/libxml/BUILD.gn";
+  };
+  "libxslt" = {
+    package = libxslt;
+    path = "third_party/libxslt/BUILD.gn";
+  };
+  "zlib" = {
+    package = minizip;
+    path = "third_party/zlib/BUILD.gn";
+  };
+  "ffmpeg" = {
+    package = ffmpeg_6;
+    path = "third_party/ffmpeg/BUILD.gn";
+  };
+}

nix/livekit-libwebrtc/package.nix 🔗

@@ -0,0 +1,342 @@
+{
+  stdenv,
+  clang,
+  gclient2nix,
+  lib,
+  gn,
+  fetchurl,
+  fetchpatch,
+  xcbuild,
+  python3,
+  ninja,
+  git,
+  cpio,
+  pkg-config,
+  glib,
+  alsa-lib,
+  pulseaudio,
+  nasm,
+  brotli,
+  fontconfig,
+  freetype,
+  harfbuzz,
+  icu,
+  jsoncpp,
+  libpng,
+  libwebp,
+  libxml2,
+  libxslt,
+  minizip,
+  ffmpeg_6,
+  libepoxy,
+  libgbm,
+  libGL,
+  libxcomposite,
+  libxdamage,
+  libxext,
+  libxfixes,
+  libxrandr,
+  libxtst,
+  pipewire,
+  xorg,
+}:
+let
+  platformMap = {
+    "x86_64" = "x64";
+    "i686" = "x86";
+    "arm" = "arm";
+    "aarch64" = "arm64";
+  };
+  cpuName = stdenv.hostPlatform.parsed.cpu.name;
+  gnArch = platformMap."${cpuName}" or (throw "unsupported arch ${cpuName}");
+  gnOs =
+    if stdenv.hostPlatform.isLinux then
+      "linux"
+    else if stdenv.hostPlatform.isDarwin then
+      "mac"
+    else
+      throw "unknown platform ${stdenv.hostPlatform.config}";
+  boringSslSymbols = fetchurl {
+    url = "https://raw.githubusercontent.com/livekit/rust-sdks/refs/tags/webrtc-dac8015-6/webrtc-sys/libwebrtc/boringssl_prefix_symbols.txt";
+    hash = "sha256-dAweArv8zjsFPENEKi9mNBQkt4y+hh3rCqG6QZjRC20=";
+  };
+  gnSystemLibraries = import ./mkSystemLibraries.nix {
+    inherit
+      brotli
+      fontconfig
+      freetype
+      harfbuzz
+      icu
+      jsoncpp
+      libpng
+      libwebp
+      libxml2
+      libxslt
+      minizip
+      ffmpeg_6
+      ;
+  };
+in
+stdenv.mkDerivation {
+  pname = "livekit-libwebrtc";
+  version = "137-unstable-2025-11-24";
+
+  gclientDeps = gclient2nix.importGclientDeps ./sources.json;
+  sourceRoot = "src";
+
+  patches = [
+    # Adds missing dependencies to generated LICENSE
+    (fetchpatch {
+      url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_licenses.patch";
+      hash = "sha256-9A4KyRW1K3eoQxsTbPX0vOnj66TCs2Fxjpsu5wO8mGI=";
+    })
+    # Fixes the certificate chain, required for Let's Encrypt certs
+    (fetchpatch {
+      url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/ssl_verify_callback_with_native_handle.patch";
+      hash = "sha256-RBvRcJzoKItpEbqpe07YZe1D1ZVGS12EnDSISldGy+0=";
+    })
+    # Adds dependencies and features required by livekit
+    (fetchpatch {
+      url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_deps.patch";
+      hash = "sha256-DwRtGdU5sppmiFsVuyhJoVCQrRl5JFmZJfxgUPhYXBg=";
+    })
+    # Fix gcc-related errors
+    (fetchpatch {
+      url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/force_gcc.patch";
+      hash = "sha256-1d73Pi1HkbunjYvp1NskUNE4xXbCmnh++rC6NrCJHbY=";
+      stripLen = 1;
+      extraPrefix = "build/";
+    })
+    # Fix a gcc-related dav1d compile option
+    (fetchpatch {
+      url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/david_disable_gun_source_macro.patch";
+      hash = "sha256-RCZpeeSQHaxkL3dY2oFFXDjYeU0KHw7idQFONGge8+0=";
+      stripLen = 1;
+      extraPrefix = "third_party/";
+    })
+    # Required for dynamically linking to ffmpeg libraries, exposing symbols,
+    # and hiding PipeWire symbols via version script (Linux only) to prevent
+    # SIGSEGV when ALSA's PipeWire plugin is loaded.
+    ./0001-shared-libraries.patch
+    # Borrow a patch from chromium to prevent a build failure due to missing libclang libraries
+    ./chromium-129-rust.patch
+  ];
+
+  postPatch = ''
+    substituteInPlace .gn \
+      --replace-fail "vpython3" "python3"
+
+    substituteInPlace tools/generate_shim_headers/generate_shim_headers.py \
+      --replace-fail "OFFICIAL_BUILD" "GOOGLE_CHROME_BUILD"
+
+    substituteInPlace BUILD.gn \
+      --replace-fail "rtc_static_library" "rtc_shared_library" \
+      --replace-fail "complete_static_lib = true" ""
+
+    substituteInPlace webrtc.gni \
+      --replace-fail "!build_with_chromium && is_component_build" "false"
+
+    substituteInPlace rtc_tools/BUILD.gn \
+      --replace-fail "\":frame_analyzer\"," ""
+
+    for lib in ${toString (builtins.attrNames gnSystemLibraries)}; do
+      if [ -d "third_party/$lib" ]; then
+        find "third_party/$lib" -type f \
+          \! -path "third_party/$lib/chromium/*" \
+          \! -path "third_party/$lib/google/*" \
+          \! -path "third_party/harfbuzz-ng/utils/hb_scoped.h" \
+          \! -regex '.*\.\(gn\|gni\|isolate\)' \
+          \! -name 'LICENSE*' \
+          \! -name 'COPYING*' \
+          -delete
+      fi
+    done
+
+    # Trick the update_rust.py script into thinking we have *this specific* rust available.
+    # It isn't actually needed for the libwebrtc build, but GN will fail if it isn't there.
+    mkdir -p third_party/rust-toolchain
+    (python3 tools/rust/update_rust.py --print-package-version || true) \
+      | head -n 1 \
+      | sed 's/.* expected Rust version is \([^ ]*\) .*/rustc 1.0 1234 (\1 chromium)/' \
+      > third_party/rust-toolchain/VERSION
+  ''
+  + lib.optionalString stdenv.hostPlatform.isLinux ''
+    mkdir -p buildtools/linux64
+    ln -sf ${lib.getExe gn} buildtools/linux64/gn
+    cp ${./libwebrtc.version} libwebrtc.version
+    substituteInPlace build/toolchain/linux/BUILD.gn \
+      --replace 'toolprefix = "aarch64-linux-gnu-"' 'toolprefix = ""'
+  ''
+  + lib.optionalString stdenv.hostPlatform.isDarwin ''
+    mkdir -p buildtools/mac
+    ln -sf ${lib.getExe gn} buildtools/mac/gn
+    chmod +x build/toolchain/apple/linker_driver.py
+    patchShebangs build/toolchain/apple/linker_driver.py
+    substituteInPlace build/toolchain/apple/toolchain.gni --replace-fail "/bin/cp -Rc" "cp -a"
+  '';
+
+  outputs = [
+    "dev"
+    "out"
+  ];
+
+  nativeBuildInputs =
+    (builtins.concatLists (
+      lib.mapAttrsToList (
+        _: library: if (library.package ? dev) then [ library.package.dev ] else [ ]
+      ) gnSystemLibraries
+    ))
+    ++ [
+      gclient2nix.gclientUnpackHook
+      gn
+      (python3.withPackages (ps: [ ps.setuptools ]))
+      ninja
+      git
+      cpio
+      pkg-config
+    ]
+    ++ lib.optionals stdenv.hostPlatform.isDarwin [ xcbuild ];
+
+  buildInputs = [
+    nasm
+  ]
+  ++ (lib.mapAttrsToList (_: library: library.package) gnSystemLibraries)
+  ++ (lib.optionals stdenv.hostPlatform.isLinux [
+    glib
+    alsa-lib
+    pulseaudio
+    libepoxy
+    libgbm
+    libGL
+    libxcomposite
+    libxdamage
+    libxext
+    libxfixes
+    libxrandr
+    libxtst
+    pipewire
+    xorg.libX11
+    xorg.libXi
+  ]);
+
+  preConfigure = ''
+    echo "generate_location_tags = true" >> build/config/gclient_args.gni
+    echo "0" > build/util/LASTCHANGE.committime
+
+    python build/linux/unbundle/replace_gn_files.py \
+        --system-libraries ${toString (builtins.attrNames gnSystemLibraries)}
+  '';
+
+  gnFlags = [
+    "is_debug=false"
+    "rtc_include_tests=false"
+    ''target_os="${gnOs}"''
+    ''target_cpu="${gnArch}"''
+    "treat_warnings_as_errors=false"
+    "rtc_enable_protobuf=false"
+    "rtc_include_tests=false"
+    "rtc_build_examples=false"
+    "rtc_build_tools=false"
+    "rtc_libvpx_build_vp9=true"
+    "enable_libaom=true"
+    "use_dummy_lastchange=true"
+    "is_component_build=true"
+    "enable_stripping=true"
+    "rtc_use_h264=true"
+    "rtc_use_h265=true"
+    "use_custom_libcxx=false"
+    "use_rtti=true"
+  ]
+  ++ (lib.optionals stdenv.hostPlatform.isLinux [
+    "rtc_use_pipewire=true"
+    "symbol_level=0"
+    "enable_iterator_debugging=false"
+    "rtc_use_x11=true"
+    "use_sysroot=false"
+    "use_custom_libcxx_for_host=false"
+    "use_libcxx_modules=false"
+    "use_llvm_libatomic=false"
+    "is_clang=false"
+  ])
+  ++ (lib.optionals stdenv.hostPlatform.isDarwin [
+    ''mac_deployment_target="${stdenv.hostPlatform.darwinMinVersion}"''
+    "rtc_enable_symbol_export=true"
+    "rtc_enable_objc_symbol_export=true"
+    "rtc_include_dav1d_in_internal_decoder_factory=true"
+    "clang_use_chrome_plugins=false"
+    "use_lld=false"
+    ''clang_base_path="${clang}"''
+  ]);
+
+  ninjaFlags = [
+    ":default"
+  ]
+  ++ lib.optionals stdenv.hostPlatform.isDarwin [
+    "api/audio_codecs:builtin_audio_decoder_factory"
+    "api/task_queue:default_task_queue_factory"
+    "sdk:native_api"
+    "sdk:default_codec_factory_objc"
+    "pc:peer_connection"
+    "sdk:videocapture_objc"
+    "sdk:mac_framework_objc"
+    "desktop_capture_objc"
+  ];
+
+  postBuild =
+    lib.optionalString stdenv.hostPlatform.isLinux ''
+      objcopy --redefine-syms="${boringSslSymbols}" "libwebrtc.so"
+    ''
+    + ''
+      # Generate licenses
+      python3 "../../tools_webrtc/libs/generate_licenses.py" \
+          --target ${if stdenv.hostPlatform.isDarwin then ":webrtc" else ":default"} $PWD $PWD
+    '';
+
+  installPhase = ''
+    runHook preInstall
+
+    mkdir -p $out/lib
+    mkdir -p $dev/include
+
+    install -m0644 obj/webrtc.ninja obj/modules/desktop_capture/desktop_capture.ninja args.gn LICENSE.md $dev
+
+    pushd ../..
+    find . -name "*.h" -print | cpio -pd $dev/include
+    find . -name "*.inc" -print | cpio -pd $dev/include
+    popd
+  ''
+  + lib.optionalString stdenv.hostPlatform.isLinux ''
+    install -m0644 libwebrtc.so libthird_party_boringssl.so $out/lib
+  ''
+  + lib.optionalString stdenv.hostPlatform.isDarwin ''
+    install -m0644 WebRTC.framework/Versions/A/WebRTC $out/lib/libwebrtc.dylib
+    install -m0644 libthird_party_boringssl.dylib $out/lib
+  ''
+  + ''
+    ln -s $out/lib $dev/lib
+
+    runHook postInstall
+  '';
+
+  postFixup = lib.optionalString stdenv.hostPlatform.isDarwin ''
+    boringssl="$out/lib/libthird_party_boringssl.dylib"
+    webrtc="$out/lib/libwebrtc.dylib"
+
+    install_name_tool -id "$boringssl" "$boringssl"
+    install_name_tool -id "$webrtc" "$webrtc"
+    install_name_tool -change @rpath/libthird_party_boringssl.dylib "$boringssl" "$webrtc"
+  '';
+
+  passthru.updateScript = ./update.sh;
+
+  meta = {
+    description = "WebRTC library used by livekit";
+    homepage = "https://github.com/livekit/rust-sdks/";
+    license = lib.licenses.bsd3;
+    maintainers = with lib.maintainers; [
+      WeetHet
+      niklaskorz
+    ];
+    platforms = lib.platforms.linux ++ lib.platforms.darwin;
+  };
+}

nix/livekit-libwebrtc/sources.json 🔗

@@ -0,0 +1,372 @@
+{
+    "src": {
+        "args": {
+            "hash": "sha256-+PgmOZD2Fi+SC66nguixhSwDsoXi4Sz693qOZZrLXm8=",
+            "owner": "webrtc-sdk",
+            "repo": "webrtc",
+            "rev": "624fa1dce239af785fc5fa9ca3b21b9250d3f835"
+        },
+        "fetcher": "fetchFromGitHub"
+    },
+    "src/base": {
+        "args": {
+            "hash": "sha256-MTG+pjMPY6/dqeEUy+xJVxPuICETtV98S+h/lFwGItg=",
+            "rev": "86c814633cf284bc8057a539bc722e2a672afe2f",
+            "url": "https://chromium.googlesource.com/chromium/src/base"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/build": {
+        "args": {
+            "hash": "sha256-qFZ12YFX4qxFEHU+VWOG+HDYYPXodgGz+iJ7WEc7cD8=",
+            "owner": "webrtc-sdk",
+            "repo": "build",
+            "rev": "01021e6c12636951a6b4e5342e16b2101b352367"
+        },
+        "fetcher": "fetchFromGitHub"
+    },
+    "src/buildtools": {
+        "args": {
+            "hash": "sha256-YWtmMKL1ydueNJ4XM/Pq+8OpqIFe5A6/vYyfZTv7/EI=",
+            "rev": "0f32cb9025766951122d4ed19aba87a94ded3f43",
+            "url": "https://chromium.googlesource.com/chromium/src/buildtools"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/testing": {
+        "args": {
+            "hash": "sha256-s65cABkyMo+FkAmilS67qM3VnrT7iYZg9scycrXzxyE=",
+            "rev": "a89c37d36bf80c05963727e28b9916835ae88d3a",
+            "url": "https://chromium.googlesource.com/chromium/src/testing"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party": {
+        "args": {
+            "hash": "sha256-q+xVOFlpC0vnLMSF9Z6ZRL7mb/cu8jBpsWjDNFFgiKM=",
+            "rev": "8062e0e102496ff14a8c58b586f014527424953d",
+            "url": "https://chromium.googlesource.com/chromium/src/third_party"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/boringssl/src": {
+        "args": {
+            "hash": "sha256-5Efqc8pLs4ZskXQGpFdTb5cw//v3+DR285m/DsrWSWA=",
+            "rev": "34492c89a8e381e0e856a686cc71b1eb5bd728db",
+            "url": "https://boringssl.googlesource.com/boringssl.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/breakpad/breakpad": {
+        "args": {
+            "hash": "sha256-0ynZuxIqBIpNkfD3Y9XdPFQr7HeQcsUO3lhnqvH+k8c=",
+            "rev": "232a723f5096ab02d53d87931efa485fa77d3b03",
+            "url": "https://chromium.googlesource.com/breakpad/breakpad.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/catapult": {
+        "args": {
+            "hash": "sha256-FIJZE1Qu1MLZA4qxB68k1NjhgSbFTjf57YF85JicVZw=",
+            "rev": "000f47cfa393d7f9557025a252862e2a61a60d44",
+            "url": "https://chromium.googlesource.com/catapult.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/ced/src": {
+        "args": {
+            "hash": "sha256-ySG74Rj2i2c/PltEgHVEDq+N8yd9gZmxNktc56zIUiY=",
+            "rev": "ba412eaaacd3186085babcd901679a48863c7dd5",
+            "url": "https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/clang-format/script": {
+        "args": {
+            "hash": "sha256-d9uweklBffiuCWEb03ti1eFLnMac2qRtvggzXY1n/RU=",
+            "rev": "37f6e68a107df43b7d7e044fd36a13cbae3413f2",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/colorama/src": {
+        "args": {
+            "hash": "sha256-6ZTdPYSHdQOLYMSnE+Tp7PgsVTs3U2awGu9Qb4Rg/tk=",
+            "rev": "3de9f013df4b470069d03d250224062e8cf15c49",
+            "url": "https://chromium.googlesource.com/external/colorama.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/compiler-rt/src": {
+        "args": {
+            "hash": "sha256-yo7BFGgwJNScsXwnCAu8gFBdZVS8/HJplzUk2e73mVg=",
+            "rev": "57213f125d03209892fed26189feb3b736e96735",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/crc32c/src": {
+        "args": {
+            "hash": "sha256-KBraGaO5LmmPP+p8RuDogGldbTWdNDK+WzF4Q09keuE=",
+            "rev": "d3d60ac6e0f16780bcfcc825385e1d338801a558",
+            "url": "https://chromium.googlesource.com/external/github.com/google/crc32c.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/dav1d/libdav1d": {
+        "args": {
+            "hash": "sha256-+DY4p41VuAlx7NvOfXjWzgEhvtpebjkjbFwSYOzSjv4=",
+            "rev": "8d956180934f16244bdb58b39175824775125e55",
+            "url": "https://chromium.googlesource.com/external/github.com/videolan/dav1d.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/depot_tools": {
+        "args": {
+            "hash": "sha256-DWQyYtpAAGiryeGJzIWlUwY5yn4cNwXY957vlPDUNak=",
+            "rev": "fa8fc854e1766b86f10c9a15902cf3cc23adaac2",
+            "url": "https://chromium.googlesource.com/chromium/tools/depot_tools.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/ffmpeg": {
+        "args": {
+            "hash": "sha256-hNzQZQxaa2Wtl7GWWF852cFmmXy4pc15Pp0d59TTfnI=",
+            "rev": "01f23648c6b84de6c0f717fa4e1816f53b9ee72e",
+            "url": "https://chromium.googlesource.com/chromium/third_party/ffmpeg.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/flatbuffers/src": {
+        "args": {
+            "hash": "sha256-tbc45o0MbMvK5XqRUJt5Eg8BU6+TJqlmwFgQhHq6wRM=",
+            "rev": "8db59321d9f02cdffa30126654059c7d02f70c32",
+            "url": "https://chromium.googlesource.com/external/github.com/google/flatbuffers.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/fontconfig/src": {
+        "args": {
+            "hash": "sha256-W5WIgC6A52kY4fNkbsDEa0o+dfd97Rl5NKfgnIRpI00=",
+            "rev": "14d466b30a8ab4a9d789977ed94f2c30e7209267",
+            "url": "https://chromium.googlesource.com/external/fontconfig.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/freetype/src": {
+        "args": {
+            "hash": "sha256-Vlin6Z+QisUyj6R+TclVOm8x6673YhUIWob9Ih6gzC8=",
+            "rev": "1da283b8ae6d6b94f34a5c4b8c1227adc9dbb1d8",
+            "url": "https://chromium.googlesource.com/chromium/src/third_party/freetype2.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/fuzztest/src": {
+        "args": {
+            "hash": "sha256-L2QG0pUmGjGdtdlivxYfxSqO9YaVHpIT6lvJwBMTxMw=",
+            "rev": "b10387fdbbca18192f85eaa5323a59f44bf9c468",
+            "url": "https://chromium.googlesource.com/external/github.com/google/fuzztest.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/google_benchmark/src": {
+        "args": {
+            "hash": "sha256-cH8s1gP6kCcojAAfTt5iQCVqiAaSooNk4BdaILujM3w=",
+            "rev": "761305ec3b33abf30e08d50eb829e19a802581cc",
+            "url": "https://chromium.googlesource.com/external/github.com/google/benchmark.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/googletest/src": {
+        "args": {
+            "hash": "sha256-QT9PQ9bF+eCPfRLkcHpH4jc0UZfGPc98fHf8QDV5bZg=",
+            "rev": "cd430b47a54841ec45d64d2377d7cabaf0eba610",
+            "url": "https://chromium.googlesource.com/external/github.com/google/googletest.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/grpc/src": {
+        "args": {
+            "hash": "sha256-xivmP36VCSbiMAV3PDUjzCrF+AJzFXJdMe5e2q9yW/k=",
+            "rev": "957c9f95224b1e1318c0ecb98d0e7584ea5ccff2",
+            "url": "https://chromium.googlesource.com/external/github.com/grpc/grpc.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/gtest-parallel": {
+        "args": {
+            "hash": "sha256-VUuk5tBTh+aU2dxVWUF1FePWlKUJaWSiGSXk/J5zgHw=",
+            "rev": "96f4f904922f9bf66689e749c40f314845baaac8",
+            "url": "https://chromium.googlesource.com/external/github.com/google/gtest-parallel"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/harfbuzz-ng/src": {
+        "args": {
+            "hash": "sha256-lNnCtgIegUy4DLhYaGZXcEaFw83KWAHoKpz69AEsWp4=",
+            "rev": "9f83bbbe64654b45ba5bb06927ff36c2e7588495",
+            "url": "https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/icu": {
+        "args": {
+            "hash": "sha256-eGI/6wk6IOUPvX7pRTm4VJk1CqkkxalTu84L36i/D6k=",
+            "rev": "4c8cc4b365a505ce35be1e0bd488476c5f79805d",
+            "url": "https://chromium.googlesource.com/chromium/deps/icu.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/instrumented_libs": {
+        "args": {
+            "hash": "sha256-8kokdsnn5jD9KgM/6g0NuITBbKkGXWEM4BMr1nCrfdU=",
+            "rev": "69015643b3f68dbd438c010439c59adc52cac808",
+            "url": "https://chromium.googlesource.com/chromium/third_party/instrumented_libraries.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/jsoncpp/source": {
+        "args": {
+            "hash": "sha256-bSLNcoYBz3QCt5VuTR056V9mU2PmBuYBa0W6hFg2m8Q=",
+            "rev": "42e892d96e47b1f6e29844cc705e148ec4856448",
+            "url": "https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libFuzzer/src": {
+        "args": {
+            "hash": "sha256-Lb+HczYax0T7qvC0/Nwhc5l2szQTUYDouWRMD/Qz7sA=",
+            "rev": "e31b99917861f891308269c36a32363b120126bb",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libaom/source/libaom": {
+        "args": {
+            "hash": "sha256-ngVZ+xK0b+jKUmawteQ7VFAQzoebX4jqZ3hP9pW+Q0Q=",
+            "rev": "a23a4799ec2d7dd6e436c7b64a34553773014ed7",
+            "url": "https://aomedia.googlesource.com/aom.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libc++/src": {
+        "args": {
+            "hash": "sha256-lqeuVUgeAKm1pxo+w1vyUbBkBXBzLCQ+Lfu44neKLPo=",
+            "rev": "917609c669e43edc850eeb192a342434a54e1dfd",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libc++abi/src": {
+        "args": {
+            "hash": "sha256-X9cAbyd8ZPSwqOGhPYwIZ6b9E3tVwAuAYZKMgbZQxgk=",
+            "rev": "f2a7f2987f9dcdf8b04c2d8cd4dcb186641a7c3e",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libjpeg_turbo": {
+        "args": {
+            "hash": "sha256-Ig+tmprZDvlf/M72/DTar2pbxat9ZElgSqdXdoM0lPs=",
+            "rev": "e14cbfaa85529d47f9f55b0f104a579c1061f9ad",
+            "url": "https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libsrtp": {
+        "args": {
+            "hash": "sha256-bkG1+ss+1a2rCHGwZjhvf5UaNVbPPZJt9HZSIPBKGwM=",
+            "rev": "a52756acb1c5e133089c798736dd171567df11f5",
+            "url": "https://chromium.googlesource.com/chromium/deps/libsrtp.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libunwind/src": {
+        "args": {
+            "hash": "sha256-XdFKn+cGOxA0fHkVMG9UAhCmpML44ocoyHB7XnumX7o=",
+            "rev": "81e2cb40a70de2b6978e6d8658891ded9a77f7e3",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libvpx/source/libvpx": {
+        "args": {
+            "hash": "sha256-NIGpzP6elcPScHJlZmnPHJdmXsuHcbuELT0C4Ha5PcA=",
+            "rev": "ff1d193f4b9dfa9b2ced51efbb6ec7a69e58e88c",
+            "url": "https://chromium.googlesource.com/webm/libvpx.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/libyuv": {
+        "args": {
+            "hash": "sha256-b/EYCWBQvsNoGhea31DPBKpG8eouf0OBi5TgdHDHs9A=",
+            "rev": "1e40e34573c3861480d107cd4a4ce290df79951f",
+            "url": "https://chromium.googlesource.com/libyuv/libyuv.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/llvm-libc/src": {
+        "args": {
+            "hash": "sha256-yNNx3gOGafMNvZ+aebDKHVj6QM8g0zt0d69PWlWLkyk=",
+            "rev": "912274164f0877ca917c06e8484ad3be1784833a",
+            "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libc.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/lss": {
+        "args": {
+            "hash": "sha256-rhp4EcZYdgSfu9cqn+zxxGx6v2IW8uX8V+iA0UfZhFY=",
+            "rev": "ed31caa60f20a4f6569883b2d752ef7522de51e0",
+            "url": "https://chromium.googlesource.com/linux-syscall-support.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/nasm": {
+        "args": {
+            "hash": "sha256-neYrS4kQ76ihUh22Q3uPR67Ld8+yerA922YSZU1KxJs=",
+            "rev": "9f916e90e6fc34ec302573f6ce147e43e33d68ca",
+            "url": "https://chromium.googlesource.com/chromium/deps/nasm.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/openh264/src": {
+        "args": {
+            "hash": "sha256-tf0lnxATCkoq+xRti6gK6J47HwioAYWnpEsLGSA5Xdg=",
+            "rev": "652bdb7719f30b52b08e506645a7322ff1b2cc6f",
+            "url": "https://chromium.googlesource.com/external/github.com/cisco/openh264"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/perfetto": {
+        "args": {
+            "hash": "sha256-I0qiAh3VliVop+3S2/tP6VwCAJOk0Vu7xy8vHJZ1w2A=",
+            "rev": "a54dd38d60593129ae56d400f1a72860670abea4",
+            "url": "https://chromium.googlesource.com/external/github.com/google/perfetto.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/protobuf-javascript/src": {
+        "args": {
+            "hash": "sha256-zq86SrDASl6aYPFPijRZp03hJqXUFz2Al/KkiNq7i0M=",
+            "rev": "eb785a9363664a402b6336dfe96aad27fb33ffa8",
+            "url": "https://chromium.googlesource.com/external/github.com/protocolbuffers/protobuf-javascript"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/third_party/re2/src": {
+        "args": {
+            "hash": "sha256-f/k2rloV2Nwb0KuJGUX4SijFxAx69EXcsXOG4vo+Kis=",
+            "rev": "c84a140c93352cdabbfb547c531be34515b12228",
+            "url": "https://chromium.googlesource.com/external/github.com/google/re2.git"
+        },
+        "fetcher": "fetchFromGitiles"
+    },
+    "src/tools": {
+        "args": {
+            "hash": "sha256-kZFZl8SC9nZIIOVtNl/5H4huw6BCBsBkJVJ4gaUmly4=",
+            "rev": "ffcbc837bbb14d80d09147c2af5302ff6bd4bd69",
+            "url": "https://chromium.googlesource.com/chromium/src/tools"
+        },
+        "fetcher": "fetchFromGitiles"
+    }
+}

nix/livekit-libwebrtc/update.sh 🔗

@@ -0,0 +1,33 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p gitMinimal curl gojq gclient2nix
+
+set -eou pipefail
+package="livekit-libwebrtc"
+pkg_dir="$(dirname "$0")"
+nixpkgs="$(git rev-parse --show-toplevel)"
+
+gh-curl () {
+  curl --silent ${GITHUB_TOKEN:+-u ":$GITHUB_TOKEN"} "$1"
+}
+
+# Get the current version part before the "-unstable-" for the branch name.
+# To manually update to a new major version, you can also invoke the script
+# with the new major version, e.g., UPDATE_MAJOR_VERSION=137.
+old_version="${UPDATE_NIX_OLD_VERSION:-$(nix-instantiate --eval -E "(import \"$nixpkgs\" { }).$package.version" | tr -d '"')}"
+major_version="${UPDATE_MAJOR_VERSION:-${old_version%%-unstable-*}}"
+branch="m${major_version}_release"
+
+# Fetch the current HEAD commit of the release branch
+head="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/refs/heads/$branch" | gojq '.object.sha' --raw-output)"
+if gojq -e ".src.args.rev == \"$head\"" "$pkg_dir/sources.json"; then
+  echo "$package is already up-to-date: $head"
+  exit 0
+fi
+
+# Get the commit's date for the version field
+date="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/commits/$head" | gojq '.committer.date| split("T") | .[0]' --raw-output)"
+
+echo "Updating sources.json to $head"
+gclient2nix generate --root src "https://github.com/webrtc-sdk/webrtc@$head" > "$pkg_dir/sources.json"
+
+sed -i "s|$old_version|$major_version-unstable-$date|g" "$pkg_dir/package.nix"

rust-toolchain.toml 🔗

@@ -4,5 +4,6 @@ profile = "minimal"
 components = [ "rustfmt", "clippy", "rust-analyzer", "rust-src" ]
 targets = [
     "wasm32-wasip2", # extensions
+    "wasm32-unknown-unknown", # gpui on the web
     "x86_64-unknown-linux-musl", # remote server
 ]

script/bundle-linux 🔗

@@ -71,7 +71,7 @@ if "$rustup_installed"; then
     rustup target add "$remote_server_triple"
 fi
 
-export CC=$(which clang)
+export CC=${CC:-$(which clang)}
 
 # Build binary in release mode
 export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib"

script/clippy 🔗

@@ -16,4 +16,8 @@ if [[ -z "${GITHUB_ACTIONS+x}" ]]; then
 
     which typos >/dev/null 2>&1 || exit 0
     typos --config typos.toml
+
+    which buf >/dev/null 2>&1 || exit 0
+    buf lint crates/proto/proto
+    buf format --diff --exit-code crates/proto/proto
 fi

script/docs-strip-preview-callouts 🔗

@@ -117,8 +117,8 @@ if [[ "$DRY_RUN" == "true" ]]; then
     exit 0
 fi
 
-# Check for clean working state
-if [[ -n "$(git status --porcelain docs/)" ]]; then
+# Check for clean working state (ignore untracked files)
+if [[ -n "$(git status --porcelain docs/ | grep -v '^??' || true)" ]]; then
     error "docs/ directory has uncommitted changes. Please commit or stash first."
 fi
 
@@ -213,8 +213,11 @@ And:
 > **Changed in Preview (v0.XXX).** See [release notes](/releases#0.XXX).
 \`\`\`
 
-These features are now in Stable, so the callouts are no longer needed." \
-    --label "documentation"
+These features are now in Stable, so the callouts are no longer needed.
+
+Release Notes:
+
+- N/A"
 
 PR_URL=$(gh pr view --json url --jq '.url')
 

script/docs-suggest-publish 🔗

@@ -7,16 +7,19 @@
 #
 # This script:
 # 1. Reads pending suggestions from the docs/suggestions-pending branch
-# 2. Uses Droid to apply all suggestions directly to docs files
+# 2. Uses Droid to apply suggestions in batches (default 10 per batch)
 # 3. Runs docs formatting
-# 4. Creates a draft PR for human review/merge
-# 5. Optionally resets the suggestions branch after successful PR creation
+# 4. Validates docs build (action references, JSON schemas, links)
+# 5. Creates a draft PR for human review/merge
+# 6. Optionally resets the suggestions branch after successful PR creation
 #
 # Options:
-#   --dry-run       Show what would be done without creating PR
-#   --keep-queue    Don't reset the suggestions branch after PR creation
-#   --model MODEL   Override Droid model used for auto-apply
-#   --verbose       Show detailed progress
+#   --dry-run          Show what would be done without creating PR
+#   --keep-queue       Don't reset the suggestions branch after PR creation
+#   --model MODEL      Override Droid model used for auto-apply
+#   --batch-size N     Suggestions per Droid invocation (default: 10)
+#   --skip-validation  Skip the docs build validation step
+#   --verbose          Show detailed progress
 #
 # Run this as part of the preview release workflow.
 
@@ -25,7 +28,9 @@ set -euo pipefail
 DRY_RUN=false
 KEEP_QUEUE=false
 VERBOSE=false
-MODEL="${DROID_MODEL:-claude-sonnet-4-5-20250929}"
+SKIP_VALIDATION=false
+BATCH_SIZE=10
+MODEL="${DROID_MODEL:-claude-sonnet-4-5-latest}"
 
 SUGGESTIONS_BRANCH="docs/suggestions-pending"
 
@@ -66,8 +71,16 @@ while [[ $# -gt 0 ]]; do
             MODEL="$2"
             shift 2
             ;;
+        --batch-size)
+            BATCH_SIZE="$2"
+            shift 2
+            ;;
+        --skip-validation)
+            SKIP_VALIDATION=true
+            shift
+            ;;
         -h|--help)
-            head -26 "$0" | tail -24
+            head -30 "$0" | tail -28
             exit 0
             ;;
         *)
@@ -132,12 +145,16 @@ if [[ "$DRY_RUN" == "true" ]]; then
     exit 0
 fi
 
-# Ensure clean working state
-if [[ -n "$(git status --porcelain)" ]]; then
+# Ensure clean working state (ignore untracked files with grep -v '??')
+if [[ -n "$(git status --porcelain | grep -v '^??' || true)" ]]; then
     error "Working directory has uncommitted changes. Please commit or stash first."
 fi
 
-for command in git gh jq droid; do
+REQUIRED_COMMANDS=(git gh jq droid)
+if [[ "$SKIP_VALIDATION" != "true" ]]; then
+    REQUIRED_COMMANDS+=(mdbook)
+fi
+for command in "${REQUIRED_COMMANDS[@]}"; do
     if ! command -v "$command" > /dev/null 2>&1; then
         error "Required command not found: $command"
     fi
@@ -157,24 +174,165 @@ git checkout -b "$DOCS_BRANCH" origin/main
 TMPDIR=$(mktemp -d)
 trap 'rm -rf "$TMPDIR"' EXIT
 
-SUGGESTIONS_FILE="$TMPDIR/suggestions.md"
-APPLY_PROMPT_FILE="$TMPDIR/apply-prompt.md"
 APPLY_SUMMARY_FILE="$TMPDIR/apply-summary.md"
-
-# Combine queued suggestion files into one input
-for file in $(echo "$MANIFEST" | jq -r '.suggestions[].file'); do
-    {
-        echo "## Source: $file"
-        echo ""
-        git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || error "Suggestion file missing: $file"
-        echo ""
-        echo "---"
+touch "$APPLY_SUMMARY_FILE"
+
+# Collect suggestion files into an array
+SUGGESTION_FILES=()
+while IFS= read -r file; do
+    SUGGESTION_FILES+=("$file")
+done < <(echo "$MANIFEST" | jq -r '.suggestions[].file')
+
+# Determine which PRs are already in the latest stable release.
+# Suggestions queued with --preview may reference features that shipped in stable
+# by the time this script runs, so their Preview callouts should be stripped.
+STABLE_PRS=()
+STABLE_TAG=$(git tag -l 'v*' --sort=-v:refname | grep -v 'pre' | head -1 || true)
+if [[ -n "$STABLE_TAG" ]]; then
+    log "Latest stable release tag: $STABLE_TAG"
+    for file in "${SUGGESTION_FILES[@]}"; do
+        pr_num=$(echo "$MANIFEST" | jq -r --arg f "$file" '.suggestions[] | select(.file == $f) | .pr')
+        # Find the merge commit for this PR
+        merge_sha=$(gh pr view "$pr_num" --json mergeCommit --jq '.mergeCommit.oid' 2>/dev/null || true)
+        if [[ -n "$merge_sha" ]] && git merge-base --is-ancestor "$merge_sha" "$STABLE_TAG" 2>/dev/null; then
+            STABLE_PRS+=("$pr_num")
+            log "PR #$pr_num is in stable ($STABLE_TAG)"
+        fi
+    done
+    if [[ ${#STABLE_PRS[@]} -gt 0 ]]; then
+        echo -e "${YELLOW}Note:${NC} ${#STABLE_PRS[@]} suggestion(s) are for PRs already in stable ($STABLE_TAG)."
+        echo "  Preview callouts will be stripped for: ${STABLE_PRS[*]}"
         echo ""
-    } >> "$SUGGESTIONS_FILE"
-done
+    fi
+else
+    log "No stable release tag found, treating all suggestions as preview-only"
+fi
 
-# Build auto-apply prompt
-cat > "$APPLY_PROMPT_FILE" << 'EOF'
+# Determine which PRs touch code gated behind feature flags.
+# Features behind flags aren't generally available and shouldn't be documented yet.
+FLAGGED_PRS=()
+FLAGS_FILE="$REPO_ROOT/crates/feature_flags/src/flags.rs"
+if [[ -f "$FLAGS_FILE" ]]; then
+    # Extract feature flag struct names (e.g. SubagentsFeatureFlag, GitGraphFeatureFlag)
+    FLAG_NAMES=$(grep -oE 'pub struct \w+FeatureFlag' "$FLAGS_FILE" | awk '{print $3}')
+    if [[ -n "$FLAG_NAMES" ]]; then
+        FLAG_PATTERN=$(echo "$FLAG_NAMES" | tr '\n' '|' | sed 's/|$//')
+        log "Feature flags found: $(echo "$FLAG_NAMES" | tr '\n' ' ')"
+        for file in "${SUGGESTION_FILES[@]}"; do
+            pr_num=$(echo "$MANIFEST" | jq -r --arg f "$file" '.suggestions[] | select(.file == $f) | .pr')
+            # Skip PRs already marked as stable (no need to double-check)
+            is_already_stable=false
+            for stable_pr in "${STABLE_PRS[@]+"${STABLE_PRS[@]}"}"; do
+                if [[ "$stable_pr" == "$pr_num" ]]; then
+                    is_already_stable=true
+                    break
+                fi
+            done
+            if [[ "$is_already_stable" == "true" ]]; then
+                continue
+            fi
+            # Check if the PR diff references any feature flag
+            pr_diff=$(gh pr diff "$pr_num" 2>/dev/null || true)
+            if [[ -n "$pr_diff" ]] && echo "$pr_diff" | grep -qE "$FLAG_PATTERN"; then
+                matched_flags=$(echo "$pr_diff" | grep -oE "$FLAG_PATTERN" | sort -u | tr '\n' ', ' | sed 's/,$//')
+                FLAGGED_PRS+=("$pr_num")
+                log "PR #$pr_num is behind feature flag(s): $matched_flags"
+            fi
+        done
+        if [[ ${#FLAGGED_PRS[@]} -gt 0 ]]; then
+            echo -e "${YELLOW}Note:${NC} ${#FLAGGED_PRS[@]} suggestion(s) are for features behind feature flags."
+            echo "  These will be skipped: ${FLAGGED_PRS[*]}"
+            echo ""
+        fi
+    fi
+else
+    log "Feature flags file not found, skipping flag detection"
+fi
+
+# Split into batches
+TOTAL=${#SUGGESTION_FILES[@]}
+BATCH_COUNT=$(( (TOTAL + BATCH_SIZE - 1) / BATCH_SIZE ))
+
+if [[ "$BATCH_COUNT" -gt 1 ]]; then
+    echo "Processing $TOTAL suggestions in $BATCH_COUNT batches of up to $BATCH_SIZE..."
+else
+    echo "Processing $TOTAL suggestions..."
+fi
+echo ""
+
+for (( batch=0; batch<BATCH_COUNT; batch++ )); do
+    START=$(( batch * BATCH_SIZE ))
+    END=$(( START + BATCH_SIZE ))
+    if [[ "$END" -gt "$TOTAL" ]]; then
+        END=$TOTAL
+    fi
+
+    BATCH_NUM=$(( batch + 1 ))
+    BATCH_SUGGESTIONS_FILE="$TMPDIR/batch-${BATCH_NUM}-suggestions.md"
+    BATCH_PROMPT_FILE="$TMPDIR/batch-${BATCH_NUM}-prompt.md"
+    BATCH_SUMMARY_FILE="$TMPDIR/batch-${BATCH_NUM}-summary.md"
+
+    echo -e "${BLUE}Batch $BATCH_NUM/$BATCH_COUNT${NC} (suggestions $(( START + 1 ))-$END of $TOTAL)"
+
+    # Combine suggestion files for this batch, skipping flagged PRs and annotating stable PRs
+    BATCH_HAS_SUGGESTIONS=false
+    for (( i=START; i<END; i++ )); do
+        file="${SUGGESTION_FILES[$i]}"
+        pr_num=$(echo "$MANIFEST" | jq -r --arg f "$file" '.suggestions[] | select(.file == $f) | .pr')
+
+        # Skip PRs behind feature flags entirely
+        is_flagged=false
+        for flagged_pr in "${FLAGGED_PRS[@]+"${FLAGGED_PRS[@]}"}"; do
+            if [[ "$flagged_pr" == "$pr_num" ]]; then
+                is_flagged=true
+                break
+            fi
+        done
+        if [[ "$is_flagged" == "true" ]]; then
+            log "Skipping PR #$pr_num (behind feature flag)"
+            {
+                echo "### Skipped: PR #$pr_num"
+                echo ""
+                echo "This PR is behind a feature flag and was not applied."
+                echo ""
+            } >> "$APPLY_SUMMARY_FILE"
+            continue
+        fi
+
+        BATCH_HAS_SUGGESTIONS=true
+
+        # Check if PR is already in stable
+        is_stable=false
+        for stable_pr in "${STABLE_PRS[@]+"${STABLE_PRS[@]}"}"; do
+            if [[ "$stable_pr" == "$pr_num" ]]; then
+                is_stable=true
+                break
+            fi
+        done
+        {
+            echo "## Source: $file"
+            if [[ "$is_stable" == "true" ]]; then
+                echo ""
+                echo "> **ALREADY IN STABLE**: PR #$pr_num shipped in $STABLE_TAG."
+                echo "> Do NOT add Preview or Changed-in-Preview callouts for this suggestion."
+                echo "> Apply the documentation content only, without any preview-related callouts."
+            fi
+            echo ""
+            git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || error "Suggestion file missing: $file"
+            echo ""
+            echo "---"
+            echo ""
+        } >> "$BATCH_SUGGESTIONS_FILE"
+    done
+
+    # Skip this batch if all its suggestions were flagged
+    if [[ "$BATCH_HAS_SUGGESTIONS" == "false" ]]; then
+        echo -e "  ${YELLOW}Batch $BATCH_NUM skipped (all suggestions behind feature flags)${NC}"
+        continue
+    fi
+
+    # Build auto-apply prompt for this batch
+    cat > "$BATCH_PROMPT_FILE" << 'EOF'
 # Documentation Auto-Apply Request (Preview Release)
 
 Apply all queued documentation suggestions below directly to docs files in this repository.
@@ -201,7 +359,15 @@ Before making edits, read and follow these rule files:
 6. Keep preview callout semantics correct:
    - Additive features: `> **Preview:** ...`
    - Behavior modifications: `> **Changed in Preview (vX.XXX).** ...`
+   - **Exception**: Suggestions marked "ALREADY IN STABLE" must NOT get any preview callouts.
+     These features already shipped in a stable release. Apply the content changes only.
+   - Suggestions for features behind feature flags have been pre-filtered and excluded.
+     If you encounter references to feature-flagged functionality, do not document it.
 7. If a suggestion is too ambiguous to apply safely, skip it and explain why in the summary.
+8. **Do not invent `{#kb}` or `{#action}` references.** Only use action names that already
+   appear in the existing docs files you are editing. If unsure whether an action name is
+   valid, use plain text instead. The docs build validates all action references against
+   the compiled binary and will reject unknown names.
 
 ## Output format (after making edits)
 
@@ -218,12 +384,30 @@ Do not include a patch in the response; apply edits directly to files.
 
 EOF
 
-cat "$SUGGESTIONS_FILE" >> "$APPLY_PROMPT_FILE"
+    cat "$BATCH_SUGGESTIONS_FILE" >> "$BATCH_PROMPT_FILE"
+
+    log "Running Droid auto-apply (batch $BATCH_NUM) with model: $MODEL"
+    if ! droid exec -m "$MODEL" -f "$BATCH_PROMPT_FILE" --auto high > "$BATCH_SUMMARY_FILE" 2>&1; then
+        echo "Droid exec output (batch $BATCH_NUM):"
+        cat "$BATCH_SUMMARY_FILE"
+        error "Droid exec failed on batch $BATCH_NUM. See output above."
+    fi
+
+    # Append batch summary
+    {
+        echo "### Batch $BATCH_NUM"
+        echo ""
+        cat "$BATCH_SUMMARY_FILE"
+        echo ""
+    } >> "$APPLY_SUMMARY_FILE"
+
+    echo -e "  ${GREEN}Batch $BATCH_NUM complete${NC}"
+done
+echo ""
 
-log "Running Droid auto-apply with model: $MODEL"
-droid exec -m "$MODEL" -f "$APPLY_PROMPT_FILE" > "$APPLY_SUMMARY_FILE"
+log "All batches completed, checking results..."
 
-if [[ -n "$(git status --porcelain | grep -vE '^.. docs/' || true)" ]]; then
+if [[ -n "$(git status --porcelain | grep -v '^??' | grep -vE '^.. docs/' || true)" ]]; then
     error "Auto-apply modified non-doc files. Revert and re-run."
 fi
 
@@ -232,12 +416,33 @@ if [[ -z "$(git status --porcelain docs/ | grep '^.. docs/src/' || true)" ]]; th
 fi
 
 log "Running docs formatter"
-./script/prettier
+./script/prettier --write
 
 if [[ -z "$(git status --porcelain docs/ | grep '^.. docs/src/' || true)" ]]; then
     error "No docs/src changes remain after formatting; aborting PR creation."
 fi
 
+# Validate docs build before creating PR
+if [[ "$SKIP_VALIDATION" != "true" ]]; then
+    echo "Validating docs build..."
+    log "Generating action metadata..."
+    if ! ./script/generate-action-metadata > /dev/null 2>&1; then
+        echo -e "${YELLOW}Warning:${NC} Could not generate action metadata (cargo build may have failed)."
+        echo "Skipping docs build validation. CI will still catch errors."
+    else
+        VALIDATION_DIR="$TMPDIR/docs-validation"
+        if ! mdbook build ./docs --dest-dir="$VALIDATION_DIR" 2>"$TMPDIR/validation-errors.txt"; then
+            echo ""
+            echo -e "${RED}Docs build validation failed:${NC}"
+            cat "$TMPDIR/validation-errors.txt"
+            echo ""
+            error "Fix the errors above and re-run, or use --skip-validation to bypass."
+        fi
+        echo -e "${GREEN}Docs build validation passed.${NC}"
+    fi
+    echo ""
+fi
+
 # Build PR body from suggestions
 PR_BODY_FILE="$TMPDIR/pr-body.md"
 cat > "$PR_BODY_FILE" << 'EOF'
@@ -309,7 +514,7 @@ Release Notes:
 EOF
 
 git add docs/
-git commit -m "docs: auto-apply preview release suggestions
+git commit -m "docs: Auto-apply preview release suggestions
 
 Auto-applied queued documentation suggestions from:
 $(echo "$MANIFEST" | jq -r '.suggestions[] | "- PR #\(.pr)"')
@@ -323,9 +528,8 @@ git push -u origin "$DOCS_BRANCH"
 log "Creating PR..."
 PR_URL=$(gh pr create \
     --draft \
-    --title "docs: auto-apply preview release suggestions" \
-    --body-file "$PR_BODY_FILE" \
-    --label "documentation")
+    --title "docs: Apply preview release suggestions" \
+    --body-file "$PR_BODY_FILE")
 
 echo ""
 echo -e "${GREEN}PR created:${NC} $PR_URL"
@@ -366,6 +570,7 @@ EOF
 
 Previous suggestions published in: $PR_URL"
     
+    # Force push required: replacing the orphan suggestions branch with a clean slate
     git push -f origin "${SUGGESTIONS_BRANCH}-reset:$SUGGESTIONS_BRANCH"
     git checkout "$ORIGINAL_BRANCH"
     git branch -D "${SUGGESTIONS_BRANCH}-reset"
@@ -377,9 +582,6 @@ else
     echo "Suggestions queue kept (--keep-queue). Remember to reset manually after PR is merged."
 fi
 
-# Cleanup
-
-
 echo ""
 echo -e "${GREEN}Done!${NC}"
 echo ""

script/github-check-new-issue-for-duplicates.py 🔗

@@ -89,7 +89,12 @@ def post_comment(issue_number: int, body):
 def build_duplicate_comment(matches):
     """Build the comment body for potential duplicates."""
     match_list = "\n".join(f"- #{m['number']}" for m in matches)
-    explanations = "\n\n".join(f"**#{m['number']}:** {m['explanation']}" for m in matches)
+    explanations = "\n\n".join(
+        f"**#{m['number']}:** {m['explanation']}\n\n**Shared root cause:** {m['shared_root_cause']}"
+        if m.get('shared_root_cause')
+        else f"**#{m['number']}:** {m['explanation']}"
+        for m in matches
+    )
 
     return f"""This issue appears to be a duplicate of:
 
@@ -307,7 +312,7 @@ def enrich_magnets(magnets):
     for magnet in magnets:
         data = github_api_get(f"/repos/{REPO_OWNER}/{REPO_NAME}/issues/{magnet['number']}")
         magnet["title"] = data["title"]
-        magnet["body_preview"] = (data.get("body") or "")[:500]
+        magnet["body_preview"] = (data.get("body") or "")[:1000]
 
 
 def areas_match(detected, magnet_area):
@@ -381,7 +386,7 @@ def search_for_similar_issues(issue, detected_areas, max_searches=6):
                         "title": item["title"],
                         "state": item.get("state", ""),
                         "created_at": item.get("created_at", ""),
-                        "body_preview": body[:500],
+                        "body_preview": body[:1000],
                         "source": search_type,
                     }
         except requests.RequestException as e:
@@ -414,12 +419,30 @@ def analyze_duplicates(anthropic_key, issue, magnets, search_results):
 
     system_prompt = """You analyze GitHub issues to identify potential duplicates.
 
-Given a new issue and a list of existing issues, identify which existing issues might be duplicates.
+Given a new issue and a list of existing issues, identify which existing issues are duplicates — meaning
+they are caused by the SAME BUG in the code, not just similar symptoms.
+
+CRITICAL DISTINCTION — shared symptoms vs shared root cause:
+- "models missing", "can't sign in", "editor hangs", "venv not detected" are SYMPTOMS that many
+  different bugs can produce. Two reports of the same symptom are NOT duplicates unless you can
+  identify a specific shared root cause.
+- A duplicate means: if a developer fixed the existing issue, the new issue would also be fixed.
+- If the issues just happen to be in the same feature area, or describe similar-sounding problems
+  with different specifics (different error messages, different triggers, different platforms, different
+  configurations), they are NOT duplicates.
 
 For each potential duplicate, assess confidence:
-- "high": Very likely the same issue (same root cause, same symptoms)
-- "medium": Possibly related (likely to be the same root cause)
-- Do NOT include tangentially related issues (same general area but probably different issues)
+- "high": Almost certainly the same bug. You can name a specific shared root cause, and the
+  reproduction steps / error messages / triggers are consistent.
+- "medium": Likely the same bug based on specific technical details, but some uncertainty remains.
+- Do NOT include issues that merely share symptoms, affect the same feature area, or sound similar
+  at a surface level.
+
+Examples of things that are NOT duplicates:
+- Two issues about "Copilot models not showing" — one caused by a Zed update breaking the model list,
+  the other caused by the user's plan not including those models.
+- Two issues about "Zed hangs" — one triggered by network drives, the other by large projects.
+- Two issues about "can't sign in" — one caused by a missing system package, the other by a server-side error.
 
 Output only valid JSON (no markdown code blocks) with this structure:
 {
@@ -427,13 +450,18 @@ Output only valid JSON (no markdown code blocks) with this structure:
     {
       "number": 12345,
       "confidence": "high|medium",
-      "explanation": "Brief explanation of why this might be a duplicate"
+      "shared_root_cause": "The specific bug/root cause shared by both issues",
+      "explanation": "Brief explanation with concrete evidence from both issues"
     }
   ],
   "summary": "One sentence summary of findings"
 }
 
-Only include matches with "high" or "medium" confidence. Return empty matches array if none found."""
+When in doubt, return an empty matches array. A false positive (flagging a non-duplicate) is much
+worse than a false negative (missing a real duplicate), because it wastes the time of both the
+issue author and the maintainers.
+
+Return empty matches array if none found or if you can only identify shared symptoms."""
 
     user_content = f"""## New Issue #{issue['number']}
 **Title:** {issue['title']}

script/github-track-duplicate-bot-effectiveness.py 🔗

@@ -24,6 +24,7 @@ import functools
 import os
 import re
 import sys
+from datetime import datetime, timezone
 
 import requests
 
@@ -39,6 +40,22 @@ BOT_START_DATE = "2026-02-18"
 NEEDS_TRIAGE_LABEL = "state:needs triage"
 DEFAULT_PROJECT_NUMBER = 76
 VALID_CLOSED_AS_VALUES = {"duplicate", "not_planned", "completed"}
+# Add a new tuple when you deploy a new version of the bot that you want to
+# keep track of (e.g. the prompt gets a rewrite or the model gets swapped).
+# Newest first, please. The datetime is for the deployment time (merge to main).
+BOT_VERSION_TIMELINE = [
+    ("v2", datetime(2026, 2, 26, 14, 9, tzinfo=timezone.utc)),
+    ("v1", datetime(2026, 2, 18, tzinfo=timezone.utc)),
+]
+
+
+def bot_version_for_time(date_string):
+    """Return the bot version that was active at the given ISO 8601 timestamp."""
+    timestamp = datetime.fromisoformat(date_string.replace("Z", "+00:00"))
+    for version, deployed in BOT_VERSION_TIMELINE:
+        if timestamp >= deployed:
+            return version
+    return BOT_VERSION_TIMELINE[-1][0]
 
 
 def github_api_get(path, params=None):
@@ -75,13 +92,14 @@ def fetch_issue(issue_number):
         "node_id": data["node_id"],
         "author": (data.get("user") or {}).get("login", ""),
         "type_name": (data.get("type") or {}).get("name"),
+        "created_at": data.get("created_at", ""),
     }
 
 
-def get_bot_duplicate_comment(issue_number):
-    """Get the bot's duplicate-detection comment body from an issue.
+def get_bot_comment_with_time(issue_number):
+    """Get the bot's duplicate-detection comment and its timestamp from an issue.
 
-    Returns the comment body if found, else None.
+    Returns {"body": str, "created_at": str} if found, else None.
     """
     comments_path = f"/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}/comments"
     page = 1
@@ -90,7 +108,7 @@ def get_bot_duplicate_comment(issue_number):
             author = (comment.get("user") or {}).get("login", "")
             body = comment.get("body", "")
             if author == BOT_LOGIN and body.startswith(BOT_COMMENT_PREFIX):
-                return body
+                return {"body": body, "created_at": comment.get("created_at", "")}
         page += 1
     return None
 
@@ -100,8 +118,8 @@ def parse_suggested_issues(comment_body):
     return [int(match) for match in re.findall(r"^- #(\d+)", comment_body, re.MULTILINE)]
 
 
-def github_api_graphql(query, variables=None):
-    """Execute a GitHub GraphQL query. Raises on errors."""
+def github_api_graphql(query, variables=None, partial_errors_ok=False):
+    """Execute a GitHub GraphQL query. Raises on errors unless partial_errors_ok is set."""
     response = requests.post(
         GRAPHQL_URL,
         headers=GITHUB_HEADERS,
@@ -110,43 +128,51 @@ def github_api_graphql(query, variables=None):
     response.raise_for_status()
     data = response.json()
     if "errors" in data:
-        raise RuntimeError(f"GraphQL errors: {data['errors']}")
+        if not partial_errors_ok or "data" not in data:
+            raise RuntimeError(f"GraphQL errors: {data['errors']}")
+        print(f"  GraphQL partial errors (ignored): {data['errors']}")
     return data["data"]
 
 
-def get_closed_as_duplicate_of(issue_number):
-    """Get the issue number this issue was closed as a duplicate of.
+def find_canonical_among(duplicate_number, candidates):
+    """Check if any candidate issue has duplicate_number marked as a duplicate.
 
-    Uses the timeline to find the most recent MarkedAsDuplicateEvent.
-    Returns the original issue number, or None.
+    The MarkedAsDuplicateEvent lives on the canonical issue's timeline, not the
+    duplicate's. So to find which canonical issue our duplicate was closed against,
+    we check each candidate's timeline for a MarkedAsDuplicateEvent whose
+    `duplicate` field matches our issue.
 
-    Note: not all "closed as duplicate" issues have a MarkedAsDuplicateEvent.
-    If the closer used the "Close as duplicate" button without separately
-    marking the duplicate relationship, no event is created and this returns
-    None. The caller handles this by flagging the item for manual review.
+    Returns the matching canonical issue number, or None.
     """
+    if not candidates:
+        return None
+
     data = github_api_graphql(
         """
-        query($owner: String!, $repo: String!, $number: Int!) {
+        query($owner: String!, $repo: String!, $numbers: [Int!]!) {
           repository(owner: $owner, name: $repo) {
-            issue(number: $number) {
-              timelineItems(last: 10, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) {
-                nodes {
-                  ... on MarkedAsDuplicateEvent {
-                    canonical { ... on Issue { number } }
-                  }
-                }
-              }
-            }
+            PLACEHOLDER
           }
         }
-        """,
-        {"owner": REPO_OWNER, "repo": REPO_NAME, "number": issue_number},
+        """.replace("PLACEHOLDER", "\n            ".join(
+            f'issue_{number}: issue(number: {number}) {{'
+            f' timelineItems(last: 50, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) {{'
+            f' nodes {{ ... on MarkedAsDuplicateEvent {{ duplicate {{ ... on Issue {{ number }} }} }} }} }} }}'
+            for number in candidates
+        )),
+        {"owner": REPO_OWNER, "repo": REPO_NAME, "numbers": list(candidates)},
+        partial_errors_ok=True,
     )
-    nodes = data["repository"]["issue"]["timelineItems"]["nodes"]
-    for node in reversed(nodes):
-        if original := (node.get("canonical") or {}).get("number"):
-            return original
+
+    repo = data["repository"]
+    for candidate in candidates:
+        issue_data = repo.get(f"issue_{candidate}")
+        if not issue_data:
+            continue
+        for node in issue_data["timelineItems"]["nodes"]:
+            dup_number = (node.get("duplicate") or {}).get("number")
+            if dup_number == duplicate_number:
+                return candidate
     return None
 
 
@@ -261,7 +287,7 @@ def set_field_value(item_id, field_name, value):
     )
 
 
-def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None):
+def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None, bot_comment_time=None):
     """Add an issue to the project board (or update it if already there), setting field values."""
     item_id = find_project_item(issue_node_id)
     if item_id:
@@ -279,6 +305,9 @@ def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="A
     if notes:
         set_field_value(item_id, "Notes", notes)
 
+    if bot_comment_time:
+        set_field_value(item_id, "Bot version", bot_version_for_time(bot_comment_time))
+
     return item_id
 
 
@@ -296,14 +325,14 @@ def classify_closed(issue_number, closer_login, state_reason):
         print(f"  Skipping: author '{author}' is a staff member")
         return
 
-    bot_comment = get_bot_duplicate_comment(issue_number)
+    bot_comment = get_bot_comment_with_time(issue_number)
     bot_commented = bot_comment is not None
     print(f"  Bot commented: {bot_commented}")
 
     closer_is_author = closer_login == author
 
     if bot_commented and closer_is_author:
-        classify_as_success(issue, state_reason)
+        classify_as_success(issue, bot_comment, state_reason)
     elif bot_commented and not closer_is_author:
         # Only authors, staff, and triagers can close issues, so
         # a non-author closer is always someone with elevated permissions.
@@ -314,7 +343,7 @@ def classify_closed(issue_number, closer_login, state_reason):
         print("  Skipping: no bot comment and not closed as duplicate")
 
 
-def classify_as_success(issue, state_reason):
+def classify_as_success(issue, bot_comment, state_reason):
     """Author closed their own issue after the bot commented."""
     if state_reason == "duplicate":
         status = "Auto-classified"
@@ -334,6 +363,7 @@ def classify_as_success(issue, state_reason):
         closed_as=state_reason,
         status=status,
         notes=notes,
+        bot_comment_time=bot_comment["created_at"],
     )
 
 
@@ -350,46 +380,48 @@ def classify_non_author_closed(issue, bot_comment, state_reason):
             closed_as=state_reason,
             status="Needs review",
             notes=notes,
+            bot_comment_time=bot_comment["created_at"],
         )
 
 
 def classify_as_assist(issue, bot_comment):
     """Staff member closed as duplicate after the bot commented. Check if the dup matches."""
-    suggested = parse_suggested_issues(bot_comment)
+    # bot_comment is a dict carrying at least "body" and "created_at";
+    # suggestions are parsed out of the comment body text.
+    suggested = parse_suggested_issues(bot_comment["body"])
+    # Bail out early when no issue numbers could be parsed from the bot's
+    # comment: record the item for manual review rather than guessing.
+    if not suggested:
+        print("  -> Assist, needs review (could not parse bot suggestions)")
+        add_or_update_project_item(
+            issue["node_id"], outcome="Assist", closed_as="duplicate",
+            status="Needs review", notes="Could not parse bot suggestions",
+            bot_comment_time=bot_comment["created_at"])
+        return
+
+    # The timeline lookup is best-effort: any network or API failure leaves
+    # original=None and the item falls through to "Needs review" below.
     original = None
     try:
-        original = get_closed_as_duplicate_of(issue["number"])
+        original = find_canonical_among(issue["number"], suggested)
     except (requests.RequestException, RuntimeError) as error:
-        print(f"  Warning: failed to get the original-for the duplicate issue: {error}")
-
-    if original and suggested:
-        if original in suggested:
-            status = "Auto-classified"
-            notes = None
-            print(f"  -> Assist (original #{original} matches bot suggestion)")
-        else:
-            status = "Needs review"
-            suggested_str = ", ".join(f"#{number}" for number in suggested)
-            notes = f"Bot suggested {suggested_str}; closed as dup of #{original}"
-            print(f"  -> Possible Assist, needs review ({notes})")
+        print(f"  Warning: failed to query candidate timelines: {error}")
+
+    # A canonical issue found among the bot's suggestions counts as a
+    # confirmed assist; otherwise a human has to review the classification.
+    if original:
+        status = "Auto-classified"
+        notes = None
+        print(f"  -> Assist (original #{original} matches bot suggestion)")
     else:
-        # couldn't determine original or no suggestions parsed
         status = "Needs review"
-        if not original:
-            notes = "Could not determine original issue from timeline"
-        else:
-            notes = f"Closed as dup of #{original}; could not parse bot suggestions"
+        suggested_str = ", ".join(f"#{number}" for number in suggested)
+        notes = f"Bot suggested {suggested_str}; none matched as canonical"
         print(f"  -> Possible Assist, needs review ({notes})")
 
     add_or_update_project_item(
-        issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes)
+        issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes,
+        bot_comment_time=bot_comment["created_at"])
 
 
 def classify_as_missed_opportunity(issue):
     """Issue closed as duplicate but the bot never commented."""
     print("  -> Missed opportunity")
+    # No bot comment exists for this outcome, so the issue's creation time is
+    # passed as bot_comment_time — presumably to attribute the bot version
+    # that was live when the issue was filed. NOTE(review): confirm intent.
     add_or_update_project_item(
-        issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified")
+        issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified",
+        bot_comment_time=issue["created_at"])
 
 
 def classify_open():
@@ -419,16 +451,18 @@ def classify_open():
                 f"type is {type_name}" if type_name not in ("Bug", "Crash")
                 else f"author {author} is staff" if is_staff_member(author)
                 else "already on the board" if find_project_item(node_id)
-                else "no bot duplicate comment found" if not get_bot_duplicate_comment(number)
+                else "no bot duplicate comment found" if not (bot_comment := get_bot_comment_with_time(number))
                 else None
             )
+
             if skip_reason:
                 print(f"  #{number}: skipping, {skip_reason}")
                 skipped += 1
                 continue
 
             print(f"  #{number}: adding as Noise")
-            add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified")
+            add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified",
+                                       bot_comment_time=bot_comment["created_at"])
             added += 1
         except Exception as error:  # broad catch: one issue failing shouldn't stop the sweep
             print(f"  #{number}: error processing issue, skipping: {error}")

script/linux 🔗

@@ -27,13 +27,15 @@ if [[ -n $apt ]]; then
     g++
     libasound2-dev
     libfontconfig-dev
+    libgit2-dev
+    libglib2.0-dev
+    libssl-dev
+    libva-dev
+    libvulkan1
     libwayland-dev
     libx11-xcb-dev
     libxkbcommon-x11-dev
-    libssl-dev
     libzstd-dev
-    libvulkan1
-    libgit2-dev
     make
     cmake
     clang
@@ -55,11 +57,21 @@ if [[ -n $apt ]]; then
   elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|Linux Mint 21|.+22\.04)' /etc/os-release); then
     deps+=( mold libstdc++-12-dev )
   elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+11|Linux Mint 20|.+20\.04)' /etc/os-release); then
-    deps+=( libstdc++-10-dev )
+    # Ubuntu 20.04 ships clang-10 and libstdc++-10 which lack adequate C++20
+    # support for building webrtc-sys (requires -std=c++20, lambdas in
+    # unevaluated contexts from clang 17+, and working std::ranges in the
+    # stdlib). clang-18 is available in focal-security/universe as an official
+    # backport, and libstdc++-11-dev from the ubuntu-toolchain-r PPA provides
+    # headers with working pointer_traits/contiguous_range.
+    # Note: the prebuilt libwebrtc.a is compiled with libstdc++, so we must
+    # use libstdc++ (not libc++) to avoid ABI mismatches at link time.
+    $maysudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
+    deps+=( clang-18 libstdc++-11-dev )
   fi
 
   $maysudo "$apt" update
   $maysudo "$apt" install -y "${deps[@]}"
+
   finalize
   exit 0
 fi
@@ -78,6 +90,8 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then
     cmake
     alsa-lib-devel
     fontconfig-devel
+    glib2-devel
+    libva-devel
     wayland-devel
     libxcb-devel
     libxkbcommon-x11-devel
@@ -141,7 +155,9 @@ if [[ -n $zyp ]]; then
     cmake
     fontconfig-devel
     gcc
+    libva-devel
     gcc-c++
+    glib2-devel
     git
     gzip
     jq
@@ -175,6 +191,8 @@ if [[ -n $pacman ]]; then
     cmake
     alsa-lib
     fontconfig
+    glib2
+    libva
     wayland
     libgit2
     libxcb
@@ -205,6 +223,8 @@ if [[ -n $xbps ]]; then
     gcc
     alsa-lib-devel
     fontconfig-devel
+    glib-devel
+    libva-devel
     libxcb-devel
     libxkbcommon-devel
     libzstd-devel
@@ -226,11 +246,13 @@ if [[ -n $emerge ]]; then
   deps=(
     app-arch/zstd
     app-misc/jq
+    dev-libs/glib
     dev-libs/openssl
     dev-libs/wayland
     dev-util/cmake
     media-libs/alsa-lib
     media-libs/fontconfig
+    media-libs/libva
     media-libs/vulkan-loader
     x11-libs/libxcb
     x11-libs/libxkbcommon

script/terms/terms.rtf 🔗

@@ -2,128 +2,194 @@
 {\colortbl;\red255\green0\blue0;\red0\green0\blue255;}
 \widowctrl\hyphauto
 
-{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Zed End User Terms\par}
+{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Terms of Service\par}
 {\pard \ql \f0 \sa180 \li0 \fi0 \par}
-{\pard \ql \f0 \sa180 \li0 \fi0 PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. ACCESS TO AND USE OF THE SOLUTION\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. TERMS APPLICABLE TO THE EDITOR\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. License Grant\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. License Limitations\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. Open Source Software\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: {\field{\*\fldinst{HYPERLINK "https://github.com/zed-industries/zed"}}{\fldrslt{\ul
-https://github.com/zed-industries/zed
-}}}
- (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. TERMS APPLICABLE TO THE ZED SERVICE\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. Access to and Scope of Zed Service\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. Restrictions\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Customer Data\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 You are solely responsible for Customer Data including, but not limited to: (a) compliance with all applicable laws and this Agreement; (b) any claims relating to Customer Data; and (c) any claims that Customer Data infringes, misappropriates, or otherwise violates the rights of any third party. You agree and acknowledge that Customer Data may be irretrievably deleted if Your account is terminated. For purposes of this Agreement, "Customer Data" shall mean any data, information or other material provided, uploaded, or submitted by You to the Zed Service in the course of using the Zed Service. Notwithstanding anything to the contrary, You represent and warrant that You will not transfer or make available to Zed any personally identifiable information or related information subject to applicable data privacy laws or regulations, unless otherwise agreed to in writing by Zed.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.1. Customer Data Made Available to Zed\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein:\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.2. Usage Data\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 To improve the Editor and understand how You use it, Zed optionally collects the following usage data:\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file extensions of opened files;\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab features and tools You use within the Editor;\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab project statistics (e.g., number of files); and\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab frameworks detected in Your projects\sa180\sa180\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 (a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. See {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/telemetry"}}{\fldrslt{\ul
-https://zed.dev/docs/telemetry
-}}}
- for more.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.3. Crash Reports\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.4. User Content\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \u8226 ? You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. Such data and information may include, but is not limited to any of the following:\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file contents and associated metadata (e.g., filename, paths, size, timestamps);\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab source control history, comments and metadata (e.g., git history, commit messages);\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab configuration data (e.g., settings, keymaps);\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab anything typed, pasted and/or displayed on screen while using the Editor;\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (e)\tx360\tab derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches);\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (f)\tx360\tab metadata, code and other derivative works of the above returned by language servers and other local tooling; and\sa180\par}
-{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (g)\tx360\tab metadata, code and other derivative works of the above returned by services integrated with the Zed Editor\sa180\sa180\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 (a-g collectively, "User Content").\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5. Handling of User Content\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.1. Zed Collaboration Services\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.2. Other Services\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.3. Zed AI Services\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the \u8220"Output\u8221"). Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.4. Improvement Feedback\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. You may opt-out of sharing Model Improvement Feedback at any time.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 For more information on Zed Edit Predictions please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul
-https://zed.dev/docs/ai/ai-improvement
-}}}
-\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the \u8220"Agent Improvement Feedback\u8221") with Zed, and Zed may use the same to improve the Agent Panel and related Output. Zed will only collect Agent Improvement Feedback when You elect to share the same.\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 For more information regarding the Agent Panel please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul
-https://zed.dev/docs/ai/ai-improvement
-}}}
-\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.4. Privacy Policy\par}
-{\pard \ql \f0 \sa180 \li0 \fi0 You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/privacy-policy"}}{\fldrslt{\ul
-https://zed.dev/privacy-policy
+{\pard \ql \f0 \sa180 \li0 \fi0 {\b Last Updated}: March 2, 2026\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 Welcome, and thank you for your interest in Zed Industries, Inc. (\u8220"{\b Zed},\u8221" \u8220"{\b we},\u8221" or \u8220"{\b us}\u8221") and our website at {\field{\*\fldinst{HYPERLINK "https://www.zed.dev"}}{\fldrslt{\ul
+www.zed.dev
+}}}
+, along with our downloadable Zed software (the \u8220"{\b Software}\u8221") and related subscription service (the \u8220"{\b Service}\u8221"). These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 Please read the following Terms carefully.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 {\b By accessing or using the Service, you (\u8220"You\u8221" or \u8220"Customer\u8221")\u160 ?agree to these Terms of Service, the Data Processing Addendum (\u8220"DPA\u8221"), available upon request,\u160 ?and Zed\u8217's {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul
+Privacy Policy
+}}}
+ (collectively, the \u8220"Terms\u8221").}\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 If you are not eligible, or do not agree to the Terms, you may not access or use the Service.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 {\b ARBITRATION\u160 ?NOTICE}. Except for certain kinds of disputes described in Section\u160 ?15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING.\u160 ?ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a).\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. Overview\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 Subject to these Terms, Zed will permit Customer to access and use Zed\u8217's AI-enabled software-as-a-service offering (the \u8220"{\b Service}\u8221"), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed\u8217's open source code editing software (\u8220"{\b Software}\u8221").\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. Service\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. Eligibility\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?must be at least 18 years old\u160 ?to use the Service. By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer\u8217's registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer\u8217's behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. Access Grant\par}

tooling/xtask/Cargo.toml 🔗

@@ -9,6 +9,7 @@ license = "GPL-3.0-or-later"
 workspace = true
 
 [dependencies]
+annotate-snippets = "0.12.1"
 anyhow.workspace = true
 backtrace.workspace = true
 cargo_metadata.workspace = true
@@ -17,7 +18,11 @@ clap = { workspace = true, features = ["derive"] }
 toml.workspace = true
 indoc.workspace = true
 indexmap.workspace = true
+itertools.workspace = true
+regex.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+serde_yaml = "0.9.34"
+strum.workspace = true
 toml_edit.workspace = true
 gh-workflow.workspace = true

tooling/xtask/src/main.rs 🔗

@@ -20,7 +20,10 @@ enum CliCommand {
     PackageConformity(tasks::package_conformity::PackageConformityArgs),
     /// Publishes GPUI and its dependencies to crates.io.
     PublishGpui(tasks::publish_gpui::PublishGpuiArgs),
+    /// Builds GPUI web examples and serves them.
+    WebExamples(tasks::web_examples::WebExamplesArgs),
     Workflows(tasks::workflows::GenerateWorkflowArgs),
+    CheckWorkflows(tasks::workflow_checks::WorkflowValidationArgs),
 }
 
 fn main() -> Result<()> {
@@ -33,6 +36,8 @@ fn main() -> Result<()> {
             tasks::package_conformity::run_package_conformity(args)
         }
         CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args),
+        CliCommand::WebExamples(args) => tasks::web_examples::run_web_examples(args),
         CliCommand::Workflows(args) => tasks::workflows::run_workflows(args),
+        CliCommand::CheckWorkflows(args) => tasks::workflow_checks::validate(args),
     }
 }

tooling/xtask/src/tasks.rs 🔗

@@ -2,4 +2,6 @@ pub mod clippy;
 pub mod licenses;
 pub mod package_conformity;
 pub mod publish_gpui;
+pub mod web_examples;
+pub mod workflow_checks;
 pub mod workflows;

tooling/xtask/src/tasks/web_examples.rs 🔗

@@ -0,0 +1,338 @@
+#![allow(clippy::disallowed_methods, reason = "tooling is exempt")]
+
+use std::io::Write;
+use std::path::Path;
+use std::process::Command;
+
+use anyhow::{Context as _, Result, bail};
+use clap::Parser;
+
+#[derive(Parser)]
+pub struct WebExamplesArgs {
+    #[arg(long)]
+    pub release: bool,
+    #[arg(long, default_value = "8080")]
+    pub port: u16,
+    #[arg(long)]
+    pub no_serve: bool,
+}
+
+fn check_program(binary: &str, install_hint: &str) -> Result<()> {
+    match Command::new(binary).arg("--version").output() {
+        Ok(output) if output.status.success() => Ok(()),
+        _ => bail!("`{binary}` not found. Install with: {install_hint}"),
+    }
+}
+
+fn discover_examples() -> Result<Vec<String>> {
+    let examples_dir = Path::new("crates/gpui/examples");
+    let mut names = Vec::new();
+
+    for entry in std::fs::read_dir(examples_dir).context("failed to read crates/gpui/examples")? {
+        let path = entry?.path();
+        if path.extension().and_then(|e| e.to_str()) == Some("rs") {
+            if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
+                names.push(stem.to_string());
+            }
+        }
+    }
+
+    if names.is_empty() {
+        bail!("no examples found in crates/gpui/examples");
+    }
+
+    names.sort();
+    Ok(names)
+}
+
+pub fn run_web_examples(args: WebExamplesArgs) -> Result<()> {
+    let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
+    let profile = if args.release { "release" } else { "debug" };
+    let out_dir = "target/web-examples";
+
+    check_program("wasm-bindgen", "cargo install wasm-bindgen-cli")?;
+
+    let examples = discover_examples()?;
+    eprintln!(
+        "Building {} example(s) for wasm32-unknown-unknown ({profile})...\n",
+        examples.len()
+    );
+
+    std::fs::create_dir_all(out_dir).context("failed to create output directory")?;
+
+    eprintln!("Building all examples...");
+
+    let mut cmd = Command::new(&cargo);
+    cmd.args([
+        "build",
+        "--target",
+        "wasm32-unknown-unknown",
+        "-p",
+        "gpui",
+        "--keep-going",
+    ]);
+    // RUSTC_BOOTSTRAP=1 lets a stable toolchain accept unstable features needed
+    // for the wasm32 example build; remove once the build works on plain stable.
+    cmd.env("RUSTC_BOOTSTRAP", "1");
+    for name in &examples {
+        cmd.args(["--example", name]);
+    }
+    if args.release {
+        cmd.arg("--release");
+    }
+
+    let _ = cmd.status().context("failed to run cargo build")?;
+
+    // Run wasm-bindgen on each .wasm that was produced.
+    let mut succeeded: Vec<String> = Vec::new();
+    let mut failed: Vec<String> = Vec::new();
+
+    for name in &examples {
+        let wasm_path = format!("target/wasm32-unknown-unknown/{profile}/examples/{name}.wasm");
+        if !Path::new(&wasm_path).exists() {
+            eprintln!("[{name}] SKIPPED (build failed)");
+            failed.push(name.clone());
+            continue;
+        }
+
+        eprintln!("[{name}] Running wasm-bindgen...");
+
+        let example_dir = format!("{out_dir}/{name}");
+        std::fs::create_dir_all(&example_dir)
+            .with_context(|| format!("failed to create {example_dir}"))?;
+
+        let status = Command::new("wasm-bindgen")
+            .args([
+                &wasm_path,
+                "--target",
+                "web",
+                "--no-typescript",
+                "--out-dir",
+                &example_dir,
+                "--out-name",
+                name,
+            ])
+            // Same RUSTC_BOOTSTRAP=1 escape hatch as the cargo build above.
+            .env("RUSTC_BOOTSTRAP", "1")
+            .status()
+            .context("failed to run wasm-bindgen")?;
+        if !status.success() {
+            eprintln!("[{name}] SKIPPED (wasm-bindgen failed)");
+            failed.push(name.clone());
+            continue;
+        }
+
+        // Write per-example index.html.
+        let html_path = format!("{example_dir}/index.html");
+        std::fs::File::create(&html_path)
+            .and_then(|mut file| file.write_all(make_example_html(name).as_bytes()))
+            .with_context(|| format!("failed to write {html_path}"))?;
+
+        eprintln!("[{name}] OK");
+        succeeded.push(name.clone());
+    }
+
+    if succeeded.is_empty() {
+        bail!("all {} examples failed to build", examples.len());
+    }
+
+    let example_names: Vec<&str> = succeeded.iter().map(|s| s.as_str()).collect();
+    let index_path = format!("{out_dir}/index.html");
+    std::fs::File::create(&index_path)
+        .and_then(|mut file| file.write_all(make_gallery_html(&example_names).as_bytes()))
+        .context("failed to write index.html")?;
+
+    if args.no_serve {
+        return Ok(());
+    }
+
+    // Serve with COEP/COOP headers required for WebGPU / SharedArrayBuffer.
+    eprintln!("Serving on http://127.0.0.1:{}...", args.port);
+
+    let server_script = format!(
+        r#"
+import http.server
+class Handler(http.server.SimpleHTTPRequestHandler):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, directory="{out_dir}", **kwargs)
+    def end_headers(self):
+        self.send_header("Cross-Origin-Embedder-Policy", "require-corp")
+        self.send_header("Cross-Origin-Opener-Policy", "same-origin")
+        super().end_headers()
+http.server.HTTPServer(("127.0.0.1", {port}), Handler).serve_forever()
+"#,
+        port = args.port,
+    );
+
+    let status = Command::new("python3")
+        .args(["-c", &server_script])
+        .status()
+        .context("failed to run python3 http server (is python3 installed?)")?;
+    if !status.success() {
+        bail!("python3 http server exited with: {status}");
+    }
+
+    Ok(())
+}
+
+fn make_example_html(name: &str) -> String {
+    format!(
+        r#"<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>GPUI Web: {name}</title>
+    <style>
+        * {{ margin: 0; padding: 0; box-sizing: border-box; }}
+        html, body {{
+            width: 100%; height: 100%; overflow: hidden;
+            background: #1e1e2e; color: #cdd6f4;
+            font-family: system-ui, -apple-system, sans-serif;
+        }}
+        canvas {{ display: block; width: 100%; height: 100%; }}
+        #loading {{
+            position: fixed; inset: 0;
+            display: flex; align-items: center; justify-content: center;
+            font-size: 1.25rem; opacity: 0.6;
+        }}
+        #loading.hidden {{ display: none; }}
+    </style>
+</head>
+<body>
+    <div id="loading">Loading {name}…</div>
+    <script type="module">
+        import init from './{name}.js';
+        await init();
+        document.getElementById('loading').classList.add('hidden');
+    </script>
+</body>
+</html>
+"#
+    )
+}
+
+fn make_gallery_html(examples: &[&str]) -> String {
+    let mut buttons = String::new();
+    for name in examples {
+        buttons.push_str(&format!(
+            "                <button class=\"example-btn\" data-name=\"{name}\">{name}</button>\n"
+        ));
+    }
+
+    let first = examples.first().copied().unwrap_or("hello_web");
+
+    format!(
+        r##"<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>GPUI Web Examples</title>
+    <style>
+        * {{ margin: 0; padding: 0; box-sizing: border-box; }}
+        html, body {{
+            width: 100%; height: 100%; overflow: hidden;
+            background: #1e1e2e; color: #cdd6f4;
+            font-family: system-ui, -apple-system, sans-serif;
+        }}
+        #app {{ display: flex; width: 100%; height: 100%; }}
+
+        #sidebar {{
+            width: 240px; min-width: 240px;
+            background: #181825;
+            border-right: 1px solid #313244;
+            display: flex; flex-direction: column;
+        }}
+        #sidebar-header {{
+            padding: 16px 14px 12px;
+            font-size: 0.8rem; font-weight: 700;
+            text-transform: uppercase; letter-spacing: 0.08em;
+            color: #a6adc8; border-bottom: 1px solid #313244;
+        }}
+        #sidebar-header span {{
+            font-size: 1rem; text-transform: none; letter-spacing: normal;
+            color: #cdd6f4; display: block; margin-top: 2px;
+        }}
+        #example-list {{
+            flex: 1; overflow-y: auto; padding: 8px 0;
+        }}
+        .example-btn {{
+            display: block; width: 100%;
+            padding: 8px 14px; border: none;
+            background: transparent; color: #bac2de;
+            font-size: 0.85rem; text-align: left;
+            cursor: pointer;
+            font-family: 'SF Mono', 'Fira Code', 'Cascadia Code', monospace;
+        }}
+        .example-btn:hover {{ background: #313244; color: #cdd6f4; }}
+        .example-btn.active {{ background: #45475a; color: #f5e0dc; font-weight: 600; }}
+
+        #main {{ flex: 1; display: flex; flex-direction: column; min-width: 0; }}
+        #toolbar {{
+            height: 40px; display: flex; align-items: center;
+            padding: 0 16px; gap: 12px;
+            background: #1e1e2e; border-bottom: 1px solid #313244;
+            font-size: 0.8rem; color: #a6adc8;
+        }}
+        #current-name {{
+            font-weight: 600; color: #cdd6f4;
+            font-family: 'SF Mono', 'Fira Code', 'Cascadia Code', monospace;
+        }}
+        #open-tab {{
+            margin-left: auto; padding: 4px 10px;
+            border: 1px solid #585b70; border-radius: 4px;
+            background: transparent; color: #a6adc8;
+            font-size: 0.75rem; cursor: pointer;
+            text-decoration: none;
+        }}
+        #open-tab:hover {{ background: #313244; color: #cdd6f4; }}
+        #viewer {{ flex: 1; border: none; width: 100%; background: #11111b; }}
+    </style>
+</head>
+<body>
+    <div id="app">
+        <div id="sidebar">
+            <div id="sidebar-header">
+                GPUI Examples
+                <span>{count} available</span>
+            </div>
+            <div id="example-list">
+{buttons}            </div>
+        </div>
+        <div id="main">
+            <div id="toolbar">
+                <span id="current-name">{first}</span>
+                <a id="open-tab" href="./{first}/" target="_blank">Open in new tab ↗</a>
+            </div>
+            <iframe id="viewer" src="./{first}/"></iframe>
+        </div>
+    </div>
+    <script>
+        const buttons = document.querySelectorAll('.example-btn');
+        const viewer  = document.getElementById('viewer');
+        const nameEl  = document.getElementById('current-name');
+        const openEl  = document.getElementById('open-tab');
+
+        function select(name) {{
+            buttons.forEach(b => b.classList.toggle('active', b.dataset.name === name));
+            viewer.src = './' + name + '/';
+            nameEl.textContent = name;
+            openEl.href = './' + name + '/';
+            history.replaceState(null, '', '#' + name);
+        }}
+
+        buttons.forEach(b => b.addEventListener('click', () => select(b.dataset.name)));
+
+        const hash = location.hash.slice(1);
+        if (hash && [...buttons].some(b => b.dataset.name === hash)) {{
+            select(hash);
+        }} else {{
+            select('{first}');
+        }}
+    </script>
+</body>
+</html>
+"##,
+        count = examples.len(),
+    )
+}

tooling/xtask/src/tasks/workflow_checks.rs 🔗

@@ -0,0 +1,118 @@
+mod check_run_patterns;
+
+use std::{fs, path::PathBuf};
+
+use annotate_snippets::Renderer;
+use anyhow::{Result, anyhow};
+use clap::Parser;
+use itertools::{Either, Itertools};
+use serde_yaml::Value;
+use strum::IntoEnumIterator;
+
+use crate::tasks::{
+    workflow_checks::check_run_patterns::{
+        RunValidationError, WorkflowFile, WorkflowValidationError,
+    },
+    workflows::WorkflowType,
+};
+
+pub use check_run_patterns::validate_run_command;
+
+#[derive(Default, Parser)]
+pub struct WorkflowValidationArgs {}
+
+pub fn validate(_: WorkflowValidationArgs) -> Result<()> {
+    let (parsing_errors, file_errors): (Vec<_>, Vec<_>) = get_all_workflow_files()
+        .map(check_workflow)
+        .flat_map(Result::err)
+        .partition_map(|error| match error {
+            WorkflowError::ParseError(error) => Either::Left(error),
+            WorkflowError::ValidationError(error) => Either::Right(error),
+        });
+
+    if !parsing_errors.is_empty() {
+        Err(anyhow!(
+            "Failed to read or parse some workflow files: {}",
+            parsing_errors.into_iter().join("\n")
+        ))
+    } else if !file_errors.is_empty() {
+        let errors: Vec<_> = file_errors
+            .iter()
+            .map(|error| error.annotation_group())
+            .collect();
+
+        let renderer =
+            Renderer::styled().decor_style(annotate_snippets::renderer::DecorStyle::Ascii);
+        println!("{}", renderer.render(errors.as_slice()));
+
+        Err(anyhow!("Workflow checks failed!"))
+    } else {
+        Ok(())
+    }
+}
+
+enum WorkflowError {
+    ParseError(anyhow::Error),
+    ValidationError(Box<WorkflowValidationError>),
+}
+
+fn get_all_workflow_files() -> impl Iterator<Item = PathBuf> {
+    WorkflowType::iter()
+        .map(|workflow_type| workflow_type.folder_path())
+        .flat_map(|folder_path| {
+            fs::read_dir(folder_path).into_iter().flat_map(|entries| {
+                entries
+                    .flat_map(Result::ok)
+                    .map(|entry| entry.path())
+                    .filter(|path| {
+                        path.extension()
+                            .is_some_and(|ext| ext == "yaml" || ext == "yml")
+                    })
+            })
+        })
+}
+
+fn check_workflow(workflow_file_path: PathBuf) -> Result<(), WorkflowError> {
+    fn collect_errors(
+        iter: impl Iterator<Item = Result<(), Vec<RunValidationError>>>,
+    ) -> Result<(), Vec<RunValidationError>> {
+        Some(iter.flat_map(Result::err).flatten().collect::<Vec<_>>())
+            .filter(|errors| !errors.is_empty())
+            .map_or(Ok(()), Err)
+    }
+
+    fn check_recursive(key: &Value, value: &Value) -> Result<(), Vec<RunValidationError>> {
+        match value {
+            Value::Mapping(mapping) => collect_errors(
+                mapping
+                    .into_iter()
+                    .map(|(key, value)| check_recursive(key, value)),
+            ),
+            Value::Sequence(sequence) => collect_errors(
+                sequence
+                    .into_iter()
+                    .map(|value| check_recursive(key, value)),
+            ),
+            Value::String(string) => check_string(key, string).map_err(|error| vec![error]),
+            Value::Null | Value::Bool(_) | Value::Number(_) | Value::Tagged(_) => Ok(()),
+        }
+    }
+
+    let file_content =
+        WorkflowFile::load(&workflow_file_path).map_err(WorkflowError::ParseError)?;
+
+    check_recursive(&Value::Null, &file_content.parsed_content).map_err(|errors| {
+        WorkflowError::ValidationError(Box::new(WorkflowValidationError::new(
+            errors,
+            file_content,
+            workflow_file_path,
+        )))
+    })
+}
+
+fn check_string(key: &Value, value: &str) -> Result<(), RunValidationError> {
+    match key {
+        Value::String(key) if key == "run" => validate_run_command(value),
+        _ => Ok(()),
+    }
+}

tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs 🔗

@@ -0,0 +1,124 @@
+use annotate_snippets::{AnnotationKind, Group, Level, Snippet};
+use anyhow::{Result, anyhow};
+use regex::Regex;
+use serde_yaml::Value;
+use std::{
+    collections::HashMap,
+    fs,
+    ops::Range,
+    path::{Path, PathBuf},
+    sync::LazyLock,
+};
+
+static GITHUB_INPUT_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(r#"\$\{\{[[:blank:]]*([[:alnum:]]|[[:punct:]])+?[[:blank:]]*\}\}"#)
+        .expect("Should compile")
+});
+
+pub struct WorkflowFile {
+    raw_content: String,
+    pub parsed_content: Value,
+}
+
+impl WorkflowFile {
+    pub fn load(workflow_file_path: &Path) -> Result<Self> {
+        fs::read_to_string(workflow_file_path)
+            .map_err(|_| {
+                anyhow!(
+                    "Could not read workflow file at {}",
+                    workflow_file_path.display()
+                )
+            })
+            .and_then(|file_content| {
+                serde_yaml::from_str(&file_content)
+                    .map(|parsed_content| Self {
+                        raw_content: file_content,
+                        parsed_content,
+                    })
+                    .map_err(|e| anyhow!("Failed to parse workflow file: {e:?}"))
+            })
+    }
+}
+
+pub struct WorkflowValidationError {
+    file_path: PathBuf,
+    contents: WorkflowFile,
+    errors: Vec<RunValidationError>,
+}
+
+impl WorkflowValidationError {
+    pub fn new(
+        errors: Vec<RunValidationError>,
+        contents: WorkflowFile,
+        file_path: PathBuf,
+    ) -> Self {
+        Self {
+            file_path,
+            contents,
+            errors,
+        }
+    }
+
+    pub fn annotation_group<'a>(&'a self) -> Group<'a> {
+        let raw_content = &self.contents.raw_content;
+        let mut identical_lines = HashMap::new();
+
+        let ranges = self
+            .errors
+            .iter()
+            .flat_map(|error| error.found_injection_patterns.iter())
+            .map(|(line, pattern_range)| {
+                let initial_offset = identical_lines
+                    .get(&(line.as_str(), pattern_range.start))
+                    .copied()
+                    .unwrap_or_default();
+
+                let line_start = raw_content[initial_offset..]
+                    .find(line.as_str())
+                    .map(|offset| offset + initial_offset)
+                    .unwrap_or_default();
+
+                let pattern_start = line_start + pattern_range.start;
+                let pattern_end = pattern_start + pattern_range.len();
+
+                identical_lines.insert((line.as_str(), pattern_range.start), pattern_end);
+
+                pattern_start..pattern_end
+            });
+
+        Level::ERROR
+            .primary_title("Found GitHub input injection in run command")
+            .element(
+                Snippet::source(&self.contents.raw_content)
+                    .path(self.file_path.display().to_string())
+                    .annotations(ranges.map(|range| {
+                        AnnotationKind::Primary
+                            .span(range)
+                            .label("This should be passed via an environment variable")
+                    })),
+            )
+    }
+}
+
+pub struct RunValidationError {
+    found_injection_patterns: Vec<(String, Range<usize>)>,
+}
+
+pub fn validate_run_command(command: &str) -> Result<(), RunValidationError> {
+    let patterns: Vec<_> = command
+        .lines()
+        .flat_map(move |line| {
+            GITHUB_INPUT_PATTERN
+                .find_iter(line)
+                .map(|m| (line.to_owned(), m.range()))
+        })
+        .collect();
+
+    if patterns.is_empty() {
+        Ok(())
+    } else {
+        Err(RunValidationError {
+            found_injection_patterns: patterns,
+        })
+    }
+}

tooling/xtask/src/tasks/workflows.rs 🔗

@@ -4,6 +4,8 @@ use gh_workflow::Workflow;
 use std::fs;
 use std::path::{Path, PathBuf};
 
+use crate::tasks::workflow_checks::{self};
+
 mod after_release;
 mod autofix_pr;
 mod bump_patch_version;
@@ -87,8 +89,8 @@ impl WorkflowFile {
     }
 }
 
-#[derive(PartialEq, Eq)]
-enum WorkflowType {
+#[derive(PartialEq, Eq, strum::EnumIter)]
+pub enum WorkflowType {
     /// Workflows living in the Zed repository
     Zed,
     /// Workflows living in the `zed-extensions/workflows` repository that are
@@ -113,7 +115,7 @@ impl WorkflowType {
         )
     }
 
-    fn folder_path(&self) -> PathBuf {
+    pub fn folder_path(&self) -> PathBuf {
         match self {
             WorkflowType::Zed => PathBuf::from(".github/workflows"),
             WorkflowType::ExtensionCi => PathBuf::from("extensions/workflows"),
@@ -155,5 +157,5 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
         workflow_file.generate_file()?;
     }
 
-    Ok(())
+    workflow_checks::validate(Default::default())
 }

tooling/xtask/src/tasks/workflows/after_release.rs 🔗

@@ -123,7 +123,7 @@ fn publish_winget() -> NamedJob {
                 "X-GitHub-Api-Version" = "2022-11-28"
             }
             $body = @{ branch = "master" } | ConvertTo-Json
-            $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream"
+            $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream"
             try {
                 Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json"
                 Write-Host "Successfully synced winget-pkgs fork"

tooling/xtask/src/tasks/workflows/autofix_pr.rs 🔗

@@ -55,7 +55,8 @@ fn download_patch_artifact() -> Step<Use> {
 
 fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJob {
     fn checkout_pr(pr_number: &WorkflowInput) -> Step<Run> {
-        named::bash(&format!("gh pr checkout {pr_number}"))
+        named::bash(r#"gh pr checkout "$PR_NUMBER""#)
+            .add_env(("PR_NUMBER", pr_number.to_string()))
             .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN))
     }
 
@@ -133,7 +134,9 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo
 
 fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob {
     fn checkout_pr(pr_number: &WorkflowInput, token: &StepOutput) -> Step<Run> {
-        named::bash(&format!("gh pr checkout {pr_number}")).add_env(("GITHUB_TOKEN", token))
+        named::bash(r#"gh pr checkout "$PR_NUMBER""#)
+            .add_env(("PR_NUMBER", pr_number.to_string()))
+            .add_env(("GITHUB_TOKEN", token))
     }
 
     fn apply_patch() -> Step<Run> {

tooling/xtask/src/tasks/workflows/cherry_pick.rs 🔗

@@ -35,7 +35,10 @@ fn run_cherry_pick(
         channel: &WorkflowInput,
         token: &StepOutput,
     ) -> Step<Run> {
-        named::bash(&format!("./script/cherry-pick {branch} {commit} {channel}"))
+        named::bash(r#"./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL""#)
+            .add_env(("BRANCH", branch.to_string()))
+            .add_env(("COMMIT", commit.to_string()))
+            .add_env(("CHANNEL", channel.to_string()))
             .add_env(("GIT_COMMITTER_NAME", "Zed Zippy"))
             .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev"))
             .add_env(("GITHUB_TOKEN", token))

tooling/xtask/src/tasks/workflows/compare_perf.rs 🔗

@@ -29,14 +29,16 @@ pub fn run_perf(
     crate_name: &WorkflowInput,
 ) -> NamedJob {
     fn cargo_perf_test(ref_name: &WorkflowInput, crate_name: &WorkflowInput) -> Step<Run> {
-        named::bash(&format!(
-            "
-            if [ -n \"{crate_name}\" ]; then
-                cargo perf-test -p {crate_name} -- --json={ref_name};
+        named::bash(
+            r#"
+            if [ -n "$CRATE_NAME" ]; then
+                cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
             else
-                cargo perf-test -p vim -- --json={ref_name};
-            fi"
-        ))
+                cargo perf-test -p vim -- --json="$REF_NAME";
+            fi"#,
+        )
+        .add_env(("REF_NAME", ref_name.to_string()))
+        .add_env(("CRATE_NAME", crate_name.to_string()))
     }
 
     fn install_hyperfine() -> Step<Use> {
@@ -44,9 +46,9 @@ pub fn run_perf(
     }
 
     fn compare_runs(head: &WorkflowInput, base: &WorkflowInput) -> Step<Run> {
-        named::bash(&format!(
-            "cargo perf-compare --save=results.md {base} {head}"
-        ))
+        named::bash(r#"cargo perf-compare --save=results.md "$BASE" "$HEAD""#)
+            .add_env(("BASE", base.to_string()))
+            .add_env(("HEAD", head.to_string()))
     }
 
     named::job(

tooling/xtask/src/tasks/workflows/deploy_collab.rs 🔗

@@ -1,5 +1,5 @@
 use gh_workflow::{Container, Event, Port, Push, Run, Step, Use, Workflow};
-use indoc::{formatdoc, indoc};
+use indoc::indoc;
 
 use crate::tasks::workflows::runners::{self, Platform};
 use crate::tasks::workflows::steps::{
@@ -115,9 +115,10 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob {
     }
 
     fn sign_into_kubernetes() -> Step<Run> {
-        named::bash(formatdoc! {r#"
-            doctl kubernetes cluster kubeconfig save --expiry-seconds 600 {cluster_name}
-        "#, cluster_name = vars::CLUSTER_NAME})
+        named::bash(
+            r#"doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME""#,
+        )
+        .add_env(("CLUSTER_NAME", vars::CLUSTER_NAME))
     }
 
     fn start_rollout() -> Step<Run> {
@@ -139,7 +140,7 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob {
             echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE"
 
             source script/lib/deploy-helpers.sh
-            export_vars_for_environment $ZED_KUBE_NAMESPACE
+            export_vars_for_environment "$ZED_KUBE_NAMESPACE"
 
             ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)"
             export ZED_DO_CERTIFICATE_ID
@@ -149,14 +150,14 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob {
             export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT
             export DATABASE_MAX_CONNECTIONS=850
             envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
             echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
 
             export ZED_SERVICE_NAME=api
             export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT
             export DATABASE_MAX_CONNECTIONS=60
             envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
             echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
         "#})
     }

tooling/xtask/src/tasks/workflows/extension_bump.rs 🔗

@@ -150,8 +150,8 @@ pub(crate) fn compare_versions() -> (Step<Run>, StepOutput, StepOutput) {
     r#"
         CURRENT_VERSION="$({VERSION_CHECK})"
 
-        if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then
-            PR_FORK_POINT="$(git merge-base --fork-point main)"
+        if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
+            PR_FORK_POINT="$(git merge-base origin/main HEAD)"
             git checkout "$PR_FORK_POINT"
         elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
             git checkout "$BRANCH_PARENT_SHA"
@@ -191,7 +191,7 @@ fn bump_extension_version(
 
     let job = steps::dependant_job(dependencies)
         .cond(Expression::new(format!(
-            "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == 'true' || {version_changed} == 'false')",
+            "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == true || {version_changed} == 'false')",
             force_bump = force_bump_output.expr(),
             version_changed = version_changed_output.expr(),
         )))
@@ -258,8 +258,6 @@ fn install_bump_2_version() -> Step<Run> {
 
 fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step<Run>, StepOutput) {
     let step = named::bash(formatdoc! {r#"
-        OLD_VERSION="{current_version}"
-
         BUMP_FILES=("extension.toml")
         if [[ -f "Cargo.toml" ]]; then
             BUMP_FILES+=("Cargo.toml")
@@ -269,7 +267,7 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step
             --search "version = \"{{current_version}}"\" \
             --replace "version = \"{{new_version}}"\" \
             --current-version "$OLD_VERSION" \
-            --no-configured-files {bump_type} "${{BUMP_FILES[@]}}"
+            --no-configured-files "$BUMP_TYPE" "${{BUMP_FILES[@]}}"
 
         if [[ -f "Cargo.toml" ]]; then
             cargo update --workspace
@@ -280,7 +278,9 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step
         echo "new_version=${{NEW_VERSION}}" >> "$GITHUB_OUTPUT"
         "#
     })
-    .id("bump-version");
+    .id("bump-version")
+    .add_env(("OLD_VERSION", current_version.to_string()))
+    .add_env(("BUMP_TYPE", bump_type.to_string()));
 
     let new_version = StepOutput::new(&step, "new_version");
     (step, new_version)

tooling/xtask/src/tasks/workflows/extension_tests.rs 🔗

@@ -1,9 +1,11 @@
 use gh_workflow::*;
-use indoc::{formatdoc, indoc};
+use indoc::indoc;
 
 use crate::tasks::workflows::{
     extension_bump::compare_versions,
-    run_tests::{orchestrate_without_package_filter, tests_pass},
+    run_tests::{
+        fetch_ts_query_ls, orchestrate_without_package_filter, run_ts_query_ls, tests_pass,
+    },
     runners,
     steps::{
         self, CommonJobConditions, FluentBuilder, NamedJob, cache_rust_dependencies_namespace,
@@ -94,6 +96,8 @@ pub(crate) fn check_extension() -> NamedJob {
         .add_step(download_zed_extension_cli(cache_hit))
         .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed.
         .add_step(check())
+        .add_step(fetch_ts_query_ls())
+        .add_step(run_ts_query_ls())
         .add_step(check_version_job)
         .add_step(verify_version_did_not_change(version_changed));
 
@@ -138,12 +142,14 @@ pub fn check() -> Step<Run> {
 }
 
 fn verify_version_did_not_change(version_changed: StepOutput) -> Step<Run> {
-    named::bash(formatdoc! {r#"
-        if [[ {version_changed} == "true" && "${{{{ github.event_name }}}}" == "pull_request" && "${{{{ github.event.pull_request.user.login }}}}" != "zed-zippy[bot]" ]] ; then
+    named::bash(indoc! {r#"
+        if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then
             echo "Version change detected in your change!"
             echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot"
             exit 42
         fi
         "#
     })
+    .add_env(("VERSION_CHANGED", version_changed.to_string()))
+    .add_env(("PR_USER_LOGIN", "${{ github.event.pull_request.user.login }}"))
 }

tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs 🔗

@@ -105,10 +105,8 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
     }
 
     fn get_removed_files(prev_commit: &StepOutput) -> (Step<Run>, StepOutput) {
-        let step = named::bash(formatdoc! {r#"
-            PREV_COMMIT="{prev_commit}"
-
-            if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then
+        let step = named::bash(indoc::indoc! {r#"
+            if [ "$MATRIX_REPO" = "workflows" ]; then
                 WORKFLOW_DIR="extensions/workflows"
             else
                 WORKFLOW_DIR="extensions/workflows/shared"
@@ -119,8 +117,8 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
             # Get deleted files (status D) and renamed files (status R - old name needs removal)
             # Using -M to detect renames, then extracting files that are gone from their original location
             REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \
-                awk '/^D/ {{ print $2 }} /^R/ {{ print $2 }}' | \
-                xargs -I{{}} basename {{}} 2>/dev/null | \
+                awk '/^D/ { print $2 } /^R/ { print $2 }' | \
+                xargs -I{} basename {} 2>/dev/null | \
                 tr '\n' ' ' || echo "")
 
             REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs)
@@ -129,7 +127,9 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
             echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
         "#})
         .id("calc-changes")
-        .working_directory("zed");
+        .working_directory("zed")
+        .add_env(("PREV_COMMIT", prev_commit.to_string()))
+        .add_env(("MATRIX_REPO", "${{ matrix.repo }}"));
 
         let removed_files = StepOutput::new(&step, "removed_files");
 
@@ -137,9 +137,7 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
     }
 
     fn sync_workflow_files(removed_files: &StepOutput) -> Step<Run> {
-        named::bash(formatdoc! {r#"
-            REMOVED_FILES="{removed_files}"
-
+        named::bash(indoc::indoc! {r#"
             mkdir -p extension/.github/workflows
             cd extension/.github/workflows
 
@@ -153,17 +151,19 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
 
             cd - > /dev/null
 
-            if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then
+            if [ "$MATRIX_REPO" = "workflows" ]; then
                 cp zed/extensions/workflows/*.yml extension/.github/workflows/
             else
                 cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
             fi
         "#})
+        .add_env(("REMOVED_FILES", removed_files.to_string()))
+        .add_env(("MATRIX_REPO", "${{ matrix.repo }}"))
     }
 
     fn get_short_sha() -> (Step<Run>, StepOutput) {
         let step = named::bash(indoc::indoc! {r#"
-            echo "sha_short=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT"
+            echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT"
         "#})
         .id("short-sha")
         .working_directory("zed");
@@ -205,13 +205,16 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
 
     fn enable_auto_merge(token: &StepOutput) -> Step<gh_workflow::Run> {
         named::bash(indoc::indoc! {r#"
-            PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}"
             if [ -n "$PR_NUMBER" ]; then
                 cd extension
                 gh pr merge "$PR_NUMBER" --auto --squash
             fi
         "#})
         .add_env(("GH_TOKEN", token.to_string()))
+        .add_env((
+            "PR_NUMBER",
+            "${{ steps.create-pr.outputs.pull-request-number }}",
+        ))
     }
 
     let (authenticate, token) = generate_token(

tooling/xtask/src/tasks/workflows/publish_extension_cli.rs 🔗

@@ -28,7 +28,7 @@ fn publish_job() -> NamedJob {
     }
 
     fn upload_binary() -> Step<Run> {
-        named::bash("script/upload-extension-cli ${{ github.sha }}")
+        named::bash(r#"script/upload-extension-cli "$GITHUB_SHA""#)
             .add_env((
                 "DIGITALOCEAN_SPACES_ACCESS_KEY",
                 vars::DIGITALOCEAN_SPACES_ACCESS_KEY,
@@ -60,7 +60,7 @@ fn update_sha_in_zed(publish_job: &NamedJob) -> NamedJob {
 
     fn replace_sha() -> Step<Run> {
         named::bash(indoc! {r#"
-            sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \
+            sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \
                 tooling/xtask/src/tasks/workflows/extension_tests.rs
         "#})
     }
@@ -139,7 +139,7 @@ fn update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob {
 
     fn replace_sha() -> Step<Run> {
         named::bash(indoc! {r#"
-            sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \
+            sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \
                 .github/workflows/ci.yml
         "#})
     }
@@ -191,7 +191,7 @@ fn create_pull_request_extensions(
 
 fn get_short_sha() -> (Step<Run>, StepOutput) {
     let step = named::bash(indoc::indoc! {r#"
-        echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
     "#})
     .id("short-sha");
 

tooling/xtask/src/tasks/workflows/release.rs 🔗

@@ -272,18 +272,55 @@ pub(crate) fn push_release_update_notification(
     test_jobs: &[&NamedJob],
     bundle_jobs: &ReleaseBundleJobs,
 ) -> NamedJob {
-    let all_job_names = test_jobs
-        .into_iter()
+    fn env_name(name: &str) -> String {
+        format!("RESULT_{}", name.to_uppercase())
+    }
+
+    let all_job_names: Vec<&str> = test_jobs
+        .iter()
         .map(|j| j.name.as_ref())
-        .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref()));
+        .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref()))
+        .collect();
+
+    let env_entries = [
+        (
+            "DRAFT_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", create_draft_release_job.name),
+        ),
+        (
+            "UPLOAD_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", upload_assets_job.name),
+        ),
+        (
+            "VALIDATE_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", validate_assets_job.name),
+        ),
+        (
+            "AUTO_RELEASE_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", auto_release_preview.name),
+        ),
+        ("RUN_URL".into(), CURRENT_ACTION_RUN_URL.to_string()),
+    ]
+    .into_iter()
+    .chain(
+        all_job_names
+            .iter()
+            .map(|name| (env_name(name), format!("${{{{ needs.{name}.result }}}}"))),
+    );
+
+    let failure_checks = all_job_names
+        .iter()
+        .map(|name| {
+            format!(
+                "if [ \"${env_name}\" == \"failure\" ];then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi",
+                    env_name = env_name(name)
+            )
+        })
+        .collect::<Vec<_>>()
+        .join("\n        ");
 
     let notification_script = formatdoc! {r#"
-        DRAFT_RESULT="${{{{ needs.{draft_job}.result }}}}"
-        UPLOAD_RESULT="${{{{ needs.{upload_job}.result }}}}"
-        VALIDATE_RESULT="${{{{ needs.{validate_job}.result }}}}"
-        AUTO_RELEASE_RESULT="${{{{ needs.{auto_release_job}.result }}}}"
         TAG="$GITHUB_REF_NAME"
-        RUN_URL="{run_url}"
 
         if [ "$DRAFT_RESULT" == "failure" ]; then
             echo "❌ Draft release creation failed for $TAG: $RUN_URL"
@@ -319,19 +356,6 @@ pub(crate) fn push_release_update_notification(
             fi
         fi
         "#,
-        draft_job = create_draft_release_job.name,
-        upload_job = upload_assets_job.name,
-        validate_job = validate_assets_job.name,
-        auto_release_job = auto_release_preview.name,
-        run_url = CURRENT_ACTION_RUN_URL,
-        failure_checks = all_job_names
-            .into_iter()
-            .map(|name: &str| format!(
-                "if [ \"${{{{ needs.{name}.result }}}}\" == \"failure\" ];\
-                then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi"
-            ))
-            .collect::<Vec<_>>()
-            .join("\n        "),
     };
 
     let mut all_deps: Vec<&NamedJob> = vec![
@@ -347,7 +371,10 @@ pub(crate) fn push_release_update_notification(
         .runs_on(runners::LINUX_SMALL)
         .cond(Expression::new("always()"));
 
-    for step in notify_slack(MessageType::Evaluated(notification_script)) {
+    for step in notify_slack(MessageType::Evaluated {
+        script: notification_script,
+        env: env_entries.collect(),
+    }) {
         job = job.add_step(step);
     }
     named::job(job)
@@ -368,14 +395,17 @@ pub(crate) fn notify_on_failure(deps: &[&NamedJob]) -> NamedJob {
 
 pub(crate) enum MessageType {
     Static(String),
-    Evaluated(String),
+    Evaluated {
+        script: String,
+        env: Vec<(String, String)>,
+    },
 }
 
 fn notify_slack(message: MessageType) -> Vec<Step<Run>> {
     match message {
         MessageType::Static(message) => vec![send_slack_message(message)],
-        MessageType::Evaluated(expression) => {
-            let (generate_step, generated_message) = generate_slack_message(expression);
+        MessageType::Evaluated { script, env } => {
+            let (generate_step, generated_message) = generate_slack_message(script, env);
 
             vec![
                 generate_step,
@@ -385,26 +415,32 @@ fn notify_slack(message: MessageType) -> Vec<Step<Run>> {
     }
 }
 
-fn generate_slack_message(expression: String) -> (Step<Run>, StepOutput) {
+fn generate_slack_message(
+    expression: String,
+    env: Vec<(String, String)>,
+) -> (Step<Run>, StepOutput) {
     let script = formatdoc! {r#"
         MESSAGE=$({expression})
         echo "message=$MESSAGE" >> "$GITHUB_OUTPUT"
         "#
     };
-    let generate_step = named::bash(&script)
+    let mut generate_step = named::bash(&script)
         .id("generate-webhook-message")
         .add_env(("GH_TOKEN", Context::github().token()));
 
+    for (name, value) in env {
+        generate_step = generate_step.add_env((name, value));
+    }
+
     let output = StepOutput::new(&generate_step, "message");
 
     (generate_step, output)
 }
 
 fn send_slack_message(message: String) -> Step<Run> {
-    let script = formatdoc! {r#"
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{{"text":"{message}"}}' "$SLACK_WEBHOOK"
-        "#
-    };
-    named::bash(&script).add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+    named::bash(
+        r#"curl -X POST -H 'Content-type: application/json' --data "$(jq -n --arg text "$SLACK_MESSAGE" '{"text": $text}')" "$SLACK_WEBHOOK""#
+    )
+    .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+    .add_env(("SLACK_MESSAGE", message))
 }

tooling/xtask/src/tasks/workflows/run_agent_evals.rs 🔗

@@ -123,7 +123,7 @@ fn cron_unit_evals() -> NamedJob {
 const UNIT_EVAL_MODELS: &[&str] = &[
     "anthropic/claude-sonnet-4-5-latest",
     "anthropic/claude-opus-4-5-latest",
-    "google/gemini-3-pro",
+    "google/gemini-3.1-pro",
     "openai/gpt-5",
 ];
 

tooling/xtask/src/tasks/workflows/run_bundling.rs 🔗

@@ -146,6 +146,8 @@ pub(crate) fn bundle_linux(
         job: bundle_job(deps)
             .runs_on(arch.linux_bundler())
             .envs(bundle_envs(platform))
+            .add_env(Env::new("CC", "clang-18"))
+            .add_env(Env::new("CXX", "clang++-18"))
             .add_step(steps::checkout_repo())
             .when_some(release_channel, |job, release_channel| {
                 job.add_step(set_release_channel(platform, release_channel))

tooling/xtask/src/tasks/workflows/run_tests.rs 🔗

@@ -3,9 +3,13 @@ use gh_workflow::{
     Workflow,
 };
 use indexmap::IndexMap;
+use indoc::formatdoc;
 
 use crate::tasks::workflows::{
-    steps::{CommonJobConditions, repository_owner_guard_expression},
+    steps::{
+        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
+        use_clang,
+    },
     vars::{self, PathCondition},
 };
 
@@ -50,6 +54,7 @@ pub(crate) fn run_tests() -> Workflow {
         should_run_tests.guard(run_platform_tests(Platform::Mac)),
         should_run_tests.guard(doctests()),
         should_run_tests.guard(check_workspace_binaries()),
+        should_run_tests.guard(check_wasm()),
         should_run_tests.guard(check_dependencies()), // could be more specific here?
         should_check_docs.guard(check_docs()),
         should_check_licences.guard(check_licenses()),
@@ -114,7 +119,7 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N
           git fetch origin "$GITHUB_BASE_REF" --depth=350
           COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
         fi
-        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
 
         check_pattern() {
           local output_name="$1"
@@ -238,15 +243,20 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
 
     "#});
 
+    let env_entries: Vec<_> = jobs
+        .iter()
+        .map(|job| {
+            let env_name = format!("RESULT_{}", job.name.to_uppercase());
+            let env_value = format!("${{{{ needs.{}.result }}}}", job.name);
+            (env_name, env_value)
+        })
+        .collect();
+
     script.push_str(
         &jobs
             .iter()
-            .map(|job| {
-                format!(
-                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
-                    job.name, job.name
-                )
-            })
+            .zip(env_entries.iter())
+            .map(|(job, (env_name, _))| format!("check_result \"{}\" \"${}\"", job.name, env_name))
             .collect::<Vec<_>>()
             .join("\n"),
     );
@@ -261,11 +271,43 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
                 .collect::<Vec<String>>(),
         )
         .cond(repository_owner_guard_expression(true))
-        .add_step(named::bash(&script));
+        .add_step(
+            env_entries
+                .into_iter()
+                .fold(named::bash(&script), |step, env_item| {
+                    step.add_env(env_item)
+                }),
+        );
 
     named::job(job)
 }
 
+const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz";
+const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1";
+
+pub(crate) fn fetch_ts_query_ls() -> Step<Use> {
+    named::uses(
+        "dsaltares",
+        "fetch-gh-release-asset",
+        "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c",
+    ) // v1.1.1
+    .add_with(("repo", "ribru17/ts_query_ls"))
+    .add_with(("version", CI_TS_QUERY_RELEASE))
+    .add_with(("file", TS_QUERY_LS_FILE))
+}
+
+pub(crate) fn run_ts_query_ls() -> Step<Run> {
+    named::bash(formatdoc!(
+        r#"tar -xf {TS_QUERY_LS_FILE}
+        ./ts_query_ls format --check . || {{
+            echo "Found unformatted queries, please format them with ts_query_ls."
+            echo "For easy use, install the Tree-sitter query extension:"
+            echo "zed://extension/tree-sitter-query"
+            false
+        }}"#
+    ))
+}
+
 fn check_style() -> NamedJob {
     fn check_for_typos() -> Step<Use> {
         named::uses(
@@ -275,6 +317,7 @@ fn check_style() -> NamedJob {
         ) // v1.40.0
         .with(("config", "./typos.toml"))
     }
+
     named::job(
         release_job(&[])
             .runs_on(runners::LINUX_MEDIUM)
@@ -285,7 +328,9 @@ fn check_style() -> NamedJob {
             .add_step(steps::cargo_fmt())
             .add_step(steps::script("./script/check-todos"))
             .add_step(steps::script("./script/check-keymaps"))
-            .add_step(check_for_typos()),
+            .add_step(check_for_typos())
+            .add_step(fetch_ts_query_ls())
+            .add_step(run_ts_query_ls()),
     )
 }
 
@@ -323,7 +368,7 @@ fn check_dependencies() -> NamedJob {
         .with(("license-check", false))
     }
 
-    named::job(
+    named::job(use_clang(
         release_job(&[])
             .runs_on(runners::LINUX_SMALL)
             .add_step(steps::checkout_repo())
@@ -332,11 +377,43 @@ fn check_dependencies() -> NamedJob {
             .add_step(run_cargo_machete())
             .add_step(check_cargo_lock())
             .add_step(check_vulnerable_dependencies()),
+    ))
+}
+
+fn check_wasm() -> NamedJob {
+    fn install_nightly_wasm_toolchain() -> Step<Run> {
+        named::bash(
+            "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown",
+        )
+    }
+
+    fn cargo_check_wasm() -> Step<Run> {
+        named::bash(concat!(
+            "cargo +nightly -Zbuild-std=std,panic_abort ",
+            "check --target wasm32-unknown-unknown -p gpui_platform",
+        ))
+        .add_env((
+            "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS",
+            "-C target-feature=+atomics,+bulk-memory,+mutable-globals",
+        ))
+    }
+
+    named::job(
+        release_job(&[])
+            .runs_on(runners::LINUX_LARGE)
+            .add_step(steps::checkout_repo())
+            .add_step(steps::setup_cargo_config(Platform::Linux))
+            .add_step(steps::cache_rust_dependencies_namespace())
+            .add_step(install_nightly_wasm_toolchain())
+            .add_step(steps::setup_sccache(Platform::Linux))
+            .add_step(cargo_check_wasm())
+            .add_step(steps::show_sccache_stats(Platform::Linux))
+            .add_step(steps::cleanup_cargo_config(Platform::Linux)),
     )
 }
 
 fn check_workspace_binaries() -> NamedJob {
-    named::job(
+    named::job(use_clang(
         release_job(&[])
             .runs_on(runners::LINUX_LARGE)
             .add_step(steps::checkout_repo())
@@ -348,7 +425,7 @@ fn check_workspace_binaries() -> NamedJob {
             .add_step(steps::script("cargo build --workspace --bins --examples"))
             .add_step(steps::show_sccache_stats(Platform::Linux))
             .add_step(steps::cleanup_cargo_config(Platform::Linux)),
-    )
+    ))
 }
 
 pub(crate) fn clippy(platform: Platform) -> NamedJob {
@@ -357,23 +434,27 @@ pub(crate) fn clippy(platform: Platform) -> NamedJob {
         Platform::Linux => runners::LINUX_DEFAULT,
         Platform::Mac => runners::MAC_DEFAULT,
     };
+    let mut job = release_job(&[])
+        .runs_on(runner)
+        .add_step(steps::checkout_repo())
+        .add_step(steps::setup_cargo_config(platform))
+        .when(
+            platform == Platform::Linux || platform == Platform::Mac,
+            |this| this.add_step(steps::cache_rust_dependencies_namespace()),
+        )
+        .when(
+            platform == Platform::Linux,
+            steps::install_linux_dependencies,
+        )
+        .add_step(steps::setup_sccache(platform))
+        .add_step(steps::clippy(platform))
+        .add_step(steps::show_sccache_stats(platform));
+    if platform == Platform::Linux {
+        job = use_clang(job);
+    }
     NamedJob {
         name: format!("clippy_{platform}"),
-        job: release_job(&[])
-            .runs_on(runner)
-            .add_step(steps::checkout_repo())
-            .add_step(steps::setup_cargo_config(platform))
-            .when(
-                platform == Platform::Linux || platform == Platform::Mac,
-                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
-            )
-            .when(
-                platform == Platform::Linux,
-                steps::install_linux_dependencies,
-            )
-            .add_step(steps::setup_sccache(platform))
-            .add_step(steps::clippy(platform))
-            .add_step(steps::show_sccache_stats(platform)),
+        job,
     }
 }
 
@@ -411,10 +492,12 @@ fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJo
             })
             .add_step(steps::checkout_repo())
             .add_step(steps::setup_cargo_config(platform))
-            .when(
-                platform == Platform::Linux || platform == Platform::Mac,
-                |this| this.add_step(steps::cache_rust_dependencies_namespace()),
-            )
+            .when(platform == Platform::Mac, |this| {
+                this.add_step(steps::cache_rust_dependencies_namespace())
+            })
+            .when(platform == Platform::Linux, |this| {
+                use_clang(this.add_step(steps::cache_rust_dependencies_namespace()))
+            })
             .when(
                 platform == Platform::Linux,
                 steps::install_linux_dependencies,
@@ -464,6 +547,14 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
             .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
     }
 
+    fn buf_lint() -> Step<Run> {
+        named::bash("buf lint crates/proto/proto")
+    }
+
+    fn check_protobuf_formatting() -> Step<Run> {
+        named::bash("buf format --diff --exit-code crates/proto/proto")
+    }
+
     named::job(
         release_job(&[])
             .runs_on(runners::LINUX_DEFAULT)
@@ -474,7 +565,9 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
             .add_step(steps::checkout_repo().with_full_history())
             .add_step(ensure_fresh_merge())
             .add_step(bufbuild_setup_action())
-            .add_step(bufbuild_breaking_action()),
+            .add_step(bufbuild_breaking_action())
+            .add_step(buf_lint())
+            .add_step(check_protobuf_formatting()),
     )
 }
 
@@ -486,7 +579,7 @@ fn doctests() -> NamedJob {
         .id("run_doctests")
     }
 
-    named::job(
+    named::job(use_clang(
         release_job(&[])
             .runs_on(runners::LINUX_DEFAULT)
             .add_step(steps::checkout_repo())
@@ -497,7 +590,7 @@ fn doctests() -> NamedJob {
             .add_step(run_doctests())
             .add_step(steps::show_sccache_stats(Platform::Linux))
             .add_step(steps::cleanup_cargo_config(Platform::Linux)),
-    )
+    ))
 }
 
 fn check_licenses() -> NamedJob {
@@ -539,7 +632,7 @@ fn check_docs() -> NamedJob {
         "#})
     }
 
-    named::job(
+    named::job(use_clang(
         release_job(&[])
             .runs_on(runners::LINUX_LARGE)
             .add_step(steps::checkout_repo())
@@ -556,7 +649,7 @@ fn check_docs() -> NamedJob {
             .add_step(
                 lychee_link_check("target/deploy/docs"), // check links in generated html
             ),
-    )
+    ))
 }
 
 pub(crate) fn check_scripts() -> NamedJob {
@@ -567,9 +660,10 @@ pub(crate) fn check_scripts() -> NamedJob {
     }
 
     fn run_actionlint() -> Step<Run> {
-        named::bash(indoc::indoc! {r#"
-            ${{ steps.get_actionlint.outputs.executable }} -color
-        "#})
+        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
+            "ACTIONLINT_BIN",
+            "${{ steps.get_actionlint.outputs.executable }}",
+        ))
     }
 
     fn run_shellcheck() -> Step<Run> {
@@ -594,6 +688,7 @@ pub(crate) fn check_scripts() -> NamedJob {
             .add_step(run_shellcheck())
             .add_step(download_actionlint().id("get_actionlint"))
             .add_step(run_actionlint())
+            .add_step(cache_rust_dependencies_namespace())
             .add_step(check_xtask_workflows()),
     )
 }

tooling/xtask/src/tasks/workflows/steps.rs 🔗

@@ -3,6 +3,11 @@ use serde_json::Value;
 
 use crate::tasks::workflows::{runners::Platform, vars, vars::StepOutput};
 
+pub(crate) fn use_clang(job: Job) -> Job {
+    job.add_env(Env::new("CC", "clang"))
+        .add_env(Env::new("CXX", "clang++"))
+}
+
 const SCCACHE_R2_BUCKET: &str = "sccache-zed";
 
 const BASH_SHELL: &str = "bash -euxo pipefail {0}";
@@ -498,9 +503,8 @@ pub mod named {
 }
 
 pub fn git_checkout(ref_name: &dyn std::fmt::Display) -> Step<Run> {
-    named::bash(&format!(
-        "git fetch origin {ref_name} && git checkout {ref_name}"
-    ))
+    named::bash(r#"git fetch origin "$REF_NAME" && git checkout "$REF_NAME""#)
+        .add_env(("REF_NAME", ref_name.to_string()))
 }
 
 pub fn authenticate_as_zippy() -> (Step<Use>, StepOutput) {

typos.toml 🔗

@@ -4,6 +4,9 @@ ignore-hidden = false
 extend-exclude = [
     ".git/",
 
+    # Typewriter model names used for agent branch names aren't typos.
+    "crates/agent_ui/src/branch_names.rs",
+
     # Contributor names aren't typos.
     ".mailmap",
 
@@ -42,6 +45,8 @@ extend-exclude = [
     "crates/gpui_windows/src/window.rs",
     # Some typos in the base mdBook CSS.
     "docs/theme/css/",
+    # Automatically generated JS.
+    "docs/theme/c15t@*.js",
     # Spellcheck triggers on `|Fixe[sd]|` regex part.
     "script/danger/dangerfile.ts",
     # Eval examples for prompts and criteria